Dataset schema (one row per source file):

| column | dtype | value range |
|---|---|---|
| hexsha | string | length 40..40 |
| size | int64 | 1..1.03M |
| ext | string | 10 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 3..239 |
| max_stars_repo_name | string | length 5..130 |
| max_stars_repo_head_hexsha | string | length 40..78 |
| max_stars_repo_licenses | list | length 1..10 |
| max_stars_count | int64 | 1..191k (nullable) |
| max_stars_repo_stars_event_min_datetime | string | length 24..24 (nullable) |
| max_stars_repo_stars_event_max_datetime | string | length 24..24 (nullable) |
| max_issues_repo_path | string | length 3..239 |
| max_issues_repo_name | string | length 5..130 |
| max_issues_repo_head_hexsha | string | length 40..78 |
| max_issues_repo_licenses | list | length 1..10 |
| max_issues_count | int64 | 1..67k (nullable) |
| max_issues_repo_issues_event_min_datetime | string | length 24..24 (nullable) |
| max_issues_repo_issues_event_max_datetime | string | length 24..24 (nullable) |
| max_forks_repo_path | string | length 3..239 |
| max_forks_repo_name | string | length 5..130 |
| max_forks_repo_head_hexsha | string | length 40..78 |
| max_forks_repo_licenses | list | length 1..10 |
| max_forks_count | int64 | 1..105k (nullable) |
| max_forks_repo_forks_event_min_datetime | string | length 24..24 (nullable) |
| max_forks_repo_forks_event_max_datetime | string | length 24..24 (nullable) |
| content | string | length 1..1.03M |
| avg_line_length | float64 | 1..958k |
| max_line_length | int64 | 1..1.03M |
| alphanum_fraction | float64 | 0..1 |
79480cab31ae5c901c356a851d1112f0428b0fbd
| 1,492 |
py
|
Python
|
pcg_gazebo/generators/model_group_generator.py
|
TForce1/pcg_gazebo
|
9ff88016b7b6903236484958ca7c6ed9f8ffb346
|
[
"ECL-2.0",
"Apache-2.0"
] | 40 |
2020-02-04T18:16:49.000Z
|
2022-02-22T11:36:34.000Z
|
pcg_gazebo/generators/model_group_generator.py
|
awesomebytes/pcg_gazebo
|
4f335dd460ef7c771f1df78b46a92fad4a62cedc
|
[
"ECL-2.0",
"Apache-2.0"
] | 75 |
2020-01-23T13:40:50.000Z
|
2022-02-09T07:26:01.000Z
|
pcg_gazebo/generators/model_group_generator.py
|
GimpelZhang/gazebo_world_generator
|
eb7215499d0ddc972d804c988fadab1969579b1b
|
[
"ECL-2.0",
"Apache-2.0"
] | 18 |
2020-09-10T06:35:41.000Z
|
2022-02-20T19:08:17.000Z
|
# Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._generator import _Generator
class ModelGroupGenerator(_Generator):
def __init__(self, name='generator', **kwargs):
super(ModelGroupGenerator, self).__init__(name=name, **kwargs)
def init(self, name=None):
from ..simulation import ModelGroup
if name is None:
name = self._name
if self.assets.has_element(name) and \
self.assets.is_model_group(name):
self._simulation_entity = self.assets.get(name)
else:
self._simulation_entity = ModelGroup(name=name)
self.engines.reset_engines()
def run(self, group_name='default', pose=[0, 0, 0, 0, 0, 0]):
self.init(group_name)
self.run_engines()
self._simulation_entity.pose = pose
return self._simulation_entity
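# Editor's note (illustrative sketch, not part of the original pcg_gazebo source):
# based only on the API visible above, and assuming assets and engines have
# already been registered on the instance (that setup comes from the inherited
# _Generator class and is not shown here), a generator would be used roughly as:
#
#     generator = ModelGroupGenerator(name='my_group')       # hypothetical name
#     group = generator.run(group_name='my_group',
#                           pose=[0, 0, 0, 0, 0, 0])         # x, y, z, roll, pitch, yaw
#     # 'group' is the resulting ModelGroup (or the existing asset of that name)
#     # with the requested pose applied.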
| 38.25641 | 74 | 0.699062 |
79480d7b06302ca8154d770bd146aa25739c22fe
| 9,741 |
py
|
Python
|
nltk/classify/naivebayes.py
|
caio1982/nltk
|
740e5ebece193b7febcffdf8268834f2daee599e
|
[
"Apache-2.0"
] | 1 |
2015-07-18T03:25:26.000Z
|
2015-07-18T03:25:26.000Z
|
nltk/classify/naivebayes.py
|
caio1982/nltk
|
740e5ebece193b7febcffdf8268834f2daee599e
|
[
"Apache-2.0"
] | null | null | null |
nltk/classify/naivebayes.py
|
caio1982/nltk
|
740e5ebece193b7febcffdf8268834f2daee599e
|
[
"Apache-2.0"
] | null | null | null |
# Natural Language Toolkit: Naive Bayes Classifiers
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
#
# $Id: naivebayes.py 2063 2004-07-17 21:02:24Z edloper $
"""
A classifier based on the Naive Bayes algorithm. In order to find the
probability for a label, this algorithm first uses the Bayes rule to
express P(label|features) in terms of P(label) and P(features|label):
| P(label) * P(features|label)
| P(label|features) = ------------------------------
| P(features)
The algorithm then makes the 'naive' assumption that all features are
independent, given the label:
| P(label) * P(f1|label) * ... * P(fn|label)
| P(label|features) = --------------------------------------------
| P(features)
Rather than computing P(features) explicitly, the algorithm just
calculates the denominator for each label, and normalizes them so they
sum to one:
| P(label) * P(f1|label) * ... * P(fn|label)
| P(label|features) = --------------------------------------------
| SUM[l]( P(l) * P(f1|l) * ... * P(fn|l) )
"""
from collections import defaultdict
from nltk.probability import FreqDist, DictionaryProbDist, ELEProbDist, sum_logs
from api import ClassifierI
##//////////////////////////////////////////////////////
## Naive Bayes Classifier
##//////////////////////////////////////////////////////
class NaiveBayesClassifier(ClassifierI):
"""
A Naive Bayes classifier. Naive Bayes classifiers are
parameterized by two probability distributions:
- P(label) gives the probability that an input will receive each
label, given no information about the input's features.
- P(fname=fval|label) gives the probability that a given feature
(fname) will receive a given value (fval), given the
label (label).
If the classifier encounters an input with a feature that has
never been seen with any label, then rather than assigning a
probability of 0 to all labels, it will ignore that feature.
The feature value 'None' is reserved for unseen feature values;
you generally should not use 'None' as a feature value for one of
your own features.
"""
def __init__(self, label_probdist, feature_probdist):
"""
:param label_probdist: P(label), the probability distribution
over labels. It is expressed as a ``ProbDistI`` whose
samples are labels. I.e., P(label) =
``label_probdist.prob(label)``.
:param feature_probdist: P(fname=fval|label), the probability
distribution for feature values, given labels. It is
expressed as a dictionary whose keys are ``(label, fname)``
pairs and whose values are ``ProbDistI`` objects over feature
values. I.e., P(fname=fval|label) =
``feature_probdist[label,fname].prob(fval)``. If a given
``(label,fname)`` is not a key in ``feature_probdist``, then
it is assumed that the corresponding P(fname=fval|label)
is 0 for all values of ``fval``.
"""
self._label_probdist = label_probdist
self._feature_probdist = feature_probdist
self._labels = label_probdist.samples()
def labels(self):
return self._labels
def classify(self, featureset):
return self.prob_classify(featureset).max()
def prob_classify(self, featureset):
# Discard any feature names that we've never seen before.
# Otherwise, we'll just assign a probability of 0 to
# everything.
featureset = featureset.copy()
for fname in featureset.keys():
for label in self._labels:
if (label, fname) in self._feature_probdist:
break
else:
#print 'Ignoring unseen feature %s' % fname
del featureset[fname]
# Find the log probability of each label, given the features.
# Start with the log probability of the label itself.
logprob = {}
for label in self._labels:
logprob[label] = self._label_probdist.logprob(label)
# Then add in the log probability of features given labels.
for label in self._labels:
for (fname, fval) in featureset.items():
if (label, fname) in self._feature_probdist:
feature_probs = self._feature_probdist[label,fname]
logprob[label] += feature_probs.logprob(fval)
else:
# nb: This case will never come up if the
# classifier was created by
# NaiveBayesClassifier.train().
logprob[label] += sum_logs([]) # = -INF.
return DictionaryProbDist(logprob, normalize=True, log=True)
def show_most_informative_features(self, n=10):
# Determine the most relevant features, and display them.
cpdist = self._feature_probdist
print 'Most Informative Features'
for (fname, fval) in self.most_informative_features(n):
def labelprob(l):
return cpdist[l,fname].prob(fval)
labels = sorted([l for l in self._labels
if fval in cpdist[l,fname].samples()],
key=labelprob)
if len(labels) == 1: continue
l0 = labels[0]
l1 = labels[-1]
if cpdist[l0,fname].prob(fval) == 0:
ratio = 'INF'
else:
ratio = '%8.1f' % (cpdist[l1,fname].prob(fval) /
cpdist[l0,fname].prob(fval))
print ('%24s = %-14r %6s : %-6s = %s : 1.0' %
(fname, fval, str(l1)[:6], str(l0)[:6], ratio))
def most_informative_features(self, n=100):
"""
Return a list of the 'most informative' features used by this
classifier. For the purpose of this function, the
informativeness of a feature ``(fname,fval)`` is equal to the
highest value of P(fname=fval|label), for any label, divided by
the lowest value of P(fname=fval|label), for any label:
| max[ P(fname=fval|label1) / P(fname=fval|label2) ]
"""
# The set of (fname, fval) pairs used by this classifier.
features = set()
# The max & min probability associated w/ each (fname, fval)
# pair. Maps (fname,fval) -> float.
maxprob = defaultdict(lambda: 0.0)
minprob = defaultdict(lambda: 1.0)
for (label, fname), probdist in self._feature_probdist.items():
for fval in probdist.samples():
feature = (fname, fval)
features.add( feature )
p = probdist.prob(fval)
maxprob[feature] = max(p, maxprob[feature])
minprob[feature] = min(p, minprob[feature])
if minprob[feature] == 0:
features.discard(feature)
# Convert features to a list, & sort it by how informative
# features are.
features = sorted(features,
key=lambda feature: minprob[feature]/maxprob[feature])
return features[:n]
@staticmethod
def train(labeled_featuresets, estimator=ELEProbDist):
"""
:param labeled_featuresets: A list of classified featuresets,
i.e., a list of tuples ``(featureset, label)``.
"""
label_freqdist = FreqDist()
feature_freqdist = defaultdict(FreqDist)
feature_values = defaultdict(set)
fnames = set()
# Count up how many times each feature value occurred, given
# the label and featurename.
for featureset, label in labeled_featuresets:
label_freqdist.inc(label)
for fname, fval in featureset.items():
# Increment freq(fval|label, fname)
feature_freqdist[label, fname].inc(fval)
# Record that fname can take the value fval.
feature_values[fname].add(fval)
# Keep a list of all feature names.
fnames.add(fname)
# If a feature didn't have a value given for an instance, then
# we assume that it gets the implicit value 'None.' This loop
# counts up the number of 'missing' feature values for each
# (label,fname) pair, and increments the count of the fval
# 'None' by that amount.
for label in label_freqdist:
num_samples = label_freqdist[label]
for fname in fnames:
count = feature_freqdist[label, fname].N()
feature_freqdist[label, fname].inc(None, num_samples-count)
feature_values[fname].add(None)
# Create the P(label) distribution
label_probdist = estimator(label_freqdist)
# Create the P(fval|label, fname) distribution
feature_probdist = {}
for ((label, fname), freqdist) in feature_freqdist.items():
probdist = estimator(freqdist, bins=len(feature_values[fname]))
feature_probdist[label,fname] = probdist
return NaiveBayesClassifier(label_probdist, feature_probdist)
##//////////////////////////////////////////////////////
## Demo
##//////////////////////////////////////////////////////
def demo():
from nltk.classify.util import names_demo
classifier = names_demo(NaiveBayesClassifier.train)
classifier.show_most_informative_features()
if __name__ == '__main__':
demo()
| 40.757322 | 80 | 0.582281 |
79480fda51630fe5c3af76731fd81e6b211c428e
| 2,925 |
py
|
Python
|
cds_ils/literature/covers.py
|
zzacharo/cds-ils
|
6816c348e209607b97583acc40fb37dea0c62418
|
[
"MIT"
] | 6 |
2020-09-18T00:13:38.000Z
|
2021-11-14T17:12:19.000Z
|
cds_ils/literature/covers.py
|
zzacharo/cds-ils
|
6816c348e209607b97583acc40fb37dea0c62418
|
[
"MIT"
] | 321 |
2020-08-28T15:42:25.000Z
|
2022-03-14T15:11:50.000Z
|
cds_ils/literature/covers.py
|
zzacharo/cds-ils
|
6816c348e209607b97583acc40fb37dea0c62418
|
[
"MIT"
] | 8 |
2019-07-10T07:02:08.000Z
|
2020-08-10T14:07:25.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019-2020 CERN.
#
# CDS-ILS is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.
"""Literature covers."""
import urllib
from functools import partial
from flask import current_app
from invenio_app_ils.literature.covers_builder import build_placeholder_urls
from invenio_app_ils.proxies import current_app_ils
MIN_CONTENT_LENGTH = 128
def should_record_have_cover(record):
"""Check if this type of record has cover."""
if "$schema" in record:
schema = record["$schema"]
Document = current_app_ils.document_record_cls
is_document = schema.endswith(Document._schema)
Series = current_app_ils.series_record_cls
is_series = schema.endswith(Series._schema)
if is_document or is_series:
return True
return False
def has_already_cover(cover_metadata={}):
"""Check if record has already valid cover in cover_metadata."""
return cover_metadata.get("ISBN") or cover_metadata.get("ISSN")
def is_valid_cover(cover_metadata):
"""Fetch all sizes of cover from url and evaluate if they are valid."""
syndetics_urls = build_syndetic_cover_urls(cover_metadata)
if syndetics_urls is None:
return False
try:
for size in ["small", "medium", "large"]:
resp = urllib.request.urlopen(syndetics_urls[size])
has_error = resp.getcode() != 200
less_than_1_pixel = (
int(resp.getheader("Content-Length")) <= MIN_CONTENT_LENGTH
)
if has_error or less_than_1_pixel:
return False
except Exception:
return False
return True
def build_syndetic_cover_urls(cover_metadata):
"""Decorate literature with cover urls for all sizes."""
client = current_app.config["CDS_ILS_SYNDETIC_CLIENT"]
url = "https://secure.syndetics.com/index.aspx"
issn = cover_metadata.get("ISSN")
if issn:
scheme = "ISSN"
scheme_value = issn
isbn = cover_metadata.get("ISBN")
if isbn:
scheme = "ISBN"
scheme_value = isbn
if issn or isbn:
_url = "{url}?client={client}&{scheme}={value}/{size}.gif"
partial_url = partial(
_url.format,
url=url,
client=client,
scheme=scheme,
value=scheme_value,
)
return {
"is_placeholder": False,
"small": partial_url(size="SC"),
"medium": partial_url(size="MC"),
"large": partial_url(size="LC"),
}
return None
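# Editor's note (illustrative, not part of the original cds_ils source): given
# the format string above, a cover_metadata dict such as {"ISBN": "9781234567897"}
# (hypothetical value) resolves to URLs of the form
#
#     https://secure.syndetics.com/index.aspx?client=<client>&ISBN=9781234567897/SC.gif
#
# with /MC.gif and /LC.gif for the medium and large sizes and "is_placeholder"
# set to False; ISSN-only metadata follows the same pattern with the ISSN scheme.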
def build_cover_urls(metadata):
"""Try to build the cover urls else build placeholder urls."""
cover_metadata = metadata.get("cover_metadata", {})
syndetics_urls = build_syndetic_cover_urls(cover_metadata)
return syndetics_urls or build_placeholder_urls()
| 30.154639 | 76 | 0.65265 |
79481003c1fe347a44ef0a85f9937c6f7a911724
| 5,203 |
py
|
Python
|
coinflow/protocol/structs/Varint.py
|
mariuszlitwin/coinflow
|
49686e703a6edf90777eff2a154de1d732fc6b5f
|
[
"Apache-2.0",
"MIT"
] | 1 |
2018-06-12T23:04:11.000Z
|
2018-06-12T23:04:11.000Z
|
coinflow/protocol/structs/Varint.py
|
mariuszlitwin/coinflow
|
49686e703a6edf90777eff2a154de1d732fc6b5f
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
coinflow/protocol/structs/Varint.py
|
mariuszlitwin/coinflow
|
49686e703a6edf90777eff2a154de1d732fc6b5f
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import struct
from typing import NamedTuple
from .Struct import Struct
Payload = NamedTuple('Payload', (('value', int), ('length', int)))
class Varint(int, Struct):
"""
Variable length integer structure for use in Bitcoin protocol messages
TODO: fix reference
.. Network address structure in Bitcoin wiki:
https://en.bitcoin.it/wiki/Protocol_documentation#Network_address
"""
def __new__(cls, value: int, *args, **kwargs):
return super(Varint, cls).__new__(cls, int(value))
def __len__(self) -> int:
n = int(self) # type: int
if n < 0xfd:
return 1
elif n < 0xffff:
return 3
elif n < 0xffffffff:
return 5
else:
return 9
def __add__(self, other) -> object:
i = super().__add__(other) # type: int
return self.fromint(i)
def __radd__(self, other) -> object:
i = super().__radd__(other) # type: int
return self.fromint(i)
def __sub__(self, other) -> object:
i = super().__sub__(other) # type: int
return self.fromint(i)
def __rsub__(self, other) -> object:
i = super().__rsub__(other) # type: int
return self.fromint(i)
def __mul__(self, other) -> object:
i = super().__mul__(other) # type: int
return self.fromint(i)
def __rmul__(self, other) -> object:
i = super().__rmul__(other) # type: int
return self.fromint(i)
def __floordiv__(self, other) -> object:
i = super().__floordiv__(other) # type: int
return self.fromint(i)
def __rfloordiv__(self, other) -> object:
i = super().__rfloordiv__(other) # type: int
return self.fromint(i)
def __truediv__(self, other) -> object:
i = super().__truediv__(other) # type: int
return self.fromint(i)
def __rtruediv__(self, other) -> object:
i = super().__rtruediv__(other) # type: int
return self.fromint(i)
def __mod__(self, other) -> object:
i = super().__mod__(other) # type: int
return self.fromint(i)
def __rmod__(self, other) -> object:
i = super().__rmod__(other) # type: int
return self.fromint(i)
def __pow__(self, other) -> object:
i = super().__pow__(other) # type: int
return self.fromint(i)
def __rpow__(self, other) -> object:
i = super().__rpow__(other) # type: int
return self.fromint(i)
def __lshift__(self, other) -> object:
i = super().__lshift__(other) # type: int
return self.fromint(i)
def __rshift__(self, other) -> object:
i = super().__rshift__(other) # type: int
return self.fromint(i)
def __and__(self, other) -> object:
i = super().__and__(other) # type: int
return self.fromint(i)
def __rand__(self, other) -> object:
i = super().__rand__(other) # type: int
return self.fromint(i)
def __xor__(self, other) -> object:
i = super().__xor__(other) # type: int
return self.fromint(i)
def __rxor__(self, other) -> object:
i = super().__rxor__(other) # type: int
return self.fromint(i)
def __or__(self, other) -> object:
i = super().__or__(other) # type: int
return self.fromint(i)
def __ror__(self, other) -> object:
i = super().__ror__(other) # type: int
return self.fromint(i)
def __neg__(self) -> object:
i = super().__neg__() # type: int
return self.fromint(i)
def __pos__(self) -> object:
i = super().__pos__() # type: int
return self.fromint(i)
def __abs__(self) -> object:
i = super().__abs__() # type: int
return self.fromint(i)
def __invert__(self) -> object:
i = super().__invert__() # type: int
return self.fromint(i)
@classmethod
def fromint(cls, value: int) -> object:
return cls(value)
def encode(self) -> bytes:
"""
Encode object to Bitcoin's varint structure
Returns
-------
bytes
encoded message
"""
n = int(self) # type: int
if n < 0xfd:
return struct.pack('<B', n)
elif n < 0xffff:
return struct.pack('<cH', b'\xfd', n)
elif n < 0xffffffff:
return struct.pack('<cL', b'\xfe', n)
else:
return struct.pack('<cQ', b'\xff', n)
@classmethod
def decode(cls, n: bytes) -> Payload:
"""
Decode varint from bytes
Parameters
----------
n : bytes
varint structure to decode
Returns
-------
NamedTuple (Payload)
NamedTuple with all parsed fields (n, length)
"""
n0 = n[0] # type: int
if n0 < 0xfd:
return Payload(n0, 1)
elif n0 == 0xfd:
return Payload(struct.unpack('<H', n[1:3])[0], 3)
elif n0 == 0xfe:
return Payload(struct.unpack('<L', n[1:5])[0], 5)
else:
return Payload(struct.unpack('<Q', n[1:9])[0], 9)
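# Editor's note (illustrative round trip, not part of the original coinflow
# source), using only the encode/decode pair defined above:
#
#     Varint(300).encode()            # -> b'\xfd\x2c\x01' (0xfd prefix + uint16 LE)
#     Varint.decode(b'\xfd\x2c\x01')  # -> Payload(value=300, length=3)
#
# Values below 0xfd occupy a single byte, e.g. Varint(16).encode() == b'\x10'.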
| 28.124324 | 74 | 0.550067 |
7948110bd7c39804f99da2f242d4590f243485b5
| 16,251 |
py
|
Python
|
benchmarks/f3_wrong_hints_permutations/scaling_ltl_timed_transition_system/4-sender_receiver_10.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | 3 |
2021-04-23T23:29:26.000Z
|
2022-03-23T10:00:30.000Z
|
benchmarks/f3_wrong_hints_permutations/scaling_ltl_timed_transition_system/4-sender_receiver_10.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | null | null | null |
benchmarks/f3_wrong_hints_permutations/scaling_ltl_timed_transition_system/4-sender_receiver_10.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | 1 |
2021-11-17T22:02:56.000Z
|
2021-11-17T22:02:56.000Z
|
from typing import FrozenSet
from collections import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type) -> tuple:
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def make_enum(menv, v_name: str, enum_size: int):
bool_type = msat_get_bool_type(menv)
num_bits = ceil(log(enum_size, 2))
b_vars = []
for idx in range(num_bits):
c_name = "{}{}".format(v_name, idx)
b_vars.append(tuple(decl_consts(menv, c_name, bool_type)))
vals = []
x_vals = []
for enum_val in range(enum_size):
bit_val = format(enum_val, '0{}b'.format(num_bits))
assert len(bit_val) == num_bits
assert all(c in {'0', '1'} for c in bit_val)
assign = [b_vars[idx] if c == '1' else
(msat_make_not(menv, b_vars[idx][0]),
msat_make_not(menv, b_vars[idx][1]))
for idx, c in enumerate(reversed(bit_val))]
pred = assign[0][0]
x_pred = assign[0][1]
for it in assign[1:]:
pred = msat_make_and(menv, pred, it[0])
x_pred = msat_make_and(menv, x_pred, it[1])
vals.append(pred)
x_vals.append(x_pred)
assert len(vals) == enum_size
assert len(x_vals) == enum_size
return b_vars, vals, x_vals
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
r2s, x_r2s = decl_consts(menv, "r2s", int_type)
s2r, x_s2r = decl_consts(menv, "s2r", int_type)
delta, x_delta = decl_consts(menv, delta_name, real_type)
sender = Sender("s", menv, enc, r2s, x_r2s, s2r, x_s2r, delta)
receiver = Receiver("r", menv, enc, s2r, x_s2r, r2s, x_r2s, delta)
curr2next = {r2s: x_r2s, s2r: x_s2r, delta: x_delta}
for comp in [sender, receiver]:
for s, x_s in comp.symb2next.items():
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
init = msat_make_and(menv, receiver.init, sender.init)
trans = msat_make_and(menv, receiver.trans, sender.trans)
# invar delta >= 0
init = msat_make_and(menv, init,
msat_make_geq(menv, delta, zero))
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> (r2s' = r2s & s2r' = s2r)
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_equal(menv, x_r2s, r2s),
msat_make_equal(menv, x_s2r, s2r))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, lhs, rhs))
# (G F !s.stutter) -> G (s.wait_ack -> F s.send)
lhs = enc.make_G(enc.make_F(msat_make_not(menv, sender.stutter)))
rhs = enc.make_G(msat_make_impl(menv, sender.wait_ack,
enc.make_F(sender.send)))
ltl = msat_make_impl(menv, lhs, rhs)
return TermMap(curr2next), init, trans, ltl
class Module:
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
*args, **kwargs):
self.name = name
self.menv = menv
self.enc = enc
self.symb2next = {}
true = msat_make_true(menv)
self.init = true
self.trans = true
def _symb(self, v_name, v_type):
v_name = "{}_{}".format(self.name, v_name)
return decl_consts(self.menv, v_name, v_type)
def _enum(self, v_name: str, enum_size: int):
c_name = "{}_{}".format(self.name, v_name)
return make_enum(self.menv, c_name, enum_size)
class Sender(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
loc, x_loc = self._symb("l", bool_type)
evt, x_evt = self._symb("evt", bool_type)
msg_id, x_msg_id = self._symb("msg_id", int_type)
timeout, x_timeout = self._symb("timeout", real_type)
c, x_c = self._symb("c", real_type)
self.move = evt
self.stutter = msat_make_not(menv, evt)
self.x_move = x_evt
self.x_stutter = msat_make_not(menv, x_evt)
self.send = loc
self.wait_ack = msat_make_not(menv, loc)
self.x_send = x_loc
self.x_wait_ack = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc, evt: x_evt, msg_id: x_msg_id,
timeout: x_timeout, c: x_c}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
base_timeout = one
# send & c = 0 & msg_id = 0
self.init = msat_make_and(menv,
msat_make_and(menv, self.send,
msat_make_equal(menv, c,
zero)),
msat_make_equal(menv, msg_id, zero))
# invar: wait_ack -> c <= timeout
self.init = msat_make_and(
menv, self.init,
msat_make_impl(menv, self.wait_ack,
msat_make_leq(menv, c, timeout)))
self.trans = msat_make_impl(menv, self.x_wait_ack,
msat_make_leq(menv, x_c, x_timeout))
# delta > 0 | stutter -> l' = l & msg_id' = msg_id & timeout' = timeout &
# c' = c + delta & out_c' = out_c
lhs = msat_make_or(menv, msat_make_gt(menv, delta, zero), self.stutter)
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_msg_id, msg_id)),
msat_make_and(menv,
msat_make_equal(menv, x_timeout, timeout),
msat_make_equal(menv, x_c,
msat_make_plus(menv, c, delta))))
rhs = msat_make_and(menv, rhs,
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
disc_t = msat_make_and(menv, self.move,
msat_make_equal(menv, delta, zero))
# (send & send') ->
# (msg_id' = msg_id & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_send))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id, msg_id),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (send & wait_ack') ->
# (msg_id' = msg_id + 1 & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_wait_ack))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id,
msat_make_plus(menv, msg_id, one)),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (c' = 0 & out_c' = out_c &
# (wait_ack' <-> (in_c != msg_id & c > timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs_iff = msat_make_and(menv,
msat_make_not(menv,
msat_make_equal(menv, in_c,
msg_id)),
msat_make_geq(menv, c, timeout))
rhs_iff = msat_make_iff(menv, self.x_wait_ack, rhs_iff)
rhs = msat_make_and(menv,
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c,
out_c)),
rhs_iff)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & wait_ack') -> (timeout' > timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack,
self.x_wait_ack))
rhs = msat_make_gt(menv, x_timeout, timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (send' <-> (in_c = msg_id & c < timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs = msat_make_iff(menv, self.x_send,
msat_make_and(menv,
msat_make_equal(menv, in_c, msg_id),
msat_make_lt(menv, c, timeout)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & send') -> (timeout' = base_timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack, self.x_send))
rhs = msat_make_equal(menv, x_timeout, base_timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
class Receiver(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
loc, x_loc = self._symb("l", bool_type)
self.wait = loc
self.work = msat_make_not(menv, loc)
self.x_wait = x_loc
self.x_work = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc}
zero = msat_make_number(menv, "0")
# wait
self.init = self.wait
# delta > 0 -> loc' = loc & out_c' = out_c
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_impl(menv, lhs, rhs)
disc_t = msat_make_equal(menv, delta, zero)
# wait -> (wait' <-> in_c = out_c)
lhs = msat_make_and(menv, disc_t, self.wait)
rhs = msat_make_iff(menv, self.x_wait,
msat_make_equal(menv, in_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & wait') -> (out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_wait))
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & work') -> out_c' = in_c
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_work))
rhs = msat_make_equal(menv, x_out_c, in_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# work -> out_c' = out_c
lhs = msat_make_and(menv, disc_t, self.work)
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
delta = mgr.Symbol(delta_name, types.REAL)
r2s = mgr.Symbol("r2s", types.INT)
s2r = mgr.Symbol("r2s", types.INT)
s_l = mgr.Symbol("s_l", types.BOOL)
s_evt = mgr.Symbol("s_evt", types.BOOL)
s_msg_id = mgr.Symbol("s_msg_id", types.INT)
s_timeout = mgr.Symbol("s_timeout", types.REAL)
s_c = mgr.Symbol("s_c", types.REAL)
r_l = mgr.Symbol("r_l", types.BOOL)
symbs = frozenset([delta, r2s, s2r, s_l, s_evt, s_msg_id, s_timeout, s_c,
r_l])
x_delta = symb_to_next(mgr, delta)
x_r2s = symb_to_next(mgr, r2s)
x_s2r = symb_to_next(mgr, s2r)
x_s_l = symb_to_next(mgr, s_l)
x_s_evt = symb_to_next(mgr, s_evt)
x_s_msg_id = symb_to_next(mgr, s_msg_id)
x_s_timeout = symb_to_next(mgr, s_timeout)
x_s_c = symb_to_next(mgr, s_c)
x_r_l = symb_to_next(mgr, r_l)
res = []
r0 = mgr.Real(0)
r1 = mgr.Real(1)
i0 = mgr.Int(0)
i1 = mgr.Int(1)
loc0 = Location(env, mgr.GE(delta, r0))
loc0.set_progress(0, mgr.Equals(x_delta, mgr.Plus(delta, r1)))
hint = Hint("h_delta2", env, frozenset([delta]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_c, r0))
loc0.set_progress(0, mgr.Equals(x_s_c, r0))
hint = Hint("h_s_c0", env, frozenset([s_c]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_l)
loc0.set_progress(0, x_s_l)
hint = Hint("h_s_l0", env, frozenset([s_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s_timeout, r0))
loc0.set_progress(0, mgr.Equals(x_s_timeout, mgr.Plus(s_timeout, r1)))
hint = Hint("h_s_timeout1", env, frozenset([s_timeout]), symbs)
hint.set_locs([loc0])
res.append(hint)
return frozenset(res)
| 40.325062 | 89 | 0.568211 |
7948114477a85a1825fc212739616e608651dc1c
| 7,322 |
py
|
Python
|
FC/createFCMatrix.py
|
chidiugonna/DTI-hippo
|
3215df591d80c646cd3726e36ea7783dff049ed1
|
[
"MIT"
] | 1 |
2021-04-08T06:25:35.000Z
|
2021-04-08T06:25:35.000Z
|
FC/createFCMatrix.py
|
chidiugonna/DTI-hippo
|
3215df591d80c646cd3726e36ea7783dff049ed1
|
[
"MIT"
] | 1 |
2021-05-05T22:47:02.000Z
|
2021-05-05T22:47:02.000Z
|
FC/createFCMatrix.py
|
chidiugonna/DTI-hippo
|
3215df591d80c646cd3726e36ea7783dff049ed1
|
[
"MIT"
] | 2 |
2020-11-20T18:03:40.000Z
|
2021-04-08T06:25:42.000Z
|
import os
import datetime
from nilearn import signal as sgl
from nilearn import image as img
from nilearn import plotting as plot
from nilearn import datasets
from nilearn import input_data
from nilearn.connectome import ConnectivityMeasure
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import bids
def extract_confounds(confound, fields ):
confound_df = pd.read_csv(confound, delimiter='\t')
confound_vars=fields
confound_df = confound_df[confound_vars]
return confound_df
def logtext(logfile, textstr):
stamp=datetime.datetime.now().strftime("%m-%d-%y %H:%M:%S%p")
textstring=stamp + ' ' + str(textstr)
print(textstring)
logfile.write(textstring+'\n')
def get_parser():
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
parser = ArgumentParser(description="Create FC connection Matrix using nilearn."
"fMRI data should have been processed by fmriprep and should be in same space as parcellation file.",formatter_class=RawTextHelpFormatter)
parser.add_argument('func_file', action='store',
help='The functional MRI as a NIFTI. Processed by fMRIprep.')
parser.add_argument('parcel_file', action='store',
help='The parcellation as a NIFTI used to define nodes in matrix. Should be in the same space as functional MRI.')
parser.add_argument('label_file', action='store',
help='label file for parcellation')
parser.add_argument('output_file', action='store',
help='output file ')
parser.add_argument('--skip_rows', action='store',type=int,
help='rows to skip in label file')
parser.add_argument('--confound_file', action='store',
help='Confound file created by fMRIprep.')
parser.add_argument('--confound_cols', action='store',type=str, nargs='*',
help='Confound columns to use')
parser.add_argument('--workdir', action='store',
help='Work directory for output of log file and other temporary files')
parser.add_argument('--logname', action='store',
help='name for the log file (without extension) which will be created in work directory.')
parser.add_argument('--TR', action='store',type=float,
help='Repetition Time of functional in seconds.')
parser.add_argument('--high_pass', action='store',type=float,
help='High pass frequency in Hz.')
parser.add_argument('--low_pass', action='store',type=float,
help='Low pass frequency in Hz.')
parser.add_argument('--batchmode', action='store_true', default=False,
help='Omit interactive plot of the functional matrix.')
return parser
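# Editor's note (illustrative invocation, not part of the original script; file
# names and confound column names below are hypothetical). Based on the
# arguments declared above, a typical batch run would look like:
#
#     python createFCMatrix.py func_preproc.nii.gz parcels.nii.gz labels.txt fc_matrix.csv \
#         --confound_file confounds.tsv --confound_cols csf white_matter global_signal \
#         --TR 2.0 --high_pass 0.008 --low_pass 0.08 --skip_rows 1 --batchmode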
def main():
opts = get_parser().parse_args()
# work directory
if opts.workdir:
WORKDIR=os.path.abspath(opts.workdir)
else:
WORKDIR=os.getcwd()
# log name
if opts.logname:
BASELOGNAME=opts.logname
else:
BASELOGNAME='createFCMatrix'
# create log file
TIMESTAMP=datetime.datetime.now().strftime("%m%d%y%H%M%S%p")
LOGFILENAME=BASELOGNAME + '_' + TIMESTAMP + '.log'
LOGFILE = open(os.path.join(WORKDIR,LOGFILENAME), 'w')
# functional MRI
func_file=os.path.abspath(opts.func_file)
parcel_file=os.path.abspath(opts.parcel_file)
label_file=os.path.abspath(opts.label_file)
output_file=os.path.abspath(opts.output_file)
parcel = img.load_img(parcel_file)
logtext(LOGFILE,"parcellation "+parcel_file + " has dimensions " + str(parcel.shape))
if opts.confound_file:
confound_file=os.path.abspath(opts.confound_file)
# Repetition Time
if opts.TR:
TR=opts.TR
logtext(LOGFILE,"Repetition Time passed is " + str(TR) + " seconds")
else:
logtext(LOGFILE,"No repetition time passed. This is necessary for filtering.")
# high pass
if opts.high_pass:
high_pass=opts.high_pass
logtext(LOGFILE,"High pass cutoff " + str(high_pass) + " Hz")
else:
logtext(LOGFILE,"No high pass filter passed. This is necessary for filtering.")
# low_pass
if opts.low_pass:
low_pass=opts.low_pass
logtext(LOGFILE,"Low pass cutoff is " + str(low_pass) + " Hz")
else:
logtext(LOGFILE,"No low pass filter passed. This is necessary for filtering.")
# skip rows
if opts.skip_rows:
skip_rows=opts.skip_rows
logtext(LOGFILE,"skip " + str(skip_rows) + " rows in the label file")
if opts.confound_cols:
confound_cols=opts.confound_cols
if len(confound_cols)<2:
confoundheader_file=os.path.abspath(confound_cols[0])
with open(confoundheader_file, 'r') as fd:
confound_cols=fd.readlines()
confound_columns = []
for substr in confound_cols:
confound_columns.append(substr.replace("\n",""))
else:
confound_columns=confound_cols
masker = input_data.NiftiLabelsMasker(labels_img=parcel,
standardize=True,
memory='nilearn_cache',
verbose=1,
detrend=True,
low_pass=low_pass,
high_pass=high_pass,
t_r=TR)
func_img = img.load_img(func_file)
logtext(LOGFILE,"func_file "+parcel_file + " has dimensions " + str(func_img.shape))
#Convert confounds file into required format
logtext(LOGFILE,"Extract Confounds" )
confounds = extract_confounds(confound_file,
confound_columns)
logtext(LOGFILE,"Confounds: " + str(confounds.head()) )
logtext(LOGFILE,"Save Confounds" )
confound_out=BASELOGNAME + '_confoundsOut_' + TIMESTAMP + '.csv'
confounds.to_csv(confound_out,index=False)
#Apply cleaning, parcellation and extraction to functional data
logtext(LOGFILE,"clean and extract functional data from parcellation" )
confounds_array=confounds.to_numpy()
time_series = masker.fit_transform(func_img,confounds_array)
logtext(LOGFILE,"Calculate correlation matrix" )
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]
logtext(LOGFILE,"parsing label file " + label_file + " by skipping " + str(skip_rows) +" rows")
labelfile_df=pd.read_csv(label_file, header=None, usecols=[1],delim_whitespace=True, skiprows=skip_rows)
logtext(LOGFILE,"labels: " + str(labelfile_df.head()) )
labels_array=labelfile_df.to_numpy()
logtext(LOGFILE,"saving correlation matrix to " + output_file )
np.fill_diagonal(correlation_matrix, 0)
parcel_df=pd.DataFrame(correlation_matrix)
parcel_df.to_csv(output_file,index=False, header=False)
logtext(LOGFILE,"Displaying correlation matrix")
if not (opts.batchmode):
plot.plot_matrix(correlation_matrix, figure=(10, 8), labels=labels_array,
vmax=0.8, vmin=-0.8, reorder=True)
plot.show()
LOGFILE.close()
# This is the standard boilerplate that calls the main() function.
if __name__ == '__main__':
main()
| 38.740741 | 146 | 0.660339 |
794811d5ee943c897db51e5d11dd9843344421c4
| 6,214 |
py
|
Python
|
python/cugraph/cugraph/tests/test_graph_store.py
|
wangxiaoyunNV/cugraph
|
97f75de40362740928b45b61e713ff4dae1cbf55
|
[
"Apache-2.0"
] | null | null | null |
python/cugraph/cugraph/tests/test_graph_store.py
|
wangxiaoyunNV/cugraph
|
97f75de40362740928b45b61e713ff4dae1cbf55
|
[
"Apache-2.0"
] | null | null | null |
python/cugraph/cugraph/tests/test_graph_store.py
|
wangxiaoyunNV/cugraph
|
97f75de40362740928b45b61e713ff4dae1cbf55
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2022, NVIDIA CORPORATION.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import cugraph
from cugraph.tests import utils
from cugraph.experimental import PropertyGraph
import numpy as np
import cudf
# Test
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_no_graph(graph_file):
with pytest.raises(TypeError):
gstore = cugraph.gnn.CuGraphStore()
gstore.num_edges()
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_using_graph(graph_file):
with pytest.raises(ValueError):
cu_M = utils.read_csv_file(graph_file)
g = cugraph.Graph()
g.from_cudf_edgelist(cu_M, source='0',
destination='1', edge_attr='2', renumber=True)
cugraph.gnn.CuGraphStore(graph=g)
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_using_pgraph(graph_file):
cu_M = utils.read_csv_file(graph_file)
g = cugraph.Graph(directed=True)
g.from_cudf_edgelist(cu_M, source='0', destination='1',
edge_attr='2', renumber=True)
pG = PropertyGraph()
pG.add_edge_data(cu_M,
type_name="edge",
vertex_col_names=("0", "1"),
property_columns=None)
gstore = cugraph.gnn.CuGraphStore(graph=pG)
assert g.number_of_edges() == pG.num_edges
assert g.number_of_edges() == gstore.num_edges
assert g.number_of_vertices() == pG.num_vertices
assert g.number_of_vertices() == gstore.num_vertices
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_node_data_pg(graph_file):
cu_M = utils.read_csv_file(graph_file)
pG = PropertyGraph()
pG.add_edge_data(cu_M,
type_name="edge",
vertex_col_names=("0", "1"),
property_columns=None)
gstore = cugraph.gnn.CuGraphStore(graph=pG)
edata = gstore.edata
assert edata.shape[0] > 0
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_egonet(graph_file):
from cugraph.community.egonet import batched_ego_graphs
cu_M = utils.read_csv_file(graph_file)
g = cugraph.Graph(directed=True)
g.from_cudf_edgelist(cu_M, source='0', destination='1', renumber=True)
pG = PropertyGraph()
pG.add_edge_data(cu_M,
type_name="edge",
vertex_col_names=("0", "1"),
property_columns=None)
gstore = cugraph.gnn.CuGraphStore(graph=pG)
nodes = [1, 2]
ego_edge_list1, seeds_offsets1 = gstore.egonet(nodes, k=1)
ego_edge_list2, seeds_offsets2 = batched_ego_graphs(g, nodes, radius=1)
assert ego_edge_list1 == ego_edge_list2
assert seeds_offsets1 == seeds_offsets2
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_workflow(graph_file):
# from cugraph.community.egonet import batched_ego_graphs
cu_M = utils.read_csv_file(graph_file)
g = cugraph.Graph(directed=True)
g.from_cudf_edgelist(cu_M, source='0', destination='1', renumber=True)
pg = PropertyGraph()
pg.add_edge_data(cu_M,
type_name="edge",
vertex_col_names=("0", "1"),
property_columns=["2"])
gstore = cugraph.gnn.CuGraphStore(graph=pg)
nodes = gstore.get_vertex_ids()
num_nodes = len(nodes)
assert num_nodes > 0
sampled_nodes = nodes[:5]
ego_edge_list, seeds_offsets = gstore.egonet(sampled_nodes, k=1)
assert len(ego_edge_list) > 0
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_sample_neighbors(graph_file):
cu_M = utils.read_csv_file(graph_file)
g = cugraph.Graph(directed=True)
g.from_cudf_edgelist(cu_M, source='0', destination='1', renumber=True)
pg = PropertyGraph()
pg.add_edge_data(cu_M,
type_name="edge",
vertex_col_names=("0", "1"),
property_columns=["2"])
gstore = cugraph.gnn.CuGraphStore(graph=pg)
nodes = gstore.get_vertex_ids()
num_nodes = len(nodes)
assert num_nodes > 0
sampled_nodes = nodes[:5]
parents_list, children_list = gstore.sample_neighbors(sampled_nodes, 2)
assert len(parents_list) > 0
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_n_data(graph_file):
cu_M = utils.read_csv_file(graph_file)
g = cugraph.Graph(directed=True)
g.from_cudf_edgelist(cu_M, source='0', destination='1', renumber=True)
pg = PropertyGraph()
pg.add_edge_data(cu_M,
type_name="edge",
vertex_col_names=("0", "1"),
property_columns=["2"])
num_nodes = g.number_of_nodes()
df_feat = cudf.DataFrame()
df_feat['node_id'] = np.arange(num_nodes)
df_feat['val0'] = [float(i+1) for i in range(num_nodes)]
df_feat['val1'] = [float(i+2) for i in range(num_nodes)]
pg.add_vertex_data(df_feat,
type_name="test_feat",
vertex_col_name="node_id",
property_columns=None)
gstore = cugraph.gnn.CuGraphStore(graph=pg)
ndata = gstore.ndata
assert ndata.shape[0] > 0
@pytest.mark.parametrize("graph_file", utils.DATASETS)
def test_e_data(graph_file):
cu_M = utils.read_csv_file(graph_file)
g = cugraph.Graph(directed=True)
g.from_cudf_edgelist(cu_M, source='0', destination='1', renumber=True)
pg = PropertyGraph()
pg.add_edge_data(cu_M,
type_name="edge",
vertex_col_names=("0", "1"),
property_columns=["2"])
gstore = cugraph.gnn.CuGraphStore(graph=pg)
edata = gstore.edata
assert edata.shape[0] > 0
| 29.037383 | 75 | 0.654007 |
79481208371d4f2b376f3de7f4744be417114a3b
| 1,298 |
py
|
Python
|
app/core/tests/test_admin.py
|
kfahmi/recipe-app-api
|
fa678005a3615bff92b94d5e852526fcb00793ed
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
kfahmi/recipe-app-api
|
fa678005a3615bff92b94d5e852526fcb00793ed
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
kfahmi/recipe-app-api
|
fa678005a3615bff92b94d5e852526fcb00793ed
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email='admin@londonappdev.com',
password='password123'
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email='test@londonappdev.com',
password='password123',
name='test username'
)
def test_user_listed(self):
"""test listed user on user page"""
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change(self):
"""Test user edit page works"""
url = reverse('admin:core_user_change', args=[self.user.id])
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
"""Test that the create user page works"""
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| 33.282051 | 68 | 0.640216 |
79481282d8747270c5ca1d77895b21b5e10d5156
| 2,443 |
py
|
Python
|
PyEEA/taxation/TaxationHelper.py
|
ThomasJFR/PyEEA
|
7927ee5ff1de8d3cf9e9654899ea4c2c0284519c
|
[
"MIT"
] | 1 |
2020-06-15T03:16:06.000Z
|
2020-06-15T03:16:06.000Z
|
PyEEA/taxation/TaxationHelper.py
|
ThomasJFR/PyEEA
|
7927ee5ff1de8d3cf9e9654899ea4c2c0284519c
|
[
"MIT"
] | 1 |
2020-06-19T04:56:21.000Z
|
2020-06-19T04:56:21.000Z
|
PyEEA/taxation/TaxationHelper.py
|
ThomasJFR/PyEEA
|
7927ee5ff1de8d3cf9e9654899ea4c2c0284519c
|
[
"MIT"
] | null | null | null |
from ..cashflow import Cashflow, NullCashflow
from ..cashflow import Future, Perpetuity, Dynamic
from ..utilities import parse_ns, parse_d, get_final_period
from copy import deepcopy
class Tax:
def __init__(self, tag, rate, title=None):
self._tag = tag
self._rate = rate
self._title = title or ("Tax on %s" % self._tag)
def get_title(self):
return self._title
def generate_cashflow(self, cashflows=[], depreciations=[]):
# Remove any irrelevant cashflows
taxable_cashflows = [cf for cf in cashflows if self._tag in cf.tags] or [
NullCashflow()
]
shielding_depreciations = [
dp for dp in depreciations if self._tag in dp.tags
] or [NullCashflow()]
# Create and return the TaxCashflow object
return TaxCashflow(
self._rate,
taxable_cashflows,
shielding_depreciations,
get_final_period(cashflows, finite=True),
title=self.get_title(),
tags=self._tag,
)
class TaxCashflow(Dynamic):
def __init__(self, rate, cashflows, depreciations, d, title=None, tags=None):
super().__init__(TaxCashflow.tax_fun, d, title, tags)
self._rate = rate
self._cashflows = cashflows
self._depreciations = depreciations
def to_shorthand(self):
return "Tax(%s, %.2f%%)" % (self.tags[0], self._rate * 100)
def to_pv(self, i):
# Handles every cashflow in range d
pv = super().to_pv(i)
# Check for Perpetuities
if perpetuities := [cf for cf in self._cashflows if isinstance(cf, Perpetuity)]:
perpetuities = deepcopy(perpetuities)
for perpetuity in perpetuities:
perpetuity.amount = perpetuity[self.d[1] + 1].amount
perpetuity.d0 = self.d[1]
pv += sum([perpetuity.to_pv(i) for perpetuity in perpetuities]) * self._rate
return pv
# Because of the way Dynamic is set up, we want to reference self. At the same time,
# we need the function to be static, so that we can manually supply 'self'.
@staticmethod
def tax_fun(self, n):
taxable_sum = sum([cashflow[n] for cashflow in self._cashflows])
shielding_sum = sum([depreciation[n] for depreciation in self._depreciations])
taxed_amount = (taxable_sum - shielding_sum) * self._rate
return taxed_amount
| 33.930556 | 88 | 0.631191 |
7948130a4e92c589a9e06802a3dfa5fa3e3e205a
| 2,796 |
py
|
Python
|
checkov/terraform/checks/resource/gcp/GoogleCloudSqlDatabasePublicallyAccessible.py
|
Devocean8-Official/checkov
|
8ce61421fa838a97981ab3bd0ae2a12e541666b2
|
[
"Apache-2.0"
] | 1 |
2022-02-15T20:46:07.000Z
|
2022-02-15T20:46:07.000Z
|
checkov/terraform/checks/resource/gcp/GoogleCloudSqlDatabasePublicallyAccessible.py
|
Devocean8-Official/checkov
|
8ce61421fa838a97981ab3bd0ae2a12e541666b2
|
[
"Apache-2.0"
] | null | null | null |
checkov/terraform/checks/resource/gcp/GoogleCloudSqlDatabasePublicallyAccessible.py
|
Devocean8-Official/checkov
|
8ce61421fa838a97981ab3bd0ae2a12e541666b2
|
[
"Apache-2.0"
] | null | null | null |
from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
from checkov.common.models.enums import CheckResult, CheckCategories
class GoogleCloudSqlDatabasePublicallyAccessible(BaseResourceCheck):
def __init__(self):
name = "Ensure that Cloud SQL database Instances are not open to the world"
id = "CKV_GCP_11"
supported_resources = ['google_sql_database_instance']
categories = [CheckCategories.NETWORKING]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf):
"""
Looks for google_sql_database_instance which is open to the world:
:param conf: google_sql_database_instance configuration
:return: <CheckResult>
"""
if 'settings' in conf and 'ip_configuration' in conf['settings'][0]:
ip_config = conf['settings'][0]['ip_configuration'][0]
self.evaluated_keys = ['settings/[0]/ip_configuration']
if 'authorized_networks' in ip_config:
auth_networks = ip_config['authorized_networks'][0]
if type(auth_networks) != list: # handle possible legacy case
auth_networks = [auth_networks]
for network in auth_networks:
if 'value' in network:
val = network['value']
if type(val) == list: # handle possible parsing discrepancies
val = val[0]
if val.endswith('/0'):
self.evaluated_keys = ['settings/[0]/ip_configuration/authorized_networks/[0]/value',
'settings/[0]/ip_configuration/authorized_networks/[0]/'
f'[{auth_networks.index(network)}]/value']
return CheckResult.FAILED
if 'dynamic' in ip_config:
dynamic = ip_config['dynamic']
for dynamic_block in dynamic:
if 'authorized_networks' in dynamic_block and 'content' in dynamic_block['authorized_networks']:
content = dynamic_block['authorized_networks']['content'][0]
if 'value' in content and content['value'][0].endswith('/0'):
self.evaluated_keys = ['settings/[0]/ip_configuration/dynamic/'
f'[{dynamic.index(dynamic_block)}]/'
'authorized_networks/content/[0]/value']
return CheckResult.FAILED
return CheckResult.PASSED
check = GoogleCloudSqlDatabasePublicallyAccessible()
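# Editor's note (illustrative, not part of the original checkov source): a
# Terraform block that this check reports as FAILED, because an authorized
# network CIDR ends in /0 (instance and network names are hypothetical):
#
#     resource "google_sql_database_instance" "example" {
#       name             = "example-instance"
#       database_version = "POSTGRES_13"
#       settings {
#         tier = "db-f1-micro"
#         ip_configuration {
#           authorized_networks {
#             name  = "open-to-world"
#             value = "0.0.0.0/0"
#           }
#         }
#       }
#     }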
| 54.823529 | 116 | 0.57618 |
794813819f168f66c060accc9166d5dc9d35a6ef
| 1,759 |
py
|
Python
|
src/util.py
|
dmarxn/mwdblib
|
8249bee222ed72590695b6010d6ee4cd4b6b12ca
|
[
"MIT"
] | 36 |
2018-12-16T18:59:44.000Z
|
2022-03-26T21:35:07.000Z
|
src/util.py
|
dmarxn/mwdblib
|
8249bee222ed72590695b6010d6ee4cd4b6b12ca
|
[
"MIT"
] | 26 |
2019-12-17T20:49:17.000Z
|
2022-03-31T16:38:49.000Z
|
src/util.py
|
dmarxn/mwdblib
|
8249bee222ed72590695b6010d6ee4cd4b6b12ca
|
[
"MIT"
] | 7 |
2020-05-23T00:44:46.000Z
|
2022-01-17T22:43:48.000Z
|
import hashlib
import sys
def convert_to_utf8(obj):
"""Encodes object into utf-8 bytes (or 'str' in Py2)"""
obj = str(obj)
if sys.version_info[0] == 3:
obj = bytes(obj, "utf-8")
else:
obj = u''.join(map(unichr, map(ord, obj))).encode("utf-8") # noqa: F821 in Py3 context
return obj
def _eval_config_dhash(obj):
""" Compute a data hash from the object. This is the hashing algorithm
used internally by MWDB to assign unique ids to configs
"""
if isinstance(obj, list):
# For lists: evaluate hash recursively for all elements and sort them lexicographically
return _eval_config_dhash(str(sorted([_eval_config_dhash(o) for o in obj])))
elif isinstance(obj, dict):
# For dicts: convert to key-ordered tuples with hashed value
return _eval_config_dhash(
[[o, _eval_config_dhash(obj[o])] for o in sorted(obj.keys())]
)
else:
# Other types: evaluate SHA256 after conversion to UTF-8
return hashlib.sha256(convert_to_utf8(obj)).hexdigest()
def config_dhash(obj):
"""
Compute a data hash from the object. This is the hashing algorithm
used internally by MWDB to assign unique ids to configs.
.. versionchanged:: 3.3.0
Added support for in-blob keys
:param obj: Dict with configuration
:type obj: dict
:return: SHA256 hex digest
"""
config = dict(obj)
for key, value in config.items():
if isinstance(value, dict) and list(value.keys()) == ["in-blob"]:
in_blob = value["in-blob"]
if isinstance(in_blob, dict):
config[key]["in-blob"] = hashlib.sha256(convert_to_utf8(in_blob["content"])).hexdigest()
return _eval_config_dhash(config)
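# Editor's note (illustrative, not part of the original mwdblib source; the
# config values below are made up): because dict keys are sorted before
# hashing, the digest is insensitive to key order, so
#
#     config_dhash({"family": "example", "urls": ["http://a", "http://b"]})
#
# returns the same SHA256 hex digest as the equivalent dict with reordered
# keys, while in-blob entries of the form {"in-blob": {"content": "..."}} are
# first reduced to the SHA256 of their content before the dict is hashed.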
| 34.490196 | 104 | 0.645821 |
7948142a715827c4bdca853567744befef12167b
| 2,670 |
py
|
Python
|
test/unit/filterer/test_base.py
|
4degrees/mill
|
ba8362193fa9cbcb52e9b6d188d3060eed495038
|
[
"Apache-2.0"
] | null | null | null |
test/unit/filterer/test_base.py
|
4degrees/mill
|
ba8362193fa9cbcb52e9b6d188d3060eed495038
|
[
"Apache-2.0"
] | 1 |
2016-05-18T08:56:43.000Z
|
2016-05-18T09:11:31.000Z
|
test/unit/filterer/test_base.py
|
4degrees/mill
|
ba8362193fa9cbcb52e9b6d188d3060eed495038
|
[
"Apache-2.0"
] | null | null | null |
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
from sawmill.log import Log
from sawmill.filterer.base import Filterer, Any, All
class DenyAll(Filterer):
'''Filter all logs.'''
def filter(self, logs):
'''Filter all *logs*.'''
return []
class AllowAll(Filterer):
'''Don't filter any log.'''
def filter(self, logs):
'''Return all *logs* unfiltered.'''
return logs
def test_filter():
'''Test filter method.'''
log = Log()
deny = DenyAll()
assert deny.filter([log]) == []
allow = AllowAll()
assert allow.filter([log]) == [log]
def test_and_combine():
'''Test combining filterers with and operator.'''
log = Log()
filterer = DenyAll() & AllowAll()
assert filterer.filter([log]) == []
filterer = DenyAll()
filterer &= AllowAll()
assert filterer.filter([log]) == []
def test_and_with_non_filterer_errors():
'''Test and operator with non-filterer raises NotImplementedError.'''
with pytest.raises(NotImplementedError):
DenyAll() & 1
def test_or_combine():
'''Test combining filterers with or operator.'''
log = Log()
filterer = DenyAll() | AllowAll()
assert filterer.filter([log]) == [log]
filterer = DenyAll()
filterer |= AllowAll()
assert filterer.filter([log]) == [log]
def test_or_with_non_filterer_errors():
'''Test or operator with non-filterer raises NotImplementedError.'''
with pytest.raises(NotImplementedError):
DenyAll() | 1
def test_all():
'''Test All filterer.'''
log = Log()
allow = DenyAll()
deny = AllowAll()
filterer = All([allow, deny])
assert filterer.filter([log]) == []
filterer = All([AllowAll(), AllowAll()])
assert filterer.filter([log]) == [log]
filterer = All([DenyAll(), DenyAll()])
assert filterer.filter([log]) == []
def test_all_when_no_filterers_Set():
'''Test All filterer does not filter when no filterers set.'''
log = Log()
filterer = All()
assert filterer.filter([log]) == [log]
def test_any():
'''Test Any filterer.'''
log = Log()
allow = DenyAll()
deny = AllowAll()
filterer = Any([allow, deny])
assert filterer.filter([log]) == [log]
filterer = Any([AllowAll(), AllowAll()])
assert filterer.filter([log]) == [log]
filterer = Any([DenyAll(), DenyAll()])
assert filterer.filter([log]) == []
def test_any_when_no_filterers_set():
'''Test Any filterer does not filter when no filterers set.'''
log = Log()
filterer = Any()
assert filterer.filter([log]) == [log]
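# Additional sketch (not part of the original suite): assuming the `&` and `|`
# operators return Filterer instances themselves, combinations should nest.
def test_nested_combination():
    '''Test nesting of and/or combinations.'''
    log = Log()
    filterer = (DenyAll() & AllowAll()) | AllowAll()
    assert filterer.filter([log]) == [log]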
| 22.627119 | 73 | 0.618727 |
794814c284fa57577221dfc56a1062ced75eee02
| 2,510 |
py
|
Python
|
tests/components/xbox/test_config_flow.py
|
nibdev/core
|
b7e8348c306fb6af9582a04eed6058a28409dc26
|
[
"Apache-2.0"
] | 5 |
2020-12-15T04:09:01.000Z
|
2022-03-11T21:34:24.000Z
|
tests/components/xbox/test_config_flow.py
|
nibdev/core
|
b7e8348c306fb6af9582a04eed6058a28409dc26
|
[
"Apache-2.0"
] | 87 |
2020-07-06T22:22:54.000Z
|
2022-03-31T06:01:46.000Z
|
tests/components/xbox/test_config_flow.py
|
winning1120xx/home-assistant
|
53d4c0ce2d374b5e97bbdc37742656c27adf8eea
|
[
"Apache-2.0"
] | 11 |
2020-12-16T13:48:14.000Z
|
2022-02-01T00:28:05.000Z
|
"""Test the xbox config flow."""
from unittest.mock import patch
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.xbox.const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN
from homeassistant.helpers import config_entry_oauth2_flow
from tests.common import MockConfigEntry
CLIENT_ID = "1234"
CLIENT_SECRET = "5678"
async def test_abort_if_existing_entry(hass):
"""Check flow abort when an entry already exist."""
MockConfigEntry(domain=DOMAIN).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
"xbox", context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
async def test_full_flow(
hass, hass_client_no_auth, aioclient_mock, current_request_with_host
):
"""Check full flow."""
assert await setup.async_setup_component(
hass,
"xbox",
{
"xbox": {"client_id": CLIENT_ID, "client_secret": CLIENT_SECRET},
"http": {"base_url": "https://example.com"},
},
)
result = await hass.config_entries.flow.async_init(
"xbox", context={"source": config_entries.SOURCE_USER}
)
state = config_entry_oauth2_flow._encode_jwt(
hass,
{
"flow_id": result["flow_id"],
"redirect_uri": "https://example.com/auth/external/callback",
},
)
scope = "+".join(["Xboxlive.signin", "Xboxlive.offline_access"])
assert result["url"] == (
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}&scope={scope}"
)
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
OAUTH2_TOKEN,
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
)
with patch(
"homeassistant.components.xbox.async_setup_entry", return_value=True
) as mock_setup:
await hass.config_entries.flow.async_configure(result["flow_id"])
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(mock_setup.mock_calls) == 1
| 31.772152 | 86 | 0.663347 |
794815359f7adf6ff62a5081cf302bd847bac339
| 16,221 |
py
|
Python
|
spacy/tests/pipeline/test_span_ruler.py
|
stungkit/spaCy
|
a83a50119520ea8708f0ef0730f65f486556c273
|
[
"MIT"
] | 1 |
2019-11-27T13:14:04.000Z
|
2019-11-27T13:14:04.000Z
|
spacy/tests/pipeline/test_span_ruler.py
|
stungkit/spaCy
|
a83a50119520ea8708f0ef0730f65f486556c273
|
[
"MIT"
] | null | null | null |
spacy/tests/pipeline/test_span_ruler.py
|
stungkit/spaCy
|
a83a50119520ea8708f0ef0730f65f486556c273
|
[
"MIT"
] | null | null | null |
import pytest
import spacy
from spacy import registry
from spacy.errors import MatchPatternError
from spacy.tokens import Span
from spacy.training import Example
from spacy.tests.util import make_tempdir
from thinc.api import NumpyOps, get_current_ops
@pytest.fixture
@registry.misc("span_ruler_patterns")
def patterns():
return [
{"label": "HELLO", "pattern": "hello world", "id": "hello1"},
{"label": "BYE", "pattern": [{"LOWER": "bye"}, {"LOWER": "bye"}]},
{"label": "HELLO", "pattern": [{"ORTH": "HELLO"}], "id": "hello2"},
{"label": "COMPLEX", "pattern": [{"ORTH": "foo", "OP": "*"}]},
{"label": "TECH_ORG", "pattern": "Apple"},
{"label": "TECH_ORG", "pattern": "Microsoft"},
]
@pytest.fixture
def overlapping_patterns():
return [
{"label": "FOOBAR", "pattern": "foo bar"},
{"label": "BARBAZ", "pattern": "bar baz"},
]
@pytest.fixture
def person_org_patterns():
return [
{"label": "PERSON", "pattern": "Dina"},
{"label": "ORG", "pattern": "ACME"},
{"label": "ORG", "pattern": "ACM"},
]
@pytest.fixture
def person_org_date_patterns(person_org_patterns):
return person_org_patterns + [{"label": "DATE", "pattern": "June 14th"}]
def test_span_ruler_add_empty(patterns):
"""Test that patterns don't get added excessively."""
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler", config={"validate": True})
ruler.add_patterns(patterns)
pattern_count = sum(len(mm) for mm in ruler.matcher._patterns.values())
assert pattern_count > 0
ruler.add_patterns([])
after_count = sum(len(mm) for mm in ruler.matcher._patterns.values())
assert after_count == pattern_count
def test_span_ruler_init(patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(patterns)
assert len(ruler) == len(patterns)
assert len(ruler.labels) == 4
assert "HELLO" in ruler
assert "BYE" in ruler
doc = nlp("hello world bye bye")
assert len(doc.spans["ruler"]) == 2
assert doc.spans["ruler"][0].label_ == "HELLO"
assert doc.spans["ruler"][0].id_ == "hello1"
assert doc.spans["ruler"][1].label_ == "BYE"
assert doc.spans["ruler"][1].id_ == ""
def test_span_ruler_no_patterns_warns():
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
assert len(ruler) == 0
assert len(ruler.labels) == 0
assert nlp.pipe_names == ["span_ruler"]
with pytest.warns(UserWarning):
doc = nlp("hello world bye bye")
assert len(doc.spans["ruler"]) == 0
def test_span_ruler_init_patterns(patterns):
# initialize with patterns
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
assert len(ruler.labels) == 0
ruler.initialize(lambda: [], patterns=patterns)
assert len(ruler.labels) == 4
doc = nlp("hello world bye bye")
assert doc.spans["ruler"][0].label_ == "HELLO"
assert doc.spans["ruler"][1].label_ == "BYE"
nlp.remove_pipe("span_ruler")
# initialize with patterns from misc registry
nlp.config["initialize"]["components"]["span_ruler"] = {
"patterns": {"@misc": "span_ruler_patterns"}
}
ruler = nlp.add_pipe("span_ruler")
assert len(ruler.labels) == 0
nlp.initialize()
assert len(ruler.labels) == 4
doc = nlp("hello world bye bye")
assert doc.spans["ruler"][0].label_ == "HELLO"
assert doc.spans["ruler"][1].label_ == "BYE"
def test_span_ruler_init_clear(patterns):
"""Test that initialization clears patterns."""
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(patterns)
assert len(ruler.labels) == 4
ruler.initialize(lambda: [])
assert len(ruler.labels) == 0
def test_span_ruler_clear(patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(patterns)
assert len(ruler.labels) == 4
doc = nlp("hello world")
assert len(doc.spans["ruler"]) == 1
ruler.clear()
assert len(ruler.labels) == 0
with pytest.warns(UserWarning):
doc = nlp("hello world")
assert len(doc.spans["ruler"]) == 0
def test_span_ruler_existing(patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler", config={"overwrite": False})
ruler.add_patterns(patterns)
doc = nlp.make_doc("OH HELLO WORLD bye bye")
doc.spans["ruler"] = [doc[0:2]]
doc = nlp(doc)
assert len(doc.spans["ruler"]) == 3
assert doc.spans["ruler"][0] == doc[0:2]
assert doc.spans["ruler"][1].label_ == "HELLO"
assert doc.spans["ruler"][1].id_ == "hello2"
assert doc.spans["ruler"][2].label_ == "BYE"
assert doc.spans["ruler"][2].id_ == ""
def test_span_ruler_existing_overwrite(patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler", config={"overwrite": True})
ruler.add_patterns(patterns)
doc = nlp.make_doc("OH HELLO WORLD bye bye")
doc.spans["ruler"] = [doc[0:2]]
doc = nlp(doc)
assert len(doc.spans["ruler"]) == 2
assert doc.spans["ruler"][0].label_ == "HELLO"
assert doc.spans["ruler"][0].text == "HELLO"
assert doc.spans["ruler"][1].label_ == "BYE"
def test_span_ruler_serialize_bytes(patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(patterns)
assert len(ruler) == len(patterns)
assert len(ruler.labels) == 4
ruler_bytes = ruler.to_bytes()
new_nlp = spacy.blank("xx")
new_ruler = new_nlp.add_pipe("span_ruler")
assert len(new_ruler) == 0
assert len(new_ruler.labels) == 0
new_ruler = new_ruler.from_bytes(ruler_bytes)
assert len(new_ruler) == len(patterns)
assert len(new_ruler.labels) == 4
assert len(new_ruler.patterns) == len(ruler.patterns)
for pattern in ruler.patterns:
assert pattern in new_ruler.patterns
assert sorted(new_ruler.labels) == sorted(ruler.labels)
def test_span_ruler_validate():
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
validated_ruler = nlp.add_pipe(
"span_ruler", name="validated_span_ruler", config={"validate": True}
)
valid_pattern = {"label": "HELLO", "pattern": [{"LOWER": "HELLO"}]}
invalid_pattern = {"label": "HELLO", "pattern": [{"ASDF": "HELLO"}]}
# invalid pattern raises error without validate
with pytest.raises(ValueError):
ruler.add_patterns([invalid_pattern])
# valid pattern is added without errors with validate
validated_ruler.add_patterns([valid_pattern])
# invalid pattern raises error with validate
with pytest.raises(MatchPatternError):
validated_ruler.add_patterns([invalid_pattern])
def test_span_ruler_properties(patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler", config={"overwrite": True})
ruler.add_patterns(patterns)
assert sorted(ruler.labels) == sorted(set([p["label"] for p in patterns]))
def test_span_ruler_overlapping_spans(overlapping_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(overlapping_patterns)
doc = ruler(nlp.make_doc("foo bar baz"))
assert len(doc.spans["ruler"]) == 2
assert doc.spans["ruler"][0].label_ == "FOOBAR"
assert doc.spans["ruler"][1].label_ == "BARBAZ"
def test_span_ruler_scorer(overlapping_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(overlapping_patterns)
text = "foo bar baz"
pred_doc = ruler(nlp.make_doc(text))
assert len(pred_doc.spans["ruler"]) == 2
assert pred_doc.spans["ruler"][0].label_ == "FOOBAR"
assert pred_doc.spans["ruler"][1].label_ == "BARBAZ"
ref_doc = nlp.make_doc(text)
ref_doc.spans["ruler"] = [Span(ref_doc, 0, 2, label="FOOBAR")]
scores = nlp.evaluate([Example(pred_doc, ref_doc)])
assert scores["spans_ruler_p"] == 0.5
assert scores["spans_ruler_r"] == 1.0
@pytest.mark.parametrize("n_process", [1, 2])
def test_span_ruler_multiprocessing(n_process):
    if isinstance(get_current_ops(), NumpyOps) or n_process < 2:
texts = ["I enjoy eating Pizza Hut pizza."]
patterns = [{"label": "FASTFOOD", "pattern": "Pizza Hut"}]
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(patterns)
        for doc in nlp.pipe(texts, n_process=n_process):
for ent in doc.spans["ruler"]:
assert ent.label_ == "FASTFOOD"
def test_span_ruler_serialize_dir(patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(patterns)
with make_tempdir() as d:
ruler.to_disk(d / "test_ruler")
ruler.from_disk(d / "test_ruler") # read from an existing directory
with pytest.raises(ValueError):
ruler.from_disk(d / "non_existing_dir") # read from a bad directory
def test_span_ruler_remove_basic(person_org_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(person_org_patterns)
doc = ruler(nlp.make_doc("Dina went to school"))
assert len(ruler.patterns) == 3
assert len(doc.spans["ruler"]) == 1
assert doc.spans["ruler"][0].label_ == "PERSON"
assert doc.spans["ruler"][0].text == "Dina"
ruler.remove("PERSON")
doc = ruler(nlp.make_doc("Dina went to school"))
assert len(doc.spans["ruler"]) == 0
assert len(ruler.patterns) == 2
def test_span_ruler_remove_nonexisting_pattern(person_org_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(person_org_patterns)
assert len(ruler.patterns) == 3
with pytest.raises(ValueError):
ruler.remove("NE")
with pytest.raises(ValueError):
ruler.remove_by_id("NE")
def test_span_ruler_remove_several_patterns(person_org_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(person_org_patterns)
doc = ruler(nlp.make_doc("Dina founded the company ACME."))
assert len(ruler.patterns) == 3
assert len(doc.spans["ruler"]) == 2
assert doc.spans["ruler"][0].label_ == "PERSON"
assert doc.spans["ruler"][0].text == "Dina"
assert doc.spans["ruler"][1].label_ == "ORG"
assert doc.spans["ruler"][1].text == "ACME"
ruler.remove("PERSON")
doc = ruler(nlp.make_doc("Dina founded the company ACME"))
assert len(ruler.patterns) == 2
assert len(doc.spans["ruler"]) == 1
assert doc.spans["ruler"][0].label_ == "ORG"
assert doc.spans["ruler"][0].text == "ACME"
ruler.remove("ORG")
with pytest.warns(UserWarning):
doc = ruler(nlp.make_doc("Dina founded the company ACME"))
assert len(ruler.patterns) == 0
assert len(doc.spans["ruler"]) == 0
def test_span_ruler_remove_patterns_in_a_row(person_org_date_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(person_org_date_patterns)
doc = ruler(nlp.make_doc("Dina founded the company ACME on June 14th"))
assert len(doc.spans["ruler"]) == 3
assert doc.spans["ruler"][0].label_ == "PERSON"
assert doc.spans["ruler"][0].text == "Dina"
assert doc.spans["ruler"][1].label_ == "ORG"
assert doc.spans["ruler"][1].text == "ACME"
assert doc.spans["ruler"][2].label_ == "DATE"
assert doc.spans["ruler"][2].text == "June 14th"
ruler.remove("ORG")
ruler.remove("DATE")
doc = ruler(nlp.make_doc("Dina went to school"))
assert len(doc.spans["ruler"]) == 1
def test_span_ruler_remove_all_patterns(person_org_date_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
ruler.add_patterns(person_org_date_patterns)
assert len(ruler.patterns) == 4
ruler.remove("PERSON")
assert len(ruler.patterns) == 3
ruler.remove("ORG")
assert len(ruler.patterns) == 1
ruler.remove("DATE")
assert len(ruler.patterns) == 0
with pytest.warns(UserWarning):
doc = ruler(nlp.make_doc("Dina founded the company ACME on June 14th"))
assert len(doc.spans["ruler"]) == 0
def test_span_ruler_remove_and_add():
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler")
patterns1 = [{"label": "DATE1", "pattern": "last time"}]
ruler.add_patterns(patterns1)
doc = ruler(
nlp.make_doc("I saw him last time we met, this time he brought some flowers")
)
assert len(ruler.patterns) == 1
assert len(doc.spans["ruler"]) == 1
assert doc.spans["ruler"][0].label_ == "DATE1"
assert doc.spans["ruler"][0].text == "last time"
patterns2 = [{"label": "DATE2", "pattern": "this time"}]
ruler.add_patterns(patterns2)
doc = ruler(
nlp.make_doc("I saw him last time we met, this time he brought some flowers")
)
assert len(ruler.patterns) == 2
assert len(doc.spans["ruler"]) == 2
assert doc.spans["ruler"][0].label_ == "DATE1"
assert doc.spans["ruler"][0].text == "last time"
assert doc.spans["ruler"][1].label_ == "DATE2"
assert doc.spans["ruler"][1].text == "this time"
ruler.remove("DATE1")
doc = ruler(
nlp.make_doc("I saw him last time we met, this time he brought some flowers")
)
assert len(ruler.patterns) == 1
assert len(doc.spans["ruler"]) == 1
assert doc.spans["ruler"][0].label_ == "DATE2"
assert doc.spans["ruler"][0].text == "this time"
ruler.add_patterns(patterns1)
doc = ruler(
nlp.make_doc("I saw him last time we met, this time he brought some flowers")
)
assert len(ruler.patterns) == 2
assert len(doc.spans["ruler"]) == 2
patterns3 = [{"label": "DATE3", "pattern": "another time"}]
ruler.add_patterns(patterns3)
doc = ruler(
nlp.make_doc(
"I saw him last time we met, this time he brought some flowers, another time some chocolate."
)
)
assert len(ruler.patterns) == 3
assert len(doc.spans["ruler"]) == 3
ruler.remove("DATE3")
doc = ruler(
nlp.make_doc(
"I saw him last time we met, this time he brought some flowers, another time some chocolate."
)
)
assert len(ruler.patterns) == 2
assert len(doc.spans["ruler"]) == 2
def test_span_ruler_spans_filter(overlapping_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe(
"span_ruler",
config={"spans_filter": {"@misc": "spacy.first_longest_spans_filter.v1"}},
)
ruler.add_patterns(overlapping_patterns)
doc = ruler(nlp.make_doc("foo bar baz"))
assert len(doc.spans["ruler"]) == 1
assert doc.spans["ruler"][0].label_ == "FOOBAR"
def test_span_ruler_ents_default_filter(overlapping_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe("span_ruler", config={"annotate_ents": True})
ruler.add_patterns(overlapping_patterns)
doc = ruler(nlp.make_doc("foo bar baz"))
assert len(doc.ents) == 1
assert doc.ents[0].label_ == "FOOBAR"
def test_span_ruler_ents_overwrite_filter(overlapping_patterns):
nlp = spacy.blank("xx")
ruler = nlp.add_pipe(
"span_ruler",
config={
"annotate_ents": True,
"overwrite": False,
"ents_filter": {"@misc": "spacy.prioritize_new_ents_filter.v1"},
},
)
ruler.add_patterns(overlapping_patterns)
# overlapping ents are clobbered, non-overlapping ents are preserved
doc = nlp.make_doc("foo bar baz a b c")
doc.ents = [Span(doc, 1, 3, label="BARBAZ"), Span(doc, 3, 6, label="ABC")]
doc = ruler(doc)
assert len(doc.ents) == 2
assert doc.ents[0].label_ == "FOOBAR"
assert doc.ents[1].label_ == "ABC"
def test_span_ruler_ents_bad_filter(overlapping_patterns):
@registry.misc("test_pass_through_filter")
def make_pass_through_filter():
def pass_through_filter(spans1, spans2):
return spans1 + spans2
return pass_through_filter
nlp = spacy.blank("xx")
ruler = nlp.add_pipe(
"span_ruler",
config={
"annotate_ents": True,
"ents_filter": {"@misc": "test_pass_through_filter"},
},
)
ruler.add_patterns(overlapping_patterns)
with pytest.raises(ValueError):
ruler(nlp.make_doc("foo bar baz"))
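# Additional sketch (not part of the original suite): `remove_by_id` is only exercised
# on the error path above; this assumes it removes patterns by their "id" field.
def test_span_ruler_remove_by_id(patterns):
    nlp = spacy.blank("xx")
    ruler = nlp.add_pipe("span_ruler")
    ruler.add_patterns(patterns)
    assert len(ruler.patterns) == len(patterns)
    ruler.remove_by_id("hello1")
    assert len(ruler.patterns) == len(patterns) - 1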
| 34.809013 | 105 | 0.647864 |
794815b4706d6f0abb0b1add13fedc138dfe5141
| 21,756 |
py
|
Python
|
modeling/fit_and_predict.py
|
kuleafenu/covid19-severity-prediction
|
ebe2e938bbeb70d3dde53c1943f0cfdc02df5098
|
[
"MIT"
] | 1 |
2021-04-25T12:36:30.000Z
|
2021-04-25T12:36:30.000Z
|
modeling/fit_and_predict.py
|
kuleafenu/covid19-severity-prediction
|
ebe2e938bbeb70d3dde53c1943f0cfdc02df5098
|
[
"MIT"
] | null | null | null |
modeling/fit_and_predict.py
|
kuleafenu/covid19-severity-prediction
|
ebe2e938bbeb70d3dde53c1943f0cfdc02df5098
|
[
"MIT"
] | null | null | null |
import copy
import random
import numpy as np
import pandas as pd
from os.path import join as oj
import os
from collections import Counter
from models import exponential_modeling
import pmdl_weight
import datetime
from models.shared_models import SharedModel
from collections import defaultdict
import inspect
import sys
from tqdm import tqdm
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir)
very_important_vars = ['PopulationDensityperSqMile2010',
# 'MedicareEnrollment,AgedTot2017',
'PopulationEstimate2018',
'#ICU_beds',
'MedianAge2010',
'Smokers_Percentage',
'DiabetesPercentage',
'HeartDiseaseMortality',
'#Hospitals']
exponential = {'model_type': 'exponential'}
shared_exponential = {'model_type': 'shared_exponential'}
demographics = {'model_type': 'shared_exponential', 'demographic_vars': very_important_vars}
linear = {'model_type': 'linear'}
advanced_model = {'model_type': 'advanced_shared_model'}
def fit_and_predict(df,
outcome: str = 'deaths',
method: str = 'exponential',
mode: str = 'predict_future',
target_day: np.ndarray = np.array([1]),
output_key: str = None,
demographic_vars=[],
verbose: bool = False):
"""
Trains a method (method) to predict a current number of days ahead (target_day)
Predicts the values of the number of deaths for the final day of test_df and writes to the column
'predicted_deaths_'+method+'_'+str(target_day[-1]) of the test_df
Params
------
df
a df with county level deaths and cases and demographic information
outcome
key for the outcome to predict (the values in this column should have a list for each row)
method
what method to use to do forecasting
target_day
        np.array([1,2,...,n]) predicts these days ahead (can just be np.array([3]), for example, if you only want the 3-day-ahead prediction)
output_key
key to save the output as
mode:
either 'predict_future' or 'eval_mode'
        predict_future is predicting deaths on FUTURE days, so target_day=np.array([1]) means it predicts tomorrow's deaths
        eval_mode is for evaluating the performance of the classifier.
        target_day=np.array([k]) will predict the current day's death count using information from k days ago.
        target_day=np.array([1,2,3,...,k]) will predict today's deaths, yesterday's deaths, ..., and deaths from k-1 days ago using information from k days ago.
Returns
-------
test_df
returns dataframe with added column
"""
assert mode == 'predict_future' or mode == 'eval_mode', 'unknown mode'
if output_key is None:
output_key = f'predicted_{outcome}_{method}_{target_day[-1]}'
if len(demographic_vars) > 0:
output_key += '_demographics'
if method == 'AR':
print('currently deprecated')
        raise NotImplementedError
        # NOTE: the lines below are unreachable legacy code kept for reference;
        # naive_autoreg_baselines, train_df and test_df are not defined in this module.
        loss, model, best_window = naive_autoreg_baselines.train_and_evaluate_model(train_df, test_df)
        return naive_autoreg_baselines.make_predictions(test_df, model, best_window)
elif method == 'exponential':
preds = exponential_modeling.exponential_fit(df[outcome].values,
mode=mode,
target_day=target_day)
df[output_key] = preds
# del test_df['predicted_deaths_exponential']
return df
elif method == 'linear':
preds = exponential_modeling.linear_fit(df[outcome].values,
mode=mode,
target_day=target_day)
df[output_key] = preds
# del test_df['predicted_deaths_exponential']
return df
elif method == 'shared_exponential':
# Fit a poisson GLM with shared parameters across counties. Input to the poisson GLM is demographic_vars and log(previous_days_deaths+1)
cur_day_predictions = exponential_modeling.fit_and_predict_shared_exponential(df, mode, outcome=outcome,
demographic_vars=demographic_vars,
target_day=target_day,
verbose=verbose)
# if len(demographic_vars) > 0:
# output_key += '_demographics'
# import IPython
# IPython.embed()
df[output_key] = cur_day_predictions
return df
elif method == 'ensemble':
print('please use fit_and_predict_ensemble instead')
elif method == 'advanced_shared_model':
        feat_transforms = defaultdict(lambda: [lambda x: x])
        feat_transforms['deaths_per_cap'] = [lambda x: np.log(x + 1)]
        feat_transforms['deaths'] = [lambda x: np.log(x + 1)]
        feat_transforms['new_deaths'] = [lambda x: np.log(x + 1)]
        feat_transforms['cases'] = [lambda x: np.log(x + 1)]
        feat_transforms['neighbor_deaths'] = [lambda x: np.log(x + 1)]
        feat_transforms['neighbor_cases'] = [lambda x: np.log(x + 1)]
        feat_transforms['days_since_order'] = [lambda x: x]
        feat_transforms['week_since_order'] = [lambda x: x]
        feat_transforms['two_weeks_since_order'] = [lambda x: x]
        feat_transforms['is_weekday'] = [lambda x: x]
        feat_transforms['new_deaths_20'] = [lambda x: np.log(max(x + 1, 1))]
default_values = defaultdict(lambda: 0)
# aux_feats = ['cases','neighbor_deaths','neighbor_cases','new_deaths']
# aux_feats = ['cases','neighbor_deaths','neighbor_cases','is_weekday']
# aux_feats = ['is_weekday']
aux_feats = ['cases', 'neighbor_deaths', 'neighbor_cases'] # ,'is_weekday']
# aux_feats = ['days_since_order','two_weeks_since_order','neighbor_deaths','neighbor_cases','cases']
# aux_feats = ['two_weeks_since_order','neighbor_deaths','neighbor_cases','cases']
shared_model_predictions = [[] for i in range(len(df))]
for t in target_day:
t = np.array([t])
shared_model = SharedModel(df=df, outcome=outcome, demographic_variables=[], mode=mode, target_days=t,
feat_transforms=feat_transforms, auxiliary_time_features=aux_feats,
time_series_default_values=default_values, scale=True)
shared_model.create_dataset()
shared_model.fit_model()
shared_model.predict()
for i in range(len(shared_model.predictions)):
assert len(shared_model.predictions[i]) == 1
# If there is a prediction, make sure the new one is at least as large
new_prediction = shared_model.predictions[i][0]
if len(shared_model_predictions[i]) > 0:
new_prediction = max(shared_model_predictions[i][-1],new_prediction)
shared_model_predictions[i].append(new_prediction)
df[output_key] = shared_model_predictions
# df[output_key] = shared_model_predictions
return df
else:
print('Unknown method')
raise ValueError
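# Hypothetical usage sketch (column names and values are illustrative, not from the repo):
#
#   df = pd.DataFrame({'deaths': [[1, 2, 4, 7]], 'cases': [[10, 20, 40, 80]]})
#   df = fit_and_predict(df, outcome='deaths', method='exponential',
#                        mode='predict_future', target_day=np.array([1, 2, 3]))
#   df['predicted_deaths_exponential_3']  # per-county list of 1-, 2- and 3-day forecasts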
def fit_and_predict_ensemble(df,
target_day: np.ndarray = np.array([1]),
outcome: str = 'deaths',
methods: list = [shared_exponential, linear],
mode: str = 'predict_future',
output_key: str = None,
verbose: bool = False,
weight_c0: int = 1,
weight_mu: int = 0.5,
debug: bool = False,
):
"""
Function for ensemble prediction
Input:
df: pd.DataFrame
target_day: array
outcome: str
        methods: list of dictionaries
            each dictionary specifies the type and parameters of one model
mode: str
output_key: str
Output:
df with ensemble prediction
"""
if output_key is None:
output_key = f'predicted_{outcome}_ensemble_{target_day[-1]}'
predictions = {}
for (i, model) in enumerate(methods):
if debug:
print(f"[DEBUG] fit_and_predict_ensemble:{i}, {model}")
if 'demographic_vars' in model:
demographic_vars = model['demographic_vars']
else:
demographic_vars = []
predictions[i] = fit_and_predict(df,
outcome=outcome,
method=model['model_type'],
mode=mode,
target_day=target_day,
output_key=f'y_preds_{i}',
demographic_vars=demographic_vars,
verbose=verbose)[f'y_preds_{i}'].values
if mode == 'predict_future':
use_df = df
else:
use_df = exponential_modeling.leave_t_day_out(df, target_day[-1])
if debug:
print(f"[DEBUG] fit_and_predict_ensemble: compute weights.")
weights = pmdl_weight.compute_pmdl_weight(use_df,
methods=methods,
outcome=outcome,
target_day=target_day,
c0=weight_c0,
mu=weight_mu)
sum_weights = np.zeros(len(use_df))
for model_index in weights:
sum_weights = sum_weights + np.array(weights[model_index])
# weighted_preds = np.zeros((len(use_df), len(target_day)))
weighted_preds = [np.zeros(len(target_day)) for i in range(len(use_df))]
for i in range(len(df)):
for model_index in weights:
weighted_preds[i] += np.array(predictions[model_index][i]) * weights[model_index][i] / sum_weights[i]
# print out the relative contribution of each model
if verbose:
print('--- Model Contributions ---')
model_weight_counter = Counter()
for model_index in weights:
m_weights = 0
for i in range(len(use_df)):
m_weights += weights[model_index][i] / sum_weights[i]
m_weights = m_weights / len(use_df)
model_weight_counter[model_index] = m_weights
for model_index, weight in model_weight_counter.most_common():
print(str(methods[model_index]) + ': ' + str(weight))
# Make sure predictions are non-decreasing
if debug:
print(f"[DEBUG] fit_and_predict_ensemble: monotonicity constraint.")
monotonic_weighted_preds = []
for preds in weighted_preds:
new_preds = []
for i in range(len(preds)):
if i > 0:
new_preds.append(max(preds[i],preds[i-1]))
else:
new_preds.append(preds[i])
monotonic_weighted_preds.append(new_preds)
weighted_preds = monotonic_weighted_preds
df[output_key] = weighted_preds
return df
def previous_prediction_errors(df,
target_day: np.ndarray = np.array([1]),
outcome: str = 'deaths',
methods: list = [advanced_model, linear],
look_back_day: int = 5,
output_key: str = None):
"""
Calculating prediction errors of previous days
Input:
df: pd.DataFrame
target_day: np.ndarray
outcome: str
methods: list
look_back_day: int
returns the prediction errors for the last {look_back_day} days
Output:
list of {len(df)} dictionaries, the keys of each dictionary are days in target_day, and the values are a list of (normalized) l1 error, of length {look_back_day}
"""
# find previous models to run
previous_start_days = defaultdict(list)
for day in target_day:
for back_day in range(look_back_day):
previous_start_days[day + back_day].append(day)
# previous_model_predictions = {}
previous_model_errors = [defaultdict(list) for i in range(len(df))]
prediction_uncertainty = [defaultdict(list) for i in range(len(df))]
for t in previous_start_days:
previous_target_days = previous_start_days[t]
df_old = exponential_modeling.leave_t_day_out(df, t)
previous_model_predictions = fit_and_predict_ensemble(df_old,
target_day=np.array(previous_target_days),
outcome=outcome,
methods=methods,
mode='predict_future',
output_key='old_predictions',
)[
'old_predictions'].values # running old prediction models
for i in range(len(df)):
for (j, td) in enumerate(previous_target_days):
pred = previous_model_predictions[i][j]
actual_outcome = df[outcome].iloc[i][td - t - 1]
error = actual_outcome / max(pred, 1) - 1
previous_model_errors[i][td].append(error)
# for i in range(len(df)):
# for td in target_day:
# prediction_uncertainty[i][td] = max(previous_model_errors[i][td])
df[output_key] = previous_model_errors
return df
def add_prediction_intervals(df,
target_day: np.ndarray = np.array([1]),
outcome: str = 'deaths',
methods: list = [advanced_model, linear],
interval_type: str = 'local',
look_back_day: int = 5,
output_key: str = None):
"""
Adding intervals for future prediction
Input:
df: pd.DataFrame
target_day: np.ndarray
outcome: str
methods: list
interval_type: str
'local' or 'combined'
Output:
list of {len(df)} dictionaries, the keys of each dictionary are days in target_day, and the values are the predicted intervals
"""
assert interval_type == 'local' or interval_type == 'combined', 'unknown interval type'
lower_bound = {'deaths': 10, 'cases': 10}
df = previous_prediction_errors(df, target_day, outcome, methods, look_back_day=5, output_key='previous_errors')
df = fit_and_predict_ensemble(df,
target_day=target_day,
outcome=outcome,
methods=methods,
mode='predict_future',
output_key='new_predictions',
verbose=False)
preds = df['new_predictions'].values
latest_cases = np.array([p[-1] for p in df[outcome].values])
intervals = [[] for i in range(len(df))]
qts = {}
for td in target_day:
all_errors = []
for i in range(len(df)):
if latest_cases[i] >= lower_bound[outcome]:
all_errors += df['previous_errors'].values[i][td]
qts[td] = (np.quantile(np.array(all_errors), .05), np.quantile(np.array(all_errors), .95))
for i in range(len(df)):
largest_error = []
for (j, td) in enumerate(target_day):
largest_error.append(max(np.abs(np.array(df['previous_errors'].values[i][td]))))
if interval_type == 'local':
intervals[i].append((max(preds[i][j] * (1 - largest_error[-1]), latest_cases[i]),
preds[i][j] * (1 + largest_error[-1])))
elif interval_type == 'combined':
intervals[i].append((max(preds[i][j] * (1 + (qts[td][0] - largest_error[-1]) / 2), latest_cases[i]),
preds[i][j] * (1 + (largest_error[-1] + qts[td][1]) / 2)))
df[output_key] = intervals
return df
def add_preds(df_county, NUM_DAYS_LIST=[1, 2, 3], verbose=False, cached_dir=None,
outcomes=['Deaths', 'Cases'], discard=False, d=datetime.datetime.today(),
add_predict_interval=True, interval_target_days=[],
):
'''Adds predictions for the current best model
Adds keys that look like 'Predicted Deaths 1-day', 'Predicted Deaths 2-day', ...
'''
# select the best model
advanced_model = {'model_type': 'advanced_shared_model'}
linear = {'model_type': 'linear'}
BEST_MODEL = [advanced_model, linear]
# load cached preds
if cached_dir is not None:
# getting current date and time
if not discard:
cached_fname = oj(cached_dir, f'preds_{d.month}_{d.day}_cached.pkl')
else:
cached_fname = oj(cached_dir, f'preds_{d.month}_{d.day}_cached_discard1day.pkl')
if os.path.exists(cached_fname):
return pd.read_pickle(cached_fname)
print('predictions not cached, now calculating (might take a while)')
for outcome in outcomes:
print(f'predicting {outcome}...')
tmp = [0 for _ in range(df_county.shape[0])]
for num_days_in_future in tqdm(NUM_DAYS_LIST): # 1 is tomorrow
output_key = f'Predicted {outcome} {num_days_in_future}-day'
df_county = fit_and_predict_ensemble(df_county,
methods=BEST_MODEL,
outcome=outcome.lower(),
mode='predict_future',
target_day=np.array([num_days_in_future]),
output_key=output_key,
verbose=verbose)
vals = df_county[output_key].values
out = []
for i in range(vals.shape[0]):
if np.isnan(vals[i]):
out.append(0)
else:
out.append(max(vals[i][0],
list(df_county[outcome.lower()])[i][-1], tmp[i]))
df_county[output_key] = out
tmp = out
output_key = f'Predicted {outcome} Intervals'
if add_predict_interval:
if not interval_target_days:
interval_target_days = NUM_DAYS_LIST
print('prediction intervals...')
print(interval_target_days)
df_county = add_prediction_intervals(df_county,
target_day=np.array(interval_target_days),
outcome=outcome.lower(),
methods=BEST_MODEL,
interval_type='local',
output_key=output_key)
# add 3-day lagged death preds
output_key = f'Predicted Deaths 3-day Lagged'
df_county = fit_and_predict_ensemble(df_county,
methods=BEST_MODEL,
outcome='deaths',
mode='eval_mode',
target_day=np.array([3]),
output_key=output_key,
verbose=verbose)
df_county[output_key] = [v[0] for v in df_county[output_key].values]
if cached_dir is not None:
df_county.to_pickle(cached_fname)
return df_county
def tune_hyperparams(df, target_day, outcome, output_key, method_hyperparam_dict, error_fn, num_iters):
    def fit_model_with_random_params(df, i):
        output_key = 'hyperparams_i'
        methods = []
        sampled_params = []
        for method_name in method_hyperparam_dict:
            method_dict = {}
            method_dict['model_type'] = method_name
            method_hyperparam_choices = method_hyperparam_dict[method_name]
            for param_name in method_hyperparam_choices:
                method_dict[param_name] = random.choice(method_hyperparam_choices[param_name])
            methods.append(method_dict)
            # Record the sampled parameters as a hashable key for the Counter below
            sampled_params.append(tuple(sorted(method_dict.items())))
        fit_and_predict_ensemble(df=df, target_day=target_day, outcome=outcome, methods=methods,
                                 mode='eval_mode', output_key=output_key)
        score = error_fn(df[output_key], df[outcome])
        return tuple(sampled_params), score
    results = Counter()
    for i in range(num_iters):
        params, score = fit_model_with_random_params(copy.deepcopy(df), i)
        results[params] = -1 * score
    best_param, value = results.most_common(1)[0]
    return best_param, -1 * value
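# Hypothetical grid format for `method_hyperparam_dict` (illustrative only; the repo
# does not pin these values, and `my_l1_error` is a user-supplied scoring function):
#
#   method_hyperparam_dict = {'shared_exponential': {}, 'linear': {}}
#   best_param, best_score = tune_hyperparams(df, np.array([1]), 'deaths', 'tuned',
#                                             method_hyperparam_dict,
#                                             error_fn=my_l1_error, num_iters=10)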
| 44.040486 | 169 | 0.557042 |
7948164552581d320272aa9e2b0a26c53ca219fb
| 7,809 |
py
|
Python
|
tests/www/test_security.py
|
Patchus/airflow
|
4e0d14b70b9edd8ff1c7f228f8e57e82b3e72d1b
|
[
"Apache-2.0"
] | null | null | null |
tests/www/test_security.py
|
Patchus/airflow
|
4e0d14b70b9edd8ff1c7f228f8e57e82b3e72d1b
|
[
"Apache-2.0"
] | 1 |
2020-08-21T07:16:08.000Z
|
2020-08-21T07:16:08.000Z
|
tests/www/test_security.py
|
Patchus/airflow
|
4e0d14b70b9edd8ff1c7f228f8e57e82b3e72d1b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import unittest
import logging
import mock
from flask import Flask
from flask_appbuilder import AppBuilder, SQLA, Model, has_access, expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.views import ModelView, BaseView
from sqlalchemy import Column, Integer, String, Date, Float
from airflow.www.security import AirflowSecurityManager, dag_perms
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
log = logging.getLogger(__name__)
class SomeModel(Model):
id = Column(Integer, primary_key=True)
field_string = Column(String(50), unique=True, nullable=False)
field_integer = Column(Integer())
field_float = Column(Float())
field_date = Column(Date())
def __repr__(self):
return str(self.field_string)
class SomeModelView(ModelView):
datamodel = SQLAInterface(SomeModel)
base_permissions = ['can_list', 'can_show', 'can_add', 'can_edit', 'can_delete']
list_columns = ['field_string', 'field_integer', 'field_float', 'field_date']
class SomeBaseView(BaseView):
route_base = ''
@expose('/some_action')
@has_access
def some_action(self):
return "action!"
class TestSecurity(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///'
self.app.config['SECRET_KEY'] = 'secret_key'
self.app.config['CSRF_ENABLED'] = False
self.app.config['WTF_CSRF_ENABLED'] = False
self.db = SQLA(self.app)
self.appbuilder = AppBuilder(self.app,
self.db.session,
security_manager_class=AirflowSecurityManager)
self.security_manager = self.appbuilder.sm
self.appbuilder.add_view(SomeBaseView, "SomeBaseView", category="BaseViews")
self.appbuilder.add_view(SomeModelView, "SomeModelView", category="ModelViews")
role_admin = self.security_manager.find_role('Admin')
self.user = self.appbuilder.sm.add_user('admin', 'admin', 'user', 'admin@fab.org',
role_admin, 'general')
log.debug("Complete setup!")
def tearDown(self):
self.appbuilder = None
self.app = None
self.db = None
log.debug("Complete teardown!")
def test_init_role_baseview(self):
role_name = 'MyRole1'
role_perms = ['can_some_action']
role_vms = ['SomeBaseView']
self.security_manager.init_role(role_name, role_vms, role_perms)
role = self.appbuilder.sm.find_role(role_name)
self.assertIsNotNone(role)
self.assertEqual(len(role_perms), len(role.permissions))
def test_init_role_modelview(self):
role_name = 'MyRole2'
role_perms = ['can_list', 'can_show', 'can_add', 'can_edit', 'can_delete']
role_vms = ['SomeModelView']
self.security_manager.init_role(role_name, role_vms, role_perms)
role = self.appbuilder.sm.find_role(role_name)
self.assertIsNotNone(role)
self.assertEqual(len(role_perms), len(role.permissions))
def test_update_and_verify_permission_role(self):
role_name = 'Test_Role'
self.security_manager.init_role(role_name, [], [])
role = self.security_manager.find_role(role_name)
perm = self.security_manager.\
find_permission_view_menu('can_edit', 'RoleModelView')
self.security_manager.add_permission_role(role, perm)
role_perms_len = len(role.permissions)
self.security_manager.init_role(role_name, [], [])
new_role_perms_len = len(role.permissions)
self.assertEqual(role_perms_len, new_role_perms_len)
def test_get_user_roles(self):
user = mock.MagicMock()
user.is_anonymous = False
roles = self.appbuilder.sm.find_role('Admin')
user.roles = roles
self.assertEqual(self.security_manager.get_user_roles(user), roles)
@mock.patch('airflow.www.security.AirflowSecurityManager.get_user_roles')
def test_get_all_permissions_views(self, mock_get_user_roles):
role_name = 'MyRole1'
role_perms = ['can_some_action']
role_vms = ['SomeBaseView']
self.security_manager.init_role(role_name, role_vms, role_perms)
role = self.security_manager.find_role(role_name)
mock_get_user_roles.return_value = [role]
self.assertEqual(self.security_manager
.get_all_permissions_views(),
{('can_some_action', 'SomeBaseView')})
mock_get_user_roles.return_value = []
self.assertEqual(len(self.security_manager
.get_all_permissions_views()), 0)
@mock.patch('airflow.www.security.AirflowSecurityManager'
'.get_all_permissions_views')
@mock.patch('airflow.www.security.AirflowSecurityManager'
'.get_user_roles')
def test_get_accessible_dag_ids(self, mock_get_user_roles,
mock_get_all_permissions_views):
user = mock.MagicMock()
role_name = 'MyRole1'
role_perms = ['can_dag_read']
role_vms = ['dag_id']
self.security_manager.init_role(role_name, role_vms, role_perms)
role = self.security_manager.find_role(role_name)
user.roles = [role]
user.is_anonymous = False
mock_get_all_permissions_views.return_value = {('can_dag_read', 'dag_id')}
mock_get_user_roles.return_value = [role]
self.assertEqual(self.security_manager
.get_accessible_dag_ids(user), set(['dag_id']))
@mock.patch('airflow.www.security.AirflowSecurityManager._has_view_access')
def test_has_access(self, mock_has_view_access):
user = mock.MagicMock()
user.is_anonymous = False
mock_has_view_access.return_value = True
self.assertTrue(self.security_manager.has_access('perm', 'view', user))
def test_sync_perm_for_dag(self):
test_dag_id = 'TEST_DAG'
self.security_manager.sync_perm_for_dag(test_dag_id)
for dag_perm in dag_perms:
self.assertIsNotNone(self.security_manager.
find_permission_view_menu(dag_perm, test_dag_id))
@mock.patch('airflow.www.security.AirflowSecurityManager._has_perm')
@mock.patch('airflow.www.security.AirflowSecurityManager._has_role')
def test_has_all_dag_access(self, mock_has_role, mock_has_perm):
mock_has_role.return_value = True
self.assertTrue(self.security_manager.has_all_dags_access())
mock_has_role.return_value = False
mock_has_perm.return_value = False
self.assertFalse(self.security_manager.has_all_dags_access())
mock_has_perm.return_value = True
self.assertTrue(self.security_manager.has_all_dags_access())
| 40.046154 | 90 | 0.685235 |
7948166b1eeb16bc21ba0cd21093e2cda6977806
| 54,116 |
py
|
Python
|
QAMAS/_build/jupyter_execute/BuildingModel.py
|
ebenjaminrandall/QAMAS_book
|
8a6b78fbf3564ce313380619900f2de5fcbe9035
|
[
"MIT"
] | 1 |
2021-05-18T00:57:56.000Z
|
2021-05-18T00:57:56.000Z
|
QAMAS/_build/jupyter_execute/BuildingModel.py
|
ebenjaminrandall/QAMAS_book
|
8a6b78fbf3564ce313380619900f2de5fcbe9035
|
[
"MIT"
] | null | null | null |
QAMAS/_build/jupyter_execute/BuildingModel.py
|
ebenjaminrandall/QAMAS_book
|
8a6b78fbf3564ce313380619900f2de5fcbe9035
|
[
"MIT"
] | 1 |
2021-02-05T21:07:49.000Z
|
2021-02-05T21:07:49.000Z
|
#!/usr/bin/env python
# coding: utf-8
# # Building a model of oxidative ATP synthesis from energetic components
#
# Simulations in the preceding section illustrate how matrix ATP and ADP concentrations are governed by the contributors to the proton motive force. They also show how the matrix ATP/ADP ratio must typically be less than $1$, in contrast to the cytosolic ATP/ADP ratio, which is on the order of $100$. To understand the dependence of ATP synthesis and transport on the proton motive force, the kinetics of the processes that generate it, and the interplay of these processes, we can assemble models of the $\text{F}_0\text{F}_1$ ATP synthase, adenine nucleotide translocase (ANT), mitochondrial phosphate transport, and complexes I, III, and IV of the electron transport chain (ETC) to generate a core model of mitochondrial oxidative ATP synthesis.
# ## Adenine nucleotide translocase
#
# Following synthesis of ATP from ADP and Pi in the matrix, the final step in delivering ATP to the cytosol at physiological free energy levels is the electrically driven exchange of a matrix $\text{ATP}^{4-}$ for a cytosolic $\text{ADP}^{3-}$. This exchange process,
# ```{math}
# (\text{ATP}^{4-})_x + (\text{ADP}^{3-})_c \rightleftharpoons (\text{ATP}^{4-})_c + (\text{ADP}^{3-})_x \, ,
# ```
# is catalyzed by the ANT. Here, we assume rapid transport of species between the cytosol and the IMS, and therefore, equate IMS and cytosol species concentrations.
#
# To simulate the kinetics of this process, we use the Metelkin et al. model {cite}`Metelkin2006`, which accounts for pH and electrochemical dependencies. (Kinetic parameter value estimates for this model were updated by Wu et al. {cite}`Wu2008`.) The steady-state flux of ANT is expressed
# ```{math}
# :label: J_ANT
# J_{\text{ANT}} = E_{\text{ANT}} \dfrac{ \dfrac{ k_2^{\text{ANT}} q }{ K_o^D } [ \text{ATP}^{4-} ]_x [ \text{ADP}^{3-}]_c - \dfrac{ k_3^{\text{ANT}} }{ K_o^T } [ \text{ADP}^{3-} ]_x [ \text{ATP}^{4-} ]_c }{ \left(1 + \dfrac{ [ \text{ATP}^{4-} ]_c }{ K_o^T } + \dfrac{ [ \text{ADP}^{3-} ]_c }{ K_o^D } \right)( [ \text{ADP}^{3-} ]_x + [ \text{ATP}^{4-} ]_x q) },
# ```
# where $E_{\text{ANT}} \ \text{(mol (L mito)}^{-1})$ is the total ANT content of the mitochondria and
# ```{math}
# :label: phi
# k_2^\text{ANT} &=& k_{2,o}^\text{ANT} e^{( -3A - 4B + C) \phi}, \nonumber \\
# k_3^\text{ANT} &=& k_{3,o}^\text{ANT} e^{(-4A - 3B + C) \phi}, \nonumber \\
# K_o^D &=& K_o^{D,0} e^{3 \delta_D \phi}, \nonumber \\
# K_o^T &=& K_o^{T,0} e^{4 \delta_T \phi}, \nonumber \\
# q &=& \dfrac{ k_3^\text{ANT} K_o^D }{ k_2^\text{ANT} K_o^T } e^\phi, \quad \text{and} \nonumber \\
# \phi &=& F \Delta \Psi / R{\rm T}.
# ```
# All parameter values and units can be found in {numref}`table-ANT`, reproduced from {cite}`Bazil2016`.
# ```{list-table} Adenine nucleotide translocase (ANT) parameters.
# :header-rows: 1
# :name: table-ANT
#
# * - Parameter
# - Units
# - Description
# - Value
# * - $E_\text{ANT}$
# - mol (L mito)$^{-1}$
# - ANT activity
# - $0.325$
# * - $\delta_D$
# -
# - ADP displacement binding constant
# - $0.0167 $
# * - $\delta_T$
# -
# - ATP displacement binding constant
# - $0.0699 $
# * - $k_{2,o}^\text{ANT}$
# - s$^{-1}$
# - Forward translocation rate
# - $0.159 $
# * - $k_{3,o}^\text{ANT}$
# - s$^{-1}$
# - Reverse translocation rate
# - $0.501 $
# * - $K_o^{D,0}$
# - $\mu$mol (L cyto water)$^{-1}$
# - ADP binding constant
# - $38.89 $
# * - $K_o^{T,0}$
# - $\mu$mol (L cyto water)$^{-1}$
# - ATP binding constant
# - $56.05$
# * - $A$
# -
# - Translocation displacement constant
# - $0.2829 $
# * - $B$
# -
# - Translocation displacement constant
# - $ -0.2086 $
# * - $C$
# -
# - Translocation displacement constant
# - $0.2372$
# ```
# To simulate ANT and F$_0$F$_1$ ATP synthase activity simultaneously, we extend the system of Equation {eq}`system-ATPase` by adding states for cytosolic species $[\Sigma \text{ATP} ]_c$ and $[\Sigma \text{ADP}]_c$, yielding
# ```{math}
# :label: system-ATP_ANT
# \left\{
# \renewcommand{\arraystretch}{2}
# \begin{array}{rlrl}
# \dfrac{ {\rm d} [\Sigma \text{ATP}]_x }{{\rm d} t} &= (J_\text{F} - J_\text{ANT} ) / W_x, & \dfrac{ {\rm d} [\Sigma \text{ATP}]_c }{{\rm d} t} &= (V_{m2c} J_\text{ANT}) / W_c, \\
# \dfrac{ {\rm d} [\Sigma \text{ADP}]_x }{{\rm d} t} &= (-J_\text{F} + J_\text{ANT}) / W_x, & \dfrac{ {\rm d} [\Sigma \text{ADP}]_c }{{\rm d} t} &= (-V_{m2c} J_\text{ANT}) / W_c, \\
# \dfrac{ {\rm d} [\Sigma \text{Pi}]_x }{{\rm d} t} &= 0 & &\\
# \end{array}
# \renewcommand{\arraystretch}{1}
# \right.
# ```
# where $V_{m2c} \ \text{(L mito) (L cyto)}^{-1}$ is the fraction of the volume of mitochondria per volume cytosol and $W_c \ \text{(L cyto water) (L cyto)}^{-1}$ is the fraction of water volume in the cytoplasm to the total volume of the cytoplasm ({numref}`table-biophysicalconstants`).
# Here, we clamp the matrix phosphate concentration at a constant value since the system of equations in Equation {eq}`system-ATP_ANT` does not account for phosphate transport between the matrix and the cytosol.
# In[1]:
import numpy as np
import matplotlib.pyplot as plt
get_ipython().system('pip install scipy')
from scipy.integrate import solve_ivp
###### Constants defining metabolite pools ######
# Volume fractions and water space fractions
V_c = 0.6601 # cytosol volume fraction # L cyto (L cell)**(-1)
V_m = 0.2882 # mitochondrial volume fraction # L mito (L cell)**(-1)
V_m2c = V_m / V_c # mito to cyto volume ratio # L mito (L cuvette)**(-1)
W_c = 0.8425 # cytosol water space # L cyto water (L cyto)**(-1)
W_m = 0.7238 # mitochondrial water space # L mito water (L mito)**(-1)
W_x = 0.9*W_m # matrix water space # L matrix water (L mito)**(-1)
# Membrane potential
DPsi = 175/1000
###### Set fixed pH and cation concentrations ######
# pH
pH_x = 7.40
pH_c = 7.20
# K+ concentrations
K_x = 100e-3 # mol (L matrix water)**(-1)
K_c = 140e-3 # mol (L cyto water)**(-1)
# Mg2+ concentrations
Mg_x = 1.0e-3 # mol (L matrix water)**(-1)
Mg_c = 1.0e-3 # mol (L cyto water)**(-1)
###### Parameter vector ######
X_F = 1000 # Synthase activity
E_ANT = 0.325 # Nucleotide transporter activity
activity_array = np.array([X_F, E_ANT]) # Note: This array will be larger in the future parts
###### Initial Conditions ######
# Matrix species
sumATP_x_0 = 0.5e-3 # mol (L matrix water)**(-1)
sumADP_x_0 = 9.5e-3 # mol (L matrix water)**(-1)
sumPi_x_0 = 1e-3 # mol (L matrix water)**(-1)
# Cytoplasmic species
sumATP_c_0 = 0 #9.95e-3 # mol (L cyto water)**(-1)
sumADP_c_0 = 10e-3 #0.05e-3 # mol (L cyto water)**(-1)
X_0 = np.array([sumATP_x_0, sumADP_x_0, sumPi_x_0, sumATP_c_0, sumADP_c_0])
def dXdt(t, X, activity_array):
# Unpack variables
sumATP_x, sumADP_x, sumPi_x, sumATP_c, sumADP_c = X
X_F, E_ANT = activity_array
# Hydrogen ion concentration
H_x = 10**(-pH_x) # mol (L matrix water)**(-1)
H_c = 10**(-pH_c) # mol (L cuvette water)**(-1)
# Thermochemical constants
R = 8.314 # J (mol K)**(-1)
T = 37 + 273.15 # K
F = 96485 # C mol**(-1)
# Proton motive force parameters (dimensionless)
n_F = 8/3
# Dissociation constants
K_MgATP = 10**(-3.88)
K_HATP = 10**(-6.33)
K_KATP = 10**(-1.02)
K_MgADP = 10**(-3.00)
K_HADP = 10**(-6.26)
K_KADP = 10**(-0.89)
K_MgPi = 10**(-1.66)
K_HPi = 10**(-6.62)
K_KPi = 10**(-0.42)
## Binding polynomials
# Matrix species # mol (L mito water)**(-1)
PATP_x = 1 + H_x/K_HATP + Mg_x/K_MgATP + K_x/K_KATP
PADP_x = 1 + H_x/K_HADP + Mg_x/K_MgADP + K_x/K_KADP
PPi_x = 1 + H_x/K_HPi + Mg_x/K_MgPi + K_x/K_KPi
# Cytosol species # mol (L cuvette water)**(-1)
PATP_c = 1 + H_c/K_HATP + Mg_c/K_MgATP + K_c/K_KATP
PADP_c = 1 + H_c/K_HADP + Mg_c/K_MgADP + K_c/K_KADP
## Unbound species
# Matrix species
ATP_x = sumATP_x / PATP_x # [ATP4-]_x
ADP_x = sumADP_x / PADP_x # [ADP3-]_x
# Cytosol species
ATP_c = sumATP_c / PATP_c # [ATP4-]_c
ADP_c = sumADP_c / PADP_c # [ADP3-]_c
###### F0F1-ATPase ######
# ADP3-_x + HPO42-_x + H+_x + n_A*H+_i <-> ATP4- + H2O + n_A*H+_x
# Gibbs energy (J mol**(-1))
DrGo_F = 4990
DrGapp_F = DrGo_F + R * T * np.log( H_x * PATP_x / (PADP_x * PPi_x))
# Apparent equilibrium constant
Kapp_F = np.exp( (DrGapp_F + n_F * F * DPsi ) / (R * T)) * (H_c / H_x)**n_F
# Flux (mol (s * L mito)**(-1))
J_F = X_F * (Kapp_F * sumADP_x * sumPi_x - sumATP_x)
###### ANT ######
# ATP4-_x + ADP3-_i <-> ATP4-_i + ADP3-_x
#Constants
del_D = 0.0167
del_T = 0.0699
k2o_ANT = 9.54/60 # s**(-1)
k3o_ANT = 30.05/60 # s**(-1)
K0o_D = 38.89e-6 # mol (L cuvette water)**(-1)
K0o_T = 56.05e-6 # mol (L cuvette water)**(-1)
A = +0.2829
B = -0.2086
C = +0.2372
phi = F * DPsi / (R * T)
# Reaction rates (s**(-1))
k2_ANT = k2o_ANT * np.exp((A*(-3) + B*(-4) + C)*phi)
k3_ANT = k3o_ANT * np.exp((A*(-4) + B*(-3) + C)*phi)
# Dissociation constants (M)
K0_D = K0o_D * np.exp(3*del_D*phi)
K0_T = K0o_T * np.exp(4*del_T*phi)
q = k3_ANT * K0_D * np.exp(phi) / (k2_ANT * K0_T)
term1 = k2_ANT * ATP_x * ADP_c * q / K0_D
term2 = k3_ANT * ADP_x * ATP_c / K0_T
num = term1 - term2
den = (1 + ATP_c/K0_T + ADP_c/K0_D) * (ADP_x + ATP_x * q)
# Flux (mol (s * L mito)**(-1))
J_ANT = E_ANT * num / den
###### Differential equations (equation 14) ######
# Matrix species
dATP_x = (J_F - J_ANT) / W_x
dADP_x = (-J_F + J_ANT) / W_x
dPi_x = 0
# Cytosol species
dATP_c = ( V_m2c * J_ANT) / W_c
dADP_c = (-V_m2c * J_ANT) / W_c
dX = [dATP_x, dADP_x, dPi_x, dATP_c, dADP_c]
return dX
# Solve ODE
results = solve_ivp(dXdt, [0, 2], X_0, method = 'Radau', args=(activity_array,))
t = results.t
sumATP_x, sumADP_x, sumPi_x, sumATP_c, sumADP_c = results.y
# Plot figures
fig, ax = plt.subplots(1,2, figsize = (10,5))
ax[0].plot(t, sumATP_x*1000, label = '[$\Sigma$ATP]$_x$')
ax[0].plot(t, sumADP_x*1000, label = '[$\Sigma$ADP]$_x$')
ax[0].plot(t, sumPi_x*1000, label = '[$\Sigma$Pi]$_x$')
ax[0].legend(loc="right")
ax[0].set_ylim((-.5,10.5))
ax[0].set_xlabel('Time (s)')
ax[0].set_xticks([0,1,2])
ax[0].set_ylabel('Concentration (mM)')
ax[1].plot(t, sumATP_c*1000, label = '[$\Sigma$ATP]$_c$')
ax[1].plot(t, sumADP_c*1000, label = '[$\Sigma$ADP]$_c$')
ax[1].set_ylim((-0.5,10.5))
ax[1].set_xticks([0,1,2])
ax[1].legend(loc="right")
ax[1].set_xlabel('Time (s)')
plt.show()
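# Hedged addition (not in the original notebook): report the steady-state ATP/ADP
# ratios implied by the final time point of the simulation above.
print('Matrix ATP/ADP ratio: ', sumATP_x[-1] / sumADP_x[-1])
print('Cytosol ATP/ADP ratio:', sumATP_c[-1] / sumADP_c[-1])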
# **Figure 4:** Steady state solution from Equation {eq}`system-ATP_ANT` for the (a) matrix and (b) cytosol species with $\Delta \Psi = 175$ mV, $\text{pH}_x = 7.4$, and $\text{pH}_c = 7.2$.
# The above simulations of the system of Equation {eq}`system-ATP_ANT` show how the electrogenic nature of the ANT transport results in the markedly different ATP/ADP ratios in the cytosol compared to the matrix. As we saw in the previous chapter, the ATP hydrolysis potential in the matrix is approximately $\text{-}45 \ \text{kJ mol}^{-1}$. The roughly $100$:$1$ ratio of ATP to ADP in the cytosol is associated with a hydrolysis potential of approximately $\text{-}65 \ \text{kJ mol}^{-1}$. The difference of $20 \ \text{kJ mol}^{-1}$ between the matrix and the cytosolic space is driven primarily by the membrane potential, which is roughly equivalent to $20 \ \text{kJ mol}^{-1}$.
# ## Inorganic phosphate transport
#
# During active ATP synthesis, mitochondrial Pi is replenished via the activity of the phosphate-proton cotransporter (PiC), catalyzing the electroneutral cotransport of protonated inorganic phosphate, $\text{H}_2\text{PO}_4^{-}$, and $\text{H}^{+}$ across the membrane. Again, we assume rapid transport between the cytoplasm and intermembrane space, and hence, we have
# ```{math}
# (\text{H}_2\text{PO}_4^{-})_c + (\text{H}^{+})_c \rightleftharpoons (\text{H}_2\text{PO}_4^{-})_x + (\text{H}^{+})_x.
# ```
# Adopting the flux equation from Bazil et al. {cite}`Bazil2016`, we have
# ```{math}
# :label: J_PiC
# J_\text{PiC} = E_{\text{PiC}} \dfrac{ [\text{H}^{+} ]_{c} [\text{H}_2\text{PO}_4^{-}]_{c} - [\text{H}^{+}]_{x} [\text{H}_2\text{PO}_4^{-}]_{x} }{ [\text{H}_2\text{PO}_4^{-}]_c + k_{\text{PiC}} },
# ```
# where $E_{\text{PiC}} \ \text{(L matrix water) s}^{-1} \text{ (L mito)}^{-1}$ is the PiC activity rate and $k_{\text{PiC}} = 1.61$ mM is an effective Michaelis-Menten constant. The $\text{H}_2\text{PO}_4^{-}$ concentrations in the matrix and cytosol are computed via the relationship
# ```{math}
# [\text{H}_2\text{PO}_4^{-}] = [\Sigma{\rm Pi}] \left( [{\rm H}^+]/K_{\rm HPi} \right) / P_{\rm Pi}
# ```
# from Equation \eqref{sumPi}.
#
#
# To incorporate PiC into Equation {eq}`system-ATP_ANT`, we add a new state $[\Sigma \text{Pi}]_c$ such that at given membrane potential, matrix and cytosolic pH, and cation concentrations, we obtain
# ```{math}
# :label: system-ATP_ANT_PiC
# \left\{
# \renewcommand{\arraystretch}{2}
# \begin{array}{rlrl}
# \dfrac{ {\rm d} [\Sigma \text{ATP}]_x }{{\rm d} t} &= (J_\text{F} - J_\text{ANT} ) / W_x, & \dfrac{ {\rm d} [\Sigma \text{ATP}]_c }{{\rm d} t} &= (V_{m2c} J_\text{ANT}) / W_c \\
# \dfrac{ {\rm d} [\Sigma \text{ADP}]_x }{{\rm d} t} &= (-J_\text{F} + J_\text{ANT}) / W_x, & \dfrac{ {\rm d} [\Sigma \text{ADP}]_c }{{\rm d} t} &= (-V_{m2c} J_\text{ANT}) / W_c, \\
# \dfrac{ {\rm d} [\Sigma \text{Pi}]_x }{{\rm d} t} &= (-J_\text{F} + J_\text{PiC}) / W_x, & \dfrac{ {\rm d} [\Sigma \text{Pi}]_c }{{\rm d} t} &= (- V_{m2c} J_\text{PiC}) / W_c,
# \end{array}
# \renewcommand{\arraystretch}{1}
# \right.
# ```
# The following code simulates the synthesis of ATP from ADP and Pi and their translocation across the IMM under physiological conditions.
# In[2]:
import numpy as np
import matplotlib.pyplot as plt
get_ipython().system('pip install scipy')
from scipy.integrate import solve_ivp
###### Constants defining metabolite pools ######
# Volume fractions and water space fractions
V_c = 0.6601 # cytosol volume fraction # L cyto (L cell)**(-1)
V_m = 0.2882 # mitochondrial volume fraction # L mito (L cell)**(-1)
V_m2c = V_m / V_c # mito to cyto volume ratio # L mito (L cuvette)**(-1)
W_c = 0.8425 # cytosol water space # L cyto water (L cyto)**(-1)
W_m = 0.7238 # mitochondrial water space # L mito water (L mito)**(-1)
W_x = 0.9*W_m # matrix water space # L matrix water (L mito)**(-1)
# Membrane potential
DPsi = 175/1000
###### Set fixed pH, cation concentrations, and O2 partial pressure ######
# pH
pH_x = 7.40
pH_c = 7.20
# K+ concentrations
K_x = 100e-3 # mol (L matrix water)**(-1)
K_c = 140e-3 # mol (L cyto water)**(-1)
# Mg2+ concentrations
Mg_x = 1.0e-3 # mol (L matrix water)**(-1)
Mg_c = 1.0e-3 # mol (L cyto water)**(-1)
###### Parameter vector ######
X_F = 100 # Synthase activity
E_ANT = 0.325 # Nucleotide transporter activity
E_PiC = 5.0e6 # Phosphate transporter activity
activity_array = np.array([X_F, E_ANT, E_PiC])
###### Initial Conditions ######
# Matrix species
sumATP_x_0 = 0.5e-3 # mol (L matrix water)**(-1)
sumADP_x_0 = 9.5e-3 # mol (L matrix water)**(-1)
sumPi_x_0 = 1e-3 # mol (L matrix water)**(-1)
# Cytosolic species
sumATP_c_0 = 0 # mol (L cyto water)**(-1)
sumADP_c_0 = 10e-3 # mol (L cyto water)**(-1)
sumPi_c_0 = 10e-3 # mol (L cyto water)**(-1)
X_0 = np.array([sumATP_x_0, sumADP_x_0, sumPi_x_0, sumATP_c_0, sumADP_c_0, sumPi_c_0])
def dXdt(t, X, activity_array):
# Unpack variables
sumATP_x, sumADP_x, sumPi_x, sumATP_c, sumADP_c, sumPi_c = X
X_F, E_ANT, E_PiC = activity_array
# Hydrogen ion concentration
H_x = 10**(-pH_x) # mol (L matrix water)**(-1)
H_c = 10**(-pH_c) # mol (L cuvette water)**(-1)
# Thermochemical constants
R = 8.314 # J (mol K)**(-1)
T = 37 + 273.15 # K
F = 96485 # C mol**(-1)
# Proton motive force parameters (dimensionless)
n_F = 8/3
# Dissociation constants
K_MgATP = 10**(-3.88)
K_HATP = 10**(-6.33)
K_KATP = 10**(-1.02)
K_MgADP = 10**(-3.00)
K_HADP = 10**(-6.26)
K_KADP = 10**(-0.89)
K_MgPi = 10**(-1.66)
K_HPi = 10**(-6.62)
K_KPi = 10**(-0.42)
## Binding polynomials
# Matrix species # mol (L mito water)**(-1)
PATP_x = 1 + H_x/K_HATP + Mg_x/K_MgATP + K_x/K_KATP
PADP_x = 1 + H_x/K_HADP + Mg_x/K_MgADP + K_x/K_KADP
PPi_x = 1 + H_x/K_HPi + Mg_x/K_MgPi + K_x/K_KPi
# Cytosol species # mol (L cuvette water)**(-1)
PATP_c = 1 + H_c/K_HATP + Mg_c/K_MgATP + K_c/K_KATP
PADP_c = 1 + H_c/K_HADP + Mg_c/K_MgADP + K_c/K_KADP
PPi_c = 1 + H_c/K_HPi + Mg_c/K_MgPi + K_c/K_KPi
## Unbound species
# Matrix species
ATP_x = sumATP_x / PATP_x # [ATP4-]_x
ADP_x = sumADP_x / PADP_x # [ADP3-]_x
Pi_x = sumPi_x / PPi_x # [HPO42-]_x
# Cytosol species
ATP_c = sumATP_c / PATP_c # [ATP4-]_c
ADP_c = sumADP_c / PADP_c # [ADP3-]_c
Pi_c = sumPi_c / PPi_c # [HPO42-]_c
###### H+-PI2 cotransporter ######
# H2PO42-_x + H+_x = H2PO42-_c + H+_c
# Constant
k_PiC = 1.61e-3 # mol (L cuvette)**(-1)
# H2P04- species
HPi_c = Pi_c * (H_c / K_HPi)
HPi_x = Pi_x * (H_x / K_HPi)
# Flux (mol (s * L mito)**(-1))
J_PiC = E_PiC * (H_c * HPi_c - H_x * HPi_x) / (k_PiC + HPi_c)
###### F0F1-ATPase ######
# ADP3-_x + HPO42-_x + H+_x + n_A*H+_i <-> ATP4- + H2O + n_A*H+_x
# Gibbs energy (J mol**(-1))
DrGo_F = 4990
DrGapp_F = DrGo_F + R * T * np.log( H_x * PATP_x / (PADP_x * PPi_x))
# Apparent equilibrium constant
Kapp_F = np.exp( (DrGapp_F + n_F * F * DPsi ) / (R * T)) * (H_c / H_x)**n_F
# Flux (mol (s * L mito)**(-1))
J_F = X_F * (Kapp_F * sumADP_x * sumPi_x - sumATP_x)
###### ANT ######
# ATP4-_x + ADP3-_i <-> ATP4-_i + ADP3-_x
# Constants
del_D = 0.0167
del_T = 0.0699
k2o_ANT = 9.54/60 # s**(-1)
k3o_ANT = 30.05/60 # s**(-1)
K0o_D = 38.89e-6 # mol (L cuvette water)**(-1)
K0o_T = 56.05e-6 # mol (L cuvette water)**(-1)
A = +0.2829
B = -0.2086
C = +0.2372
phi = F * DPsi / (R * T)
# Reaction rates (s**(-1))
k2_ANT = k2o_ANT * np.exp((A*(-3) + B*(-4) + C)*phi)
k3_ANT = k3o_ANT * np.exp((A*(-4) + B*(-3) + C)*phi)
# Dissociation constants (M)
K0_D = K0o_D * np.exp(3*del_D*phi)
K0_T = K0o_T * np.exp(4*del_T*phi)
q = k3_ANT * K0_D * np.exp(phi) / (k2_ANT * K0_T)
term1 = k2_ANT * ATP_x * ADP_c * q / K0_D
term2 = k3_ANT * ADP_x * ATP_c / K0_T
num = term1 - term2
den = (1 + ATP_c/K0_T + ADP_c/K0_D) * (ADP_x + ATP_x * q)
# Flux (mol (s * L mito)**(-1))
J_ANT = E_ANT * num / den
###### Differential equations (equation 15) ######
# Matrix species
dATP_x = (J_F - J_ANT) / W_x
dADP_x = (-J_F + J_ANT) / W_x
dPi_x = (-J_F + J_PiC) / W_x
# Buffer species
dATP_c = ( V_m2c * J_ANT) / W_c
dADP_c = (-V_m2c * J_ANT) / W_c
dPi_c = (-V_m2c * J_PiC) / W_c
dX = [dATP_x, dADP_x, dPi_x, dATP_c, dADP_c, dPi_c]
return dX
# Solve ODE
t = np.linspace(0,2,100)
results = solve_ivp(dXdt, [0, 2], X_0, method = 'Radau', t_eval = t, args=(activity_array,))
sumATP_x, sumADP_x, sumPi_x, sumATP_c, sumADP_c, sumPi_c = results.y
# Plot figures
fig, ax = plt.subplots(1,2, figsize = (10,5))
ax[0].plot(t, sumATP_x*1000, label = '[$\Sigma$ATP]$_x$')
ax[0].plot(t, sumADP_x*1000, label = '[$\Sigma$ADP]$_x$')
ax[0].plot(t, sumPi_x*1000, label = '[$\Sigma$Pi]$_x$')
ax[0].legend(loc="right")
ax[0].set_ylim((-.5,10.5))
ax[0].set_xlim((0,2))
ax[0].set_xticks([0,1,2])
ax[0].set_xlabel('Time (s)')
ax[0].set_ylabel('Concentration (mM)')
ax[1].plot(t, sumATP_c*1000, label = '[$\Sigma$ATP]$_c$')
ax[1].plot(t, sumADP_c*1000, label = '[$\Sigma$ADP]$_c$')
ax[1].plot(t, sumPi_c*1000, label = '[$\Sigma$Pi]$_c$')
ax[1].set_ylim((-0.5,10.5))
ax[1].set_xlim((0,2))
ax[1].set_xticks([0,1,2])
ax[1].legend(loc="right")
ax[1].set_xlabel('Time (s)')
plt.show()
# **Figure 5:** Steady state solution from Equation {eq}`system-ATP_ANT_PiC` for the (a) matrix and (b) cytosol species with $\Delta \Psi = 175$ mV, $\text{pH}_x = 7.4$, and $\text{pH}_c = 7.2$.
# For the above simulations, cytosolic inorganic phosphate is set to $10 \ \text{mM}$ initially, and all other initial conditions remain unchanged. Driven by $\Delta \text{pH}$, a gradient in phosphate concentration is established, with a steady-state ratio of matrix-to-cytosol concentration of approximately $2.2$. As seen in the previous section, with a constant membrane potential of $175 \ \text{mV}$, the ATP/ADP ratio is maintained at a much higher level in the cytosol than in the matrix.
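# The quoted factor of approximately $2.2$ can be checked with a short equilibrium calculation: setting the PiC flux in Equation {eq}`J_PiC` to zero gives $[\text{H}^{+}]_c [\text{H}_2\text{PO}_4^{-}]_c = [\text{H}^{+}]_x [\text{H}_2\text{PO}_4^{-}]_x$, and converting to total Pi via the binding polynomials gives the matrix-to-cytosol ratio evaluated below (a standalone sketch; the `_ex` names are local to this check).
K_HPi_ex, K_MgPi_ex, K_KPi_ex = 10**(-6.62), 10**(-1.66), 10**(-0.42)
H_x_ex, H_c_ex = 10**(-7.40), 10**(-7.20)
Mg_ex, K_x_ex, K_c_ex = 1.0e-3, 100e-3, 140e-3
PPi_x_ex = 1 + H_x_ex/K_HPi_ex + Mg_ex/K_MgPi_ex + K_x_ex/K_KPi_ex
PPi_c_ex = 1 + H_c_ex/K_HPi_ex + Mg_ex/K_MgPi_ex + K_c_ex/K_KPi_ex
Pi_ratio_ex = (H_c_ex/H_x_ex)**2 * PPi_x_ex / PPi_c_ex   # [Sum Pi]_x / [Sum Pi]_c at PiC equilibrium
print('Matrix-to-cytosol total Pi ratio at PiC equilibrium:', Pi_ratio_ex)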
#
# The final matrix and cytosol ATP and ADP concentrations depend not only on the membrane potential, but also on the total amount of exchangeable phosphate in the system. Here these simulations start with $[\text{Pi}]_c = 10 \ \text{mM}$ and $[\text{Pi}]_x = 1 \ \text{mM}$. The initial $10 \ \text{mM}$ of ADP in the cytosol becomes almost entirely phosphorylated to ATP, leaving $0.32 \ \text{mM}$ of inorganic phosphate in the cytosol in the final steady state. To explore how these steady states depend on $\Delta\Psi$, the following code simulates the steady-state behavior of this system for a range of $\Delta\Psi$ from $100$ to $250 \ \text{mV}$. These simulations, based on a simple, thermodynamically constrained model, show that it is not possible to synthesize ATP at physiological free energy levels for values of $\Delta\Psi$ lower than approximately $160 \ \text{mV}$.
# In[3]:
from scipy.integrate import solve_ivp
### Simulate over a range of Membrane potential from 100 mV to 250 mV ###
# Define array to iterate over
membrane_potential = np.linspace(100,250) # mV
# Define arrays to store steady state results
ATP_x_steady = np.zeros(len(membrane_potential))
ADP_x_steady = np.zeros(len(membrane_potential))
Pi_x_steady = np.zeros(len(membrane_potential))
ATP_c_steady = np.zeros(len(membrane_potential))
ADP_c_steady = np.zeros(len(membrane_potential))
Pi_c_steady = np.zeros(len(membrane_potential))
# Iterate through range of membrane potentials
for i in range(len(membrane_potential)):
DPsi = membrane_potential[i] / 1000 # convert to V
temp_results = solve_ivp(dXdt, [0, 200], X_0, method = 'Radau', args=(activity_array,)).y*1000 # Concentration in mM
ATP_x_steady[i] = temp_results[0,-1]
ADP_x_steady[i] = temp_results[1,-1]
Pi_x_steady[i] = temp_results[2,-1]
ATP_c_steady[i] = temp_results[3,-1]
ADP_c_steady[i] = temp_results[4,-1]
Pi_c_steady[i] = temp_results[5,-1]
# Plot figures
fig, ax = plt.subplots(1,2, figsize = (10,5))
ax[0].plot(membrane_potential, ATP_x_steady, label = '[$\Sigma$ATP]$_x$')
ax[0].plot(membrane_potential, ADP_x_steady, label = '[$\Sigma$ADP]$_x$')
ax[0].plot(membrane_potential, Pi_x_steady, label = '[$\Sigma$Pi]$_x$')
ax[0].legend(loc = "right")
ax[0].set_xlabel('Membrane potential (mV)')
ax[0].set_ylabel('Concentration (mM)')
ax[0].set_xlim([100, 250])
ax[0].set_ylim([-0.5,13])
ax[1].plot(membrane_potential, ATP_c_steady, label = '[$\Sigma$ATP]$_c$')
ax[1].plot(membrane_potential, ADP_c_steady, label = '[$\Sigma$ADP]$_c$')
ax[1].plot(membrane_potential, Pi_c_steady, label = '[$\Sigma$Pi]$_c$')
ax[1].legend(loc = "right")
ax[1].set_xlabel('Membrane potential (mV)')
ax[1].set_ylabel('Concentration (mM)')
ax[1].set_xlim([100, 250])
ax[1].set_ylim([-0.5,13])
plt.show()
# **Figure 6:** Simulation of concentration versus $\Delta \Psi$ for Equation {eq}`system-ATP_ANT_PiC` for the (a) matrix and (b) cytosol species with $\Delta \Psi$ from $100$ to $250$ mV.
# Simulation of this system reinforces the fact that ATP cannot be synthesized at physiological free energy levels for mitochondrial membrane potentials less than approximately $150 \ \text{mV}$.
# ## Respiratory complexes and NADH synthesis
#
# The previous sections have assumed a constant membrane potential. To account for the processes that generate the membrane potential, we model proton pumping associated with the respiratory complexes I, III, and IV of the ETC ({numref}`mitofig`).
# ### ETC complex I
#
# Coupled with the translocation of $n_\text{C1} = 4$ protons across the IMM against the electrochemical gradient, electrons are transferred from NADH to ubiquinone ($Q$) at complex I of the ETC via the reaction
# ```{math}
# :label: reaction_C1
# (\text{NADH}^{2-})_x + (\text{H}^{+})_x + (\text{Q})_x + n_\text{C1} (\text{H}^{+})_x \rightleftharpoons (\text{NAD}^{-})_x + (\text{QH}_2)_x + \text{H}_2\text{O} + n_\text{C1}(\text{H}^+)_c.
# ```
# Since protons move against the gradient when the reaction proceeds in the left-to-right direction, the overall Gibbs energy for the reaction of Equation {eq}`reaction_C1` is
# ```{math}
# \Delta G_\text{C1} &= \Delta_r G_\text{C1} - n_\text{C1} \Delta G_{\rm H} \nonumber \\
# &= \Delta_r G_\text{C1}^\circ + R{\rm T} \ln \left( \dfrac{ [\text{NAD}^{-}]_x [\text{QH}_2]_x }{ [\text{NADH}^{2-}]_x [\text{Q}]_x} \cdot \dfrac{1}{[\text{H}^{+}]_x } \right) + n_\text{C1} F \Delta \Psi - R{\rm T} \ln \left( \dfrac{ [\text{H}^{+}]_x }{ [\text{H}^{+}]_c } \right)^{n_{\text{C1}}} \nonumber \\
# &= \Delta_r G'^{\circ}_\text{C1} + R{\rm T} \ln \left( \dfrac{ [\text{NAD}^{-}]_x [\text{QH}_2]_x }{ [\text{NADH}^{2-}]_x [\text{Q}]_x} \right) + n_\text{C1} F \Delta \Psi - R{\rm T} \ln \left( \dfrac{ [\text{H}^{+}]_x }{ [\text{H}^{+}]_c } \right)^{n_{\text{C1}}},
# ```
# where
# ```{math}
# \Delta_r G'^\circ_\text{C1} = \Delta_r G^\circ_\text{C1} - R \text{T} \ln ( [\text{H}^+]_x )
# ```
# is the apparent Gibbs energy for the reaction in Equation {eq}`reaction_C1`. The apparent equilibrium constant is
# ```{math}
# :label: Kapp_C1
# K'_{eq,\text{C1}} = \left(\dfrac{ [\text{NAD}^{-}]_x [\text{QH}_2]_x }{ [\text{NADH}^{2-}]_x [\text{Q}]_x} \right)_{eq} = \exp \left\{ \dfrac{ - ( \Delta_r G'^\circ_\text{C1} + n_\text{C1} F \Delta \Psi) }{ R \text{T}} \right\} \left( \dfrac{ [\text{H}^{+}]_x }{ [\text{H}^{+}]_c } \right)^{n_\text{C1}}.
# ```
#
# To simulate the flux of complex I, $J_{\text{C1}} \ \text{(mol s}^{-1} \text{ (L mito)}^{-1})$, across the IMM by mass-action kinetics, we have
# ```{math}
# :label: J_C1
# J_{\text{C1}} = X_{\text{C1}} \left( K_{eq,\text{C1}}^\prime [\text{NADH}^{2-}]_x [\text{Q}]_x - [\text{NAD}^{-}]_x [\text{QH}_2]_x \right),
# ```
# where $X_\text{C1} \ \text{(mol s}^{-1} \text{ (L mito)}^{-1})$ is the rate constant. {numref}`table-ETC` lists the constants for complex I.
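# The following self-contained sketch evaluates Equations {eq}`Kapp_C1` and {eq}`J_C1` at one set of example values (the `_ex`-suffixed names and concentrations are illustrative assumptions only; the full simulation below computes these quantities from the state variables).
import numpy as np
R_ex, T_ex, F_ex = 8.314, 310.15, 96485            # J (mol K)**(-1), K, C mol**(-1)
n_C1_ex, X_C1_ex, DrGo_C1_ex = 4, 1.0e4, -109680   # stoichiometry, rate constant, J mol**(-1)
H_x_ex, H_c_ex, DPsi_ex = 10**(-7.4), 10**(-7.2), 0.175
DrGapp_C1_ex = DrGo_C1_ex - R_ex*T_ex*np.log(H_x_ex)                 # apparent Gibbs energy
Kapp_C1_ex = np.exp(-(DrGapp_C1_ex + n_C1_ex*F_ex*DPsi_ex)/(R_ex*T_ex)) * (H_x_ex/H_c_ex)**n_C1_ex
NADH_ex, NAD_ex, Q_ex, QH2_ex = 2.0e-3, 1.0e-3, 1.2e-3, 0.15e-3      # example concentrations (M)
J_C1_ex = X_C1_ex * (Kapp_C1_ex * NADH_ex * Q_ex - NAD_ex * QH2_ex)  # mass-action flux
print('Example J_C1 (mol s**(-1) (L mito)**(-1)):', J_C1_ex)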
# ### ETC complex III
#
#
# The reaction catalyzed by complex III reduces two cytochrome c proteins for every $\text{QH}_2$ oxidized
# ```{math}
# :label: reaction_C3
# (\text{QH}_2)_x + 2 \ (\text{c}_{ox}^{3+})_i + n_\text{C3} (\text{H}^+)_x \rightleftharpoons (\text{Q})_x + 2 \ (\text{c}_{red}^{2+})_i + 2 \ (\text{H}^{+})_c + n_\text{C3} (\text{H}^+)_c,
# ```
# where $\text{c}_{ox}^{3+}$ and $\text{c}_{red}^{2+}$ are the oxidized and reduced cytochrome c species and the subscript $i$ indicates that cytochrome c is confined to the IMS. This reaction is coupled with the transport of $n_{\text{C3}} = 2$ protons from the matrix to the cytosol against the electrochemical gradient. Thus, the Gibbs energy for the overall reaction given in Equation {eq}`reaction_C3` is
# ```{math}
# \Delta G_{\text{C3}} &= \Delta_r G_\text{C3} - n_\text{C3} \Delta G_\text{H} \nonumber \\
# &= \Delta_r G_{\text{C3}}^\circ + R{\rm T} \ln \left( \dfrac{ [\text{Q}]_x [\text{c}_{red}^{2+}]_i^2 }{ [\text{QH}_2]_x [\text{c}_{ox}^{3+}]_i^2} \cdot [\text{H}^{+}]_c^2 \right) + n_\text{C3} F \Delta \Psi -
# R{\rm T} \ln \left( \dfrac{ [\text{H}^{+}]_x }{ [\text{H}^{+}]_c} \right)^{n_\text{C3}} \nonumber \\
# &= \Delta_r G'^\circ_\text{C3} + R{\rm T} \ln \left( \dfrac{ [\text{Q}]_x [\text{c}_{red}^{2+}]_i^2 }{ [\text{QH}_2]_x [\text{c}_{ox}^{3+}]_i^2}\right) + n_\text{C3} F \Delta \Psi -
# R{\rm T} \ln \left( \dfrac{ [\text{H}^{+}]_x }{ [\text{H}^{+}]_c} \right)^{n_\text{C3}},
# ```
# where
# ```{math}
# \Delta_r G'^\circ_\text{C3} = \Delta_r G^\circ_\text{C3} + 2 R \text{T} \ln ([\text{H}^+]_c)
# ```
# is the apparent Gibbs energy for complex III. The apparent equilibrium constant is
# ```{math}
# :label: Kapp_C3
# K_{eq,\text{C3}}^\prime = \left( \dfrac{ [\text{Q}]_x [\text{c}_{red}^{2+}]_i^2 }{ [\text{QH}_2]_x [\text{c}_{ox}^{3+}]_i^2 } \right)_{eq} = \exp \left\{ \dfrac{ -(\Delta_r G'^\circ_\text{C3} + n_\text{C3} F
# \Delta \Psi )}{ R \text{T}} \right\} \left( \dfrac{ [\text{H}^{+}]_x}{ [\text{H}^{+}]_c} \right)^{n_\text{C3}}.
# ```
#
# To simulate the flux of complex III, $J_\text{C3} \ \text{(mol s}^{-1} \text{ (L mito)}^{-1})$, by mass-action kinetics, we have
# ```{math}
# :label: J_C3
# J_{\text{C3}} = X_{\text{C3}} \left( K_{eq,\text{C3}}^\prime [\text{QH}_2]_x [\text{c}_{ox}^{3+}]_i^2 - [\text{Q}]_x [\text{c}_{red}^{2+}]_i^2 \right),
# ```
# where $X_{\text{C3}} \ \text{(mol s}^{-1} \text{ (L mito)}^{-1})$ is the rate constant.
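# An analogous standalone sketch for Equations {eq}`Kapp_C3` and {eq}`J_C3`, again using purely illustrative concentrations (note the squared cytochrome c terms).
import numpy as np
R_ex, T_ex, F_ex = 8.314, 310.15, 96485
n_C3_ex, X_C3_ex, DrGo_C3_ex = 2, 1.0e6, 46690     # stoichiometry, rate constant, J mol**(-1)
H_x_ex, H_c_ex, DPsi_ex = 10**(-7.4), 10**(-7.2), 0.175
DrGapp_C3_ex = DrGo_C3_ex + 2*R_ex*T_ex*np.log(H_c_ex)
Kapp_C3_ex = np.exp(-(DrGapp_C3_ex + n_C3_ex*F_ex*DPsi_ex)/(R_ex*T_ex)) * (H_x_ex/H_c_ex)**n_C3_ex
QH2_ex, Q_ex = 0.15e-3, 1.2e-3                     # example ubiquinone pool split (M)
cred_ex, cox_ex = 0.3e-3, 2.4e-3                   # example cytochrome c pool split (M)
J_C3_ex = X_C3_ex * (Kapp_C3_ex * QH2_ex * cox_ex**2 - Q_ex * cred_ex**2)
print('Example J_C3 (mol s**(-1) (L mito)**(-1)):', J_C3_ex)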
# ### ETC complex IV
#
# In the final step of the ETC catalyzed by complex IV, electrons are transferred from cytochrome c to oxygen, forming water
# ```{math}
# :label: reaction_C4
# 2 \ (\text{c}_{red}^{2+})_i + \frac{1}{2} (\text{O}_2)_x + 2 \ (\text{H}^{+})_c + n_\text{C4} (\text{H}^{+})_x \rightleftharpoons 2 \ (\text{c}^{3+}_{ox})_i + \text{H}_2\text{O} + n_\text{C4} (\text{H}^{+})_c,
# ```
# coupled with the translocation of $n_\text{C4} = 4$ protons across the IMM against the electrochemical gradient. The Gibbs energy of the reaction in Equation {eq}`reaction_C4` is
# ```{math}
# \Delta G_\text{C4} &= \Delta_r G_\text{C4} - n_\text{C4} \Delta G_{\rm H} \nonumber \\
# &= \Delta_r G_{\text{C4}}^o + R{\rm T} \ln \left( \dfrac{ [\text{c}^{3+}_{ox}]^2_i }{ [\text{c}^{2+}_{red}]^2_i [\text{O}_2]^{1/2}_x } \cdot \dfrac{1}{[\text{H}^{+}]^2_c}\right) + n_{\text{C4}} F \Delta \Psi - R{\rm T} \ln \left( \dfrac{ [\text{H}^{+}]_x }{ [\text{H}^{+}]_c} \right)^{n_{\text{C4}}} \nonumber \\
# &= \Delta_r G'^\circ_{\text{C4}} + R{\rm T} \ln \left( \dfrac{ [\text{c}^{3+}_{ox}]^2_i }{ [\text{c}^{2+}_{red}]^2_i [\text{O}_2]^{1/2}_x } \right) + n_{\text{C4}} F \Delta \Psi - R{\rm T} \ln \left( \dfrac{ [\text{H}^{+}]_x }{ [\text{H}^{+}]_c} \right)^{n_{\text{C4}}},
# ```
# where
# ```{math}
# \Delta_r G'^\circ_\text{C4} = \Delta_r G^\circ_\text{C4} - 2 R \text{T} \ln([\text{H}^+]_c)
# ```
# is the apparent Gibbs energy for complex IV. The apparent equilibrium constant is
# ```{math}
# :label: Kapp_C4
# K_{eq,\text{C4}}^\prime = \left( \dfrac{ [\text{c}^{3+}_{ox}]_i^2 }{ [\text{c}^{2+}_{red}]_i^2 [\text{O}_2]_x^{1/2} } \right)_{eq} = \exp \left\{ \dfrac{-(\Delta_r G'^\circ_\text{C4} + n_\text{C4} F \Delta \Psi )}{ R \text{T} } \right\} \left( \dfrac{ [\text{H}^+]_x }{[\text{H}^+]_c} \right)^{n_\text{C4}}.
# ```
#
# To simulate the flux of complex IV, $J_{\text{C4}} \ \text{(mol s}^{-1} \text{ (L mito)}^{-1})$, we use mass-action kinetics and account for binding of oxygen to complex IV as
# ```{math}
# :label: J_C4
# J_{\text{C4}} = X_{\text{C4}} \left( \dfrac{1}{1 + \frac{k_{\text{O}_2}}{[\text{O}_2]
# }} \right) \left( \left(K_{eq,\text{C4}}^\prime\right)^{1/2} [\text{c}_{red}^{2+}]_i [\text{O}_2]_x^{1/4} - [\text{c}_{ox}^{3+}]_i \right),
# ```
# where $X_{\text{C4}} \ \text{(mol s}^{-1} \text{ (L mito)}^{-1})$ is the rate constant and $k_{\text{O}_2}$ is the $\text{O}_2$ binding constant ({numref}`table-ETC`). For this study, we assume an $\text{O}_2$ partial pressure of $25 \ \text{mmHg}$.
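# A corresponding sketch for Equations {eq}`Kapp_C4` and {eq}`J_C4`, including the oxygen-binding factor; the oxygen concentration is obtained from the assumed 25 mmHg partial pressure and the solubility value used in the code cell below (all `_ex` values are illustrative).
import numpy as np
R_ex, T_ex, F_ex = 8.314, 310.15, 96485
n_C4_ex, X_C4_ex, DrGo_C4_ex = 4, 0.0125, -202160  # stoichiometry, rate constant, J mol**(-1)
k_O2_ex = 1.2e-4                                   # O2 binding constant, mol (L matrix water)**(-1)
H_x_ex, H_c_ex, DPsi_ex = 10**(-7.4), 10**(-7.2), 0.175
O2_ex = 1.74e-6 * 25                               # [O2] from 25 mmHg and solubility 1.74e-6 M mmHg**(-1)
DrGapp_C4_ex = DrGo_C4_ex - 2*R_ex*T_ex*np.log(H_c_ex)
Kapp_C4_ex = np.exp(-(DrGapp_C4_ex + n_C4_ex*F_ex*DPsi_ex)/(R_ex*T_ex)) * (H_x_ex/H_c_ex)**n_C4_ex
cred_ex, cox_ex = 0.3e-3, 2.4e-3                   # example cytochrome c pool split (M)
J_C4_ex = X_C4_ex * (1/(1 + k_O2_ex/O2_ex)) * (Kapp_C4_ex**0.5 * cred_ex * O2_ex**0.25 - cox_ex)
print('Example J_C4 (mol s**(-1) (L mito)**(-1)):', J_C4_ex)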
# The apparent equilibrium constants for the $\text{F}_0\text{F}_1$ ATPase (Equation {eq}`Kapp_F`), complex I (Equation {eq}`Kapp_C1`), complex III (Equation {eq}`Kapp_C3`), and complex IV (Equation {eq}`Kapp_C4`) depend on $\Delta\Psi$. In the model developed in this section, since $\Delta\Psi$ is a variable, these apparent equilibrium constants are also variables. Thus, the flux expressions in Equations {eq}`J_F`, {eq}`J_C1`, {eq}`J_C3`, and {eq}`J_C4` depend on $\Delta \Psi$. These expressions may be compared to a generalized formulation of rate laws for reversible enzyme-catalyzed reactions {cite}`Noor2013`, where in this case the saturating dependence of flux on substrate concentrations is not accounted for. These expressions may also be compared to the more detailed representations of the underlying catalytic mechanisms used by Bazil et al. {cite}`Bazil2016`. The Bazil et al. model also accounts for side reactions generating reactive oxygen species that are not accounted for here.
# ### Dehydrogenase activity
#
# In this model, we do not explicitly simulate the reactions of the TCA cycle or beta oxidation, but rather the combined action of NADH-producing reactions, that is,
# ```{math}
# (\text{NAD}^{-})_x \rightleftharpoons (\text{NADH}^{2-})_x + (\text{H}^{+})_x
# ```
# From Beard {cite}`Beard2005`, we represent a Pi dependence of NADH production using the following phenomenological expression
# ```{math}
# :label: J_DH
# J_{\text{DH}} = X_{\text{DH}} \left( r [\text{NAD}^-] - [\text{NADH}^{2-}] \right) \left( \dfrac{ 1 + [\Sigma \text{Pi}]_x/k_{\text{Pi},1} }{ 1 + [\Sigma \text{Pi}]_x/k_{\text{Pi},2} } \right),
# ```
# where $X_\text{DH} \text{ (mol s}^{-1} \text{ (L mito)}^{-1})$ is the dehydrogenase activity and $r$ (dimensionless), $k_{\text{Pi},1} \ \text{(mol (L matrix water)}^{-1})$, and $k_{\text{Pi},2} \ \text{(mol (L matrix water)}^{-1})$ are constants. Parameter values are listed in {numref}`table-ETC`. The dependence of NADH production on Pi reflects the Pi-dependence of the substrate-level phosphorylation step of the TCA cycle (the succinyl coenzyme-A synthetase reaction) and the fact that Pi drives substrate oxidation via the dicarboxylate carrier.
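# To make the Pi dependence in Equation {eq}`J_DH` concrete, the sketch below evaluates the activation factor and the resulting flux at a low and a high matrix Pi level (all values are illustrative; the constants match those used in the code cell below).
X_DH_ex, r_ex = 0.1732, 6.8385
k_Pi1_ex, k_Pi2_ex = 4.659e-4, 6.578e-4            # mol (L matrix water)**(-1)
NAD_ex, NADH_ex = 1.0e-3, 2.0e-3                   # example NAD pool split (M)
for sumPi_ex in (0.3e-3, 3.0e-3):                  # low and high matrix Pi (M)
    activation_ex = (1 + sumPi_ex/k_Pi1_ex) / (1 + sumPi_ex/k_Pi2_ex)
    J_DH_ex = X_DH_ex * (r_ex*NAD_ex - NADH_ex) * activation_ex
    print('Pi =', sumPi_ex, 'M; activation factor =', round(activation_ex, 3), '; J_DH =', J_DH_ex)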
# ### Proton leak
#
# To simulate proton leak across the IMM, we adopt the Goldman-Hodgkin-Katz formulation from Wu et al. {cite}`Wu2008`,
# ```{math}
# :label: J_H
# J_{\text{H}} = X_\text{H} \left( [\text{H}^{+}]_c \ e^{\phi/2} - [\text{H}^{+}]_x \ e^{-\phi/2} \right)
# ```
# where $X_\text{H} = 1000 \ \text{mol s}^{-1} \text{ (L mito)}^{-1}$ is the proton leak activity and $\phi$ is given in Equation {eq}`phi`. Even though the kinetic constants $X_\text{F}$ and $X_\text{H}$ attain equal values here, under the ATP-producing conditions the proton flux through the $\text{F}_0\text{F}_1$ ATPase ($J_\text{F}$, Equation {eq}`J_F`) is an order of magnitude greater than the proton leak flux ($J_\text{H}$, Equation {eq}`J_H`).
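# A minimal sketch of the leak flux in Equation {eq}`J_H` at a representative potential (illustrative values only).
import numpy as np
R_ex, T_ex, F_ex = 8.314, 310.15, 96485
X_H_ex = 1.0e3                                     # mol s**(-1) (L mito)**(-1)
H_x_ex, H_c_ex, DPsi_ex = 10**(-7.4), 10**(-7.2), 0.175
phi_ex = F_ex * DPsi_ex / (R_ex * T_ex)
J_H_ex = X_H_ex * (H_c_ex * np.exp(phi_ex/2) - H_x_ex * np.exp(-phi_ex/2))
print('Example J_H (mol s**(-1) (L mito)**(-1)):', J_H_ex)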
# ```{list-table} Respiratory complex and inorganic phosphate transport parameters
# :header-rows: 1
# :name: table-ETC
#
# * - Parameter
# - Units
# - Description
# - Value
# - Source
# * - $n_{\text{C}1}$
# -
# - Protons translocated by complex I
# - $4 $
# - {cite}`Nicholls2013`
# * - $n_{\text{C}3}$
# -
# - Protons translocated by complex III
# - $2 $
# - {cite}`Nicholls2013`
# * - $n_{\text{C}4}$
# -
# - Protons translocated by complex IV
# - $4 $
# - {cite}`Nicholls2013`
# * - $X_\text{C1}$
# - mol s$^{-1}$ (L mito)$^{-1}$
# - Complex I rate constant
# - $1\text{e}4$
# -
# * - $X_\text{C3}$
# - mol s$^{-1}$ (L mito)$^{-1}$
# - Complex III rate constant
# - $1\text{e}6$
# -
# * - $X_\text{C4}$
# - mol s$^{-1}$ (L mito)$^{-1}$
# - Complex IV rate constant
# - $0.0125$
# -
# * - $X_\text{DH}$
# - mol s$^{-1}$ (L mito)$^{-1}$
# - NADH dehydrogenase rate constant
# - $0.1732$
# -
# * - $X_\text{H}$
# - mol s$^{-1}$ (L mito)$^{-1}$
# - Proton leak activity
# - $1\text{e}3$
# -
# * - $r$
# -
# - Dehydrogenase parameter
# - $6.8385 $
# -
# * - $k_{\text{Pi},1}$
# - mmol (L matrix water)$^{-1}$
# - Dehydrogenase parameter
# - $0.466 $
# -
# * - $k_{\text{Pi},2}$
# - mmol (L matrix water)$^{-1}$
# - Dehydrogenase parameter
# - $0.658 $
# -
# * - $k_{\text{PiC}}$
# - mmol (L cell)$^{-1}$
# - PiC constant
# - $1.61$
# - {cite}`Bazil2016`
# * - $k_{\text{O}_2}$
# - $\mu$mol (L matrix water)$^{-1}$
# - O$_2$ binding constant
# - $120$
# - {cite}`Wu2007`
# * - $\Delta_r G^o_\text{C1}$
# - kJ mol$^{-1}$
# - Gibbs energy of reaction for complex I
# - $ -109.7 $
# - {cite}`Li2011`
# * - $\Delta_r G^o_\text{C3}$
# - kJ mol$^{-1}$
# - Gibbs energy of reaction for complex III
# - $46.7 $
# - {cite}`Li2011`
# * - $\Delta_r G^o_\text{C4}$
# - kJ mol$^{-1}$
# - Gibbs energy of reaction for complex IV
# - $ -202.2 $
# - {cite}`Li2011`
# * - $[\text{NAD}]_{tot}$
# - mmol (L matrix water)$^{-1}$
# - Total NAD pool in the matrix
# - $2.97$
# - {cite}`Wu2007`
# * - $[\text{Q}]_{tot}$
# - mmol (L matrix water)$^{-1}$
# - Total Q pool in the matrix
# - $1.35$
# - {cite}`Wu2007`
# * - $[\text{c}]_{tot}$
# - mmol (L IM water)$^{-1}$
# - Total cytochrome c pool in the IMS
# - $2.70$
# - {cite}`Wu2007`
# ```
# ## Simulating ATP synthesis in vitro
#
# The flux expressions developed above may be used to simulate mitochondrial ATP synthesis in vitro, governed by the system of equations
# ```{math}
# :label: system-singlemito
# \left\{
# \renewcommand{\arraystretch}{2.5}
# \begin{array}{rl}
# \dfrac{ {\rm d} \Delta \Psi }{{\rm d} t} & = ( n_\text{C1} J_\text{C1} + n_\text{C3} J_\text{C3} + n_\text{C4} J_\text{C4} - n_\text{F} J_\text{F} - J_\text{ANT} - J_\text{H}) / C_m \\
# \hline
# \dfrac{ {\rm d} [\Sigma \text{ATP}]_x }{{\rm d} t} &= (J_\text{F} - J_\text{ANT} ) / W_x \\
# \dfrac{ {\rm d} [\Sigma \text{ADP}]_x }{{\rm d} t} &= (-J_\text{F} + J_\text{ANT}) / W_x \\
# \dfrac{ {\rm d} [\Sigma \text{Pi}]_x }{{\rm d} t} &= (-J_\text{F} + J_\text{PiC}) / W_x \quad \text{matrix species}\\
# \dfrac{ {\rm d} [\text{NADH}^{2-}]_x }{{\rm d} t} &= (J_\text{DH} - J_\text{C1}) / W_x \\
# \dfrac{ {\rm d} [\text{QH}_2]_x }{{\rm d} t} &= (J_\text{C1} - J_\text{C3}) / W_x \\
# \hline
# \dfrac{ {\rm d} [\text{c}_{red}^{2+}]_i}{{\rm d} t} &= 2(J_\text{C3} - J_\text{C4}) / W_i \quad \text{intermembrane space species}\\
# \hline
# \dfrac{ {\rm d} [\Sigma \text{ATP}]_c }{{\rm d} t} &= (V_{m2c} J_\text{ANT} - J_\text{AtC} )/ W_c \\
# \dfrac{ {\rm d} [\Sigma \text{ADP}]_c }{{\rm d} t} &= (-V_{m2c} J_\text{ANT} + J_\text{AtC} ) / W_c \quad \text{cytosol species}\\
# \dfrac{ {\rm d} [\Sigma \text{Pi}]_c }{{\rm d} t} &= (- V_{m2c} J_\text{PiC} + J_\text{AtC}) / W_c,
# \end{array}
# \renewcommand{\arraystretch}{1}
# \right.
# ```
# where the fluxes $J_\text{F}$ (Equation {eq}`J_F`), $J_\text{ANT}$ (Equation {eq}`J_ANT`), $J_\text{PiC}$ (Equation {eq}`J_PiC`), $J_\text{C1}$ (Equation {eq}`J_C1`), $J_\text{C3}$ (Equation {eq}`J_C3`), $J_\text{C4}$ (Equation {eq}`J_C4`), $J_\text{DH}$ (Equation {eq}`J_DH`), and $J_\text{H}$ (Equation {eq}`J_H`) are given above and the constants are listed in Tables {numref}`table-biophysicalconstants` and {numref}`table-ETC`. Here, we incorporate a constant ATP consumption flux, $J_\text{AtC} \ \text{(mol s}^{-1} \text{ (L cyto)}^{-1})$, that is
# ```{math}
# J_\text{AtC} = X_\text{AtC}/V_c
# ```
# where $V_c$ is the ratio of the volume of cytosol per L cell. $X_\text{AtC}$ is the ATP consumption rate expressed in units of mmol s$^{-1}$ (L cell)$^{-1}$. Equation {eq}`system-singlemito` does not explicitly treat matrix or external $\text{pH}$, $\text{K}^+$, $\text{Mg}^{2+}$, or $\text{O}_2$ as variables. Reasonable clamped concentrations for these variables are ${\rm pH}_x = 7.4$, ${\rm pH}_c = 7.2$, $[\text{Mg}^{2+}]_x = 1 \ \text{mmol (L matrix water)}^{-1}$, $[\text{Mg}^{2+}]_c = 1 \ \text{mmol (L cyto water)}^{-1}$, $[\text{K}^{+}]_x = 100 \ \text{mmol (L matrix water)}^{-1}$, and $[K^{+}]_c = 140 \ \text{mmol (L cyto water)}^{-1}$, and $\text{O}_2$ partial pressure of $25 \ \text{mmHg}$. Respiratory chain reactants are determined from a total concentration of metabolites within the mitochondrion, that is, the total pools for NAD, cytochrome c, and Q species are
# ```{math}
# [\text{NAD}]_{tot} &= [\text{NAD}^-]_x + [\text{NADH}^{2-}]_x \\
# [\text{c}]_{tot} &= [\text{c}^{2+}_{red}]_i + [\text{c}^{3+}_{ox}]_i, \quad \text{and} \\
# [\text{Q}]_{tot} &= [\text{Q}]_x + [\text{QH}_2]_x.
# ```
# The pools are $[\text{NAD}]_{tot} = 2.97 \ \text{mmol (L matrix water)}^{-1}$, $[\text{c}]_{tot} = 2.7 \ \text{mmol (L IMS water)}^{-1}$, and $[\text{Q}]_{tot} = 1.35$ $\text{mmol}~\text{(L matrix water)}^{-1}$. The finite nature of these metabolite pools constrains the maximal concentrations of substrates available for complexes I, III, and IV. Thus, although the simple mass-action models for these complexes do not account for saturable enzyme kinetics, the fluxes are limited by the availability of substrates. Initial conditions are set under the assumption that the TAN for both the matrix and cytosol is $10 \ \text{mM}$, but the ATP/ADP ratio is $<$$1$ in the matrix and $\sim$$100$ in the cytosol. The following code simulates in vitro mitochondrial function without ATP consumption in the external (cytosolic space).
#
# In[4]:
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import solve_ivp
###### Constants defining metabolite pools ######
# Volume fractions and water space fractions
V_c = 0.6601 # cytosol volume fraction # L cyto (L cell)**(-1)
V_m = 0.2882 # mitochondrial volume fraction # L mito (L cell)**(-1)
V_m2c = V_m / V_c # mito to cyto volume ratio # L mito (L cuvette)**(-1)
W_c = 0.8425 # cytosol water space # L cyto water (L cyto)**(-1)
W_m = 0.7238 # mitochondrial water space # L mito water (L mito)**(-1)
W_x = 0.9*W_m # matrix water space # L matrix water (L mito)**(-1)
W_i = 0.1*W_m # intermembrane water space # L IM water (L mito)**(-1)
# Total pool concentrations
NAD_tot = 2.97e-3 # NAD+ and NADH conc # mol (L matrix water)**(-1)
Q_tot = 1.35e-3 # Q and QH2 conc # mol (L matrix water)**(-1)
c_tot = 2.7e-3 # cytochrome c ox and red conc # mol (L IM water)**(-1)
# Membrane capacitance # mol (V * L mito)**(-1)
Cm = 3.1e-3
###### Set fixed pH, cation concentrations, and O2 partial pressure ######
# pH
pH_x = 7.40
pH_c = 7.20
# K+ concentrations
K_x = 100e-3 # mol (L matrix water)**(-1)
K_c = 140e-3 # mol (L cyto water)**(-1)
# Mg2+ concentrations
Mg_x = 1.0e-3 # mol (L matrix water)**(-1)
Mg_c = 1.0e-3 # mol (L cyto water)**(-1)
# Oxygen partial pressure
PO2 = 25 # mmHg
###### Parameter vector ######
X_DH = 0.1732
X_C1 = 1.0e4
X_C3 = 1.0e6
X_C4 = 0.0125
X_F = 1.0e3
E_ANT = 0.325
E_PiC = 5.0e6
X_H = 1.0e3
X_AtC = 0
activity_array = np.array([X_DH, X_C1, X_C3, X_C4, X_F, E_ANT, E_PiC, X_H, X_AtC])
###### Initial Conditions ######
# Membrane Potential
DPsi_0 = 175/1000 # V
# Matrix species
sumATP_x_0 = 0.5e-3 # mol (L matrix water)**(-1)
sumADP_x_0 = 9.5e-3 # mol (L matrix water)**(-1)
sumPi_x_0 = 1.0e-3 # mol (L matrix water)**(-1)
NADH_x_0 = 2/3 * NAD_tot # mol (L matrix water)**(-1)
QH2_x_0 = 0.1 * Q_tot # mol (L matrix water)**(-1)
# IMS species
cred_i_0 = 0.1 * c_tot # mol (L IMS water)**(-1)
# Cytosolic species
sumATP_c_0 = 0 # mol (L cyto water)**(-1)
sumADP_c_0 = 10e-3 # mol (L cyto water)**(-1)
sumPi_c_0 = 10e-3 # mol (L cyto water)**(-1)
X_0 = np.array([DPsi_0, sumATP_x_0, sumADP_x_0, sumPi_x_0, NADH_x_0, QH2_x_0, cred_i_0, sumATP_c_0, sumADP_c_0, sumPi_c_0])
def dXdt(t, X, activity_array, solve_ode):
# Unpack variables
DPsi, sumATP_x,sumADP_x, sumPi_x, NADH_x, QH2_x, cred_i, sumATP_c, sumADP_c, sumPi_c = X
X_DH, X_C1, X_C3, X_C4, X_F, E_ANT, E_PiC, X_H, X_AtC = activity_array
# Hydrogen ion concentration
H_x = 10**(-pH_x) # mol (L matrix water)**(-1)
H_c = 10**(-pH_c) # mol (L cuvette water)**(-1)
# Oxygen concentration
a_3 = 1.74e-6 # oxygen solubility in cuvette # mol (L matrix water * mmHg)**(-1)
O2_x = a_3*PO2 # mol (L matrix water)**(-1)
# Thermochemical constants
R = 8.314 # J (mol K)**(-1)
T = 37 + 273.15 # K
F = 96485 # C mol**(-1)
# Proton motive force parameters (dimensionless)
n_F = 8/3
n_C1 = 4
n_C3 = 2
n_C4 = 4
# Dissociation constants
K_MgATP = 10**(-3.88)
K_HATP = 10**(-6.33)
K_KATP = 10**(-1.02)
K_MgADP = 10**(-3.00)
K_HADP = 10**(-6.26)
K_KADP = 10**(-0.89)
K_MgPi = 10**(-1.66)
K_HPi = 10**(-6.62)
K_KPi = 10**(-0.42)
# Other concentrations computed from the state variables:
NAD_x = NAD_tot - NADH_x # mol (L matrix water)**(-1)
Q_x = Q_tot - QH2_x # mol (L matrix water)**(-1)
cox_i = c_tot - cred_i # mol (L matrix water)**(-1)
## Binding polynomials
# Matrix species # mol (L mito water)**(-1)
PATP_x = 1 + H_x/K_HATP + Mg_x/K_MgATP + K_x/K_KATP
PADP_x = 1 + H_x/K_HADP + Mg_x/K_MgADP + K_x/K_KADP
PPi_x = 1 + H_x/K_HPi + Mg_x/K_MgPi + K_x/K_KPi
# Cytosol species # mol (L cuvette water)**(-1)
PATP_c = 1 + H_c/K_HATP + Mg_c/K_MgATP + K_c/K_KATP
PADP_c = 1 + H_c/K_HADP + Mg_c/K_MgADP + K_c/K_KADP
PPi_c = 1 + H_c/K_HPi + Mg_c/K_MgPi + K_c/K_KPi
## Unbound species
# Matrix species
ATP_x = sumATP_x / PATP_x # [ATP4-]_x
ADP_x = sumADP_x / PADP_x # [ADP3-]_x
Pi_x = sumPi_x / PPi_x # [HPO42-]_x
# Cytosolic species
ATP_c = sumATP_c / PATP_c # [ATP4-]_c
ADP_c = sumADP_c / PADP_c # [ADP3-]_c
Pi_c = sumPi_c / PPi_c # [HPO42-]_c
###### NADH Dehydrogenase ######
# Constants
r = 6.8385
k_Pi1 = 4.659e-4 # mol (L matrix water)**(-1)
k_Pi2 = 6.578e-4 # mol (L matrix water)**(-1)
# Flux
J_DH = X_DH * (r * NAD_x - NADH_x) * ((1 + sumPi_x / k_Pi1) / (1+sumPi_x / k_Pi2))
###### Complex I ######
# NADH_x + Q_x + 5H+_x <-> NAD+_x + QH2_x + 4H+_i + 4DPsi
# Gibbs energy (J mol**(-1))
DrGo_C1 = -109680
DrGapp_C1 = DrGo_C1 - R * T * np.log(H_x)
# Apparent equilibrium constant
Kapp_C1 = np.exp( -(DrGapp_C1 + n_C1 * F * DPsi) / (R * T)) * ((H_x / H_c)**n_C1)
# Flux (mol (s * L mito)**(-1))
J_C1 = X_C1 * (Kapp_C1 * NADH_x * Q_x - NAD_x * QH2_x)
###### Complex III ######
# QH2_x + 2cuvetteC(ox)3+_i + 2H+_x <-> Q_x + 2cuvetteC(red)2+_i + 4H+_i + 2DPsi
# Gibbs energy (J mol**(-1))
DrGo_C3 = 46690
DrGapp_C3 = DrGo_C3 + 2 * R * T * np.log(H_c)
# Apparent equilibrium constant
Kapp_C3 = np.exp(-(DrGapp_C3 + n_C3 * F * DPsi) / (R * T)) * (H_x / H_c)**n_C3
# Flux (mol (s * L mito)**(-1))
J_C3 = X_C3 * (Kapp_C3 * cox_i**2 * QH2_x - cred_i**2 * Q_x)
###### Complex IV ######
# 2 cytoC(red)2+_i + 0.5O2_x + 4H+_x <-> cytoC(ox)3+_x + H2O_x + 2H+_i +2DPsi
# Constant
k_O2 = 1.2e-4 # mol (L matrix water)**(-1)
# Gibbs energy (J mol**(-1))
DrGo_C4 = -202160 # J mol**(-1)
DrGapp_C4 = DrGo_C4 - 2 * R * T * np.log(H_c)
# Apparent equilibrium constant
Kapp_C4 = np.exp(-(DrGapp_C4 + n_C4 * F * DPsi) / (R * T)) * (H_x / H_c)**n_C4
# Flux (mol (s * L mito)**(-1))
J_C4 = X_C4 *(Kapp_C4**0.5 * cred_i * O2_x**0.25 - cox_i) * (1 / (1 + k_O2 / O2_x))
###### F1F0-ATPase ######
# ADP3-_x + HPO42-_x + H+_x + n_A*H+_i <-> ATP4- + H2O + n_A*H+_x
# Gibbs energy (J mol**(-1))
DrGo_F = 4990
DrGapp_F = DrGo_F + R * T * np.log( H_x * PATP_x / (PADP_x * PPi_x))
# Apparent equilibrium constant
Kapp_F = np.exp( (DrGapp_F + n_F * F * DPsi ) / (R * T)) * (H_c / H_x)**n_F
# Flux (mol (s * L mito)**(-1))
J_F = X_F * (Kapp_F * sumADP_x * sumPi_x - sumATP_x)
###### ANT ######
# ATP4-_x + ADP3-_i <-> ATP4-_i + ADP3-_x
# Constants
del_D = 0.0167
del_T = 0.0699
k2o_ANT = 9.54/60 # s**(-1)
k3o_ANT = 30.05/60 # s**(-1)
K0o_D = 38.89e-6 # mol (L cuvette water)**(-1)
K0o_T = 56.05e-6 # mol (L cuvette water)**(-1)
A = +0.2829
B = -0.2086
C = +0.2372
phi = F * DPsi / (R * T)
# Reaction rates
k2_ANT = k2o_ANT * np.exp((A*(-3) + B*(-4) + C)*phi)
k3_ANT = k3o_ANT * np.exp((A*(-4) + B*(-3) + C)*phi)
# Dissociation constants
K0_D = K0o_D * np.exp(3*del_D*phi)
K0_T = K0o_T * np.exp(4*del_T*phi)
q = k3_ANT * K0_D * np.exp(phi) / (k2_ANT * K0_T)
term1 = k2_ANT * ATP_x * ADP_c * q / K0_D
term2 = k3_ANT * ADP_x * ATP_c / K0_T
num = term1 - term2
den = (1 + ATP_c/K0_T + ADP_c/K0_D) * (ADP_x + ATP_x * q)
# Flux (mol (s * L mito)**(-1))
J_ANT = E_ANT * num / den
###### H+-PI2 cotransporter ######
# H2PO42-_x + H+_x = H2PO42-_c + H+_c
# Constant
k_PiC = 1.61e-3 # mol (L cuvette)**(-1)
# H2P04- species
HPi_c = Pi_c * (H_c / K_HPi)
HPi_x = Pi_x * (H_x / K_HPi)
# Flux (mol (s * L mito)**(-1))
J_PiC = E_PiC * (H_c * HPi_c - H_x * HPi_x) / (k_PiC + HPi_c)
###### H+ leak ######
# Flux (mol (s * L mito)**(-1))
J_H = X_H * (H_c * np.exp(phi/2) - H_x * np.exp(-phi/2))
###### ATPase ######
# ATP4- + H2O = ADP3- + PI2- + H+
#Flux (mol (s * L cyto)**(-1))
J_AtC = X_AtC / V_c
###### Differential equations (equation 23) ######
# Membrane potential
dDPsi = (n_C1 * J_C1 + n_C3 * J_C3 + n_C4 * J_C4 - n_F * J_F - J_ANT - J_H) / Cm
# Matrix species
dATP_x = (J_F - J_ANT) / W_x
dADP_x = (-J_F + J_ANT) / W_x
dPi_x = (-J_F + J_PiC) / W_x
dNADH_x = (J_DH - J_C1) / W_x
dQH2_x = (J_C1 - J_C3) / W_x
# IMS species
dcred_i = 2 * (J_C3 - J_C4) / W_i
# Buffer species
dATP_c = ( V_m2c * J_ANT - J_AtC ) / W_c
dADP_c = (-V_m2c * J_ANT + J_AtC ) / W_c
dPi_c = (-V_m2c * J_PiC + J_AtC) / W_c
dX = [dDPsi, dATP_x, dADP_x, dPi_x, dNADH_x, dQH2_x, dcred_i, dATP_c, dADP_c, dPi_c]
# Calculate state-dependent quantities after model is solved
if solve_ode == 1:
return dX
else:
J = np.array([PATP_x, PADP_x, PPi_x, PATP_c, PADP_c, PPi_c, J_DH, J_C1, J_C3, J_C4, J_F, J_ANT, J_PiC, DrGapp_F])
return dX, J
# Time vector
t = np.linspace(0,5,100)
# Solve ODE
results = solve_ivp(dXdt, [0, 5], X_0, method = 'Radau', t_eval=t, args=(activity_array,1))
DPsi, sumATP_x,sumADP_x, sumPi_x, NADH_x, QH2_x, cred_i, sumATP_c, sumADP_c, sumPi_c = results.y
# Plot figures
fig, ax = plt.subplots(1,2, figsize = (10,5))
ax[0].plot(t, sumATP_x*1000, label = '[$\Sigma$ATP]$_x$')
ax[0].plot(t, sumADP_x*1000, label = '[$\Sigma$ADP]$_x$')
ax[0].plot(t, sumPi_x*1000, label = '[$\Sigma$Pi]$_x$')
ax[0].legend(loc="right")
ax[0].set_xlabel('Time (s)')
ax[0].set_ylabel('Concentration (mM)')
ax[0].set_ylim((-.5,10.5))
ax[1].plot(t, sumATP_c*1000, label = '[$\Sigma$ATP]$_c$')
ax[1].plot(t, sumADP_c*1000, label = '[$\Sigma$ADP]$_c$')
ax[1].plot(t, sumPi_c*1000, label = '[$\Sigma$Pi]$_c$')
ax[1].legend(loc="right")
ax[1].set_xlabel('Time (s)')
ax[1].set_ylabel('Concentration (mM)')
ax[1].set_ylim((-.5,10.5))
plt.show()
# **Figure 7:** Steady state solution from Equation {eq}`system-singlemito` for the (a) matrix and (b) cytosol species with $\text{pH}_x = 7.4$ and $\text{pH}_c = 7.2$.
# The above simulations reach a final steady state where the phosphate metabolite concentrations are $[\text{ATP}]_x = 0.9 \ \text{mM}$, $[\text{ADP}]_x = 9.1 \ \text{mM} $, $[\text{Pi}]_x = 0.4 \ \text{mM}$, $[\text{ATP}]_c = 9.9 \ \text{mM}$, $[\text{ADP}]_c = 0.1 \ \text{mM}$, $[\text{Pi}]_c = 0.2 \ \text{mM}$, and the membrane potential is $186 \ \text{mV}$. This state represents a *resting* energetic state with no ATP hydrolysis in the cytosol. The Gibbs energy of ATP hydrolysis associated with this predicted state is $\Delta G_{\rm ATP} = \text{-}70 \ \text{kJ mol}^{-1}$, as calculated below.
# In[5]:
sumATP_c_ss = sumATP_c[-1]
sumADP_c_ss = sumADP_c[-1]
sumPi_c_ss = sumPi_c[-1]
H_c = 10**(-pH_c) # mol (L cuvette water)**(-1)
# Thermochemical constants
R = 8.314 # J (mol K)**(-1)
T = 37 + 273.15 # K
# Dissociation constants
K_MgATP = 10**(-3.88)
K_HATP = 10**(-6.33)
K_KATP = 10**(-1.02)
K_MgADP = 10**(-3.00)
K_HADP = 10**(-6.26)
K_KADP = 10**(-0.89)
K_MgPi = 10**(-1.66)
K_HPi = 10**(-6.62)
K_KPi = 10**(-0.42)
## Binding polynomials
# Cytosol species # mol (L cuvette water)**(-1)
PATP_c = 1 + H_c/K_HATP + Mg_c/K_MgATP + K_c/K_KATP
PADP_c = 1 + H_c/K_HADP + Mg_c/K_MgADP + K_c/K_KADP
PPi_c = 1 + H_c/K_HPi + Mg_c/K_MgPi + K_c/K_KPi
DrGo_ATP = 4990
# Use equation 9 to calculate apparent reference cytosolic Gibbs energy
DrGo_ATP_apparent = DrGo_ATP + R * T * np.log(H_c * PATP_c / (PADP_c * PPi_c))
# Use equation 9 to calculate cytosolic Gibbs energy
DrG_ATP = DrGo_ATP_apparent + R * T * np.log((sumADP_c_ss * sumPi_c_ss / sumATP_c_ss))
print('Cytosolic Gibbs energy of ATP hydrolysis (kJ mol^(-1))')
print(DrG_ATP / 1000)
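# As an additional check on the quoted resting steady state, the final membrane potential and ATP/ADP ratios can be read directly from the arrays unpacked from `results` above.
print('Steady-state membrane potential (mV):', DPsi[-1] * 1000)
print('Matrix ATP/ADP ratio:', sumATP_x[-1] / sumADP_x[-1])
print('Cytosolic ATP/ADP ratio:', sumATP_c[-1] / sumADP_c[-1])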
| 43.961007 | 1,001 | 0.592283 |
7948167d24cc880a73c2ba363dbf18c0796d462c
| 2,875 |
py
|
Python
|
wsi_process/resize.py
|
Lornatang/Scripts
|
ba9bb03dc3bc4b48d6314a6bd16e3ae5e813bb24
|
[
"Apache-2.0"
] | null | null | null |
wsi_process/resize.py
|
Lornatang/Scripts
|
ba9bb03dc3bc4b48d6314a6bd16e3ae5e813bb24
|
[
"Apache-2.0"
] | null | null | null |
wsi_process/resize.py
|
Lornatang/Scripts
|
ba9bb03dc3bc4b48d6314a6bd16e3ae5e813bb24
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Dakewe Biotech Corporation. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import cv2
def main():
filenames = os.listdir("HR")
for filename in sorted(filenames):
print(f"Process `{filename}`.")
imagex0 = cv2.imread(os.path.join("HR", filename)) # HWC.
height, width, _ = imagex0.shape
imagex2_nearest = cv2.resize(imagex0, (width // 2, height // 2), interpolation=cv2.INTER_NEAREST)
imagex2_linear = cv2.resize(imagex0, (width // 2, height // 2), interpolation=cv2.INTER_LINEAR)
imagex2_area = cv2.resize(imagex0, (width // 2, height // 2), interpolation=cv2.INTER_AREA)
imagex2_cubic = cv2.resize(imagex0, (width // 2, height // 2), interpolation=cv2.INTER_CUBIC)
imagex2_lanczos4 = cv2.resize(imagex0, (width // 2, height // 2), interpolation=cv2.INTER_LANCZOS4)
imagex4_nearest = cv2.resize(imagex0, (width // 4, height // 4), interpolation=cv2.INTER_NEAREST)
imagex4_linear = cv2.resize(imagex0, (width // 4, height // 4), interpolation=cv2.INTER_LINEAR)
imagex4_area = cv2.resize(imagex0, (width // 4, height // 4), interpolation=cv2.INTER_AREA)
imagex4_cubic = cv2.resize(imagex0, (width // 4, height // 4), interpolation=cv2.INTER_CUBIC)
imagex4_lanczos4 = cv2.resize(imagex0, (width // 4, height // 4), interpolation=cv2.INTER_LANCZOS4)
cv2.imwrite(os.path.join("LRnearestx2", filename), imagex2_nearest)
cv2.imwrite(os.path.join("LRlinearx2", filename), imagex2_linear)
cv2.imwrite(os.path.join("LRareax2", filename), imagex2_area)
cv2.imwrite(os.path.join("LRcubicx2", filename), imagex2_cubic)
cv2.imwrite(os.path.join("LRlanczos4x2", filename), imagex2_lanczos4)
cv2.imwrite(os.path.join("LRnearestx4", filename), imagex4_nearest)
cv2.imwrite(os.path.join("LRlinearx4", filename), imagex4_linear)
cv2.imwrite(os.path.join("LRareax4", filename), imagex4_area)
cv2.imwrite(os.path.join("LRcubicx4", filename), imagex4_cubic)
cv2.imwrite(os.path.join("LRlanczos4x4", filename), imagex4_lanczos4)
if __name__ == "__main__":
main()
| 55.288462 | 108 | 0.653217 |
794816c3bfbb391fc360fff094bd8a46428f3f47
| 604 |
py
|
Python
|
database_block/models/ir_http.py
|
trojikman/access-addons
|
5b056b4d0928e83f687ea7978de6f96f826c28a6
|
[
"MIT"
] | null | null | null |
database_block/models/ir_http.py
|
trojikman/access-addons
|
5b056b4d0928e83f687ea7978de6f96f826c28a6
|
[
"MIT"
] | null | null | null |
database_block/models/ir_http.py
|
trojikman/access-addons
|
5b056b4d0928e83f687ea7978de6f96f826c28a6
|
[
"MIT"
] | null | null | null |
# Copyright 2020 Eugene Molotov <https://it-projects.info/team/em230418>
# License MIT (https://opensource.org/licenses/MIT).
from odoo import SUPERUSER_ID, models
class IrHttp(models.AbstractModel):
_inherit = "ir.http"
def session_info(self):
res = super(IrHttp, self).session_info()
res["database_block_show_message_in_apps_menu"] = bool(
self.env["ir.module.module"]
.with_user(SUPERUSER_ID)
.search(
[("name", "=", "web_responsive"), ("state", "=", "installed")], limit=1,
)
)
return res
| 26.26087 | 88 | 0.600993 |
794816df16e5c3f0b0f03d61888d12de26161c40
| 14,542 |
py
|
Python
|
bitcoin/ledger.py
|
vanschelven/python-bitcoin
|
1b80c284170fd3f547cc45f4700ce169f3f99641
|
[
"MIT"
] | 8 |
2015-02-04T05:07:50.000Z
|
2019-01-17T19:52:10.000Z
|
bitcoin/ledger.py
|
vanschelven/python-bitcoin
|
1b80c284170fd3f547cc45f4700ce169f3f99641
|
[
"MIT"
] | 2 |
2020-05-13T09:50:07.000Z
|
2021-02-23T09:44:04.000Z
|
bitcoin/ledger.py
|
vanschelven/python-bitcoin
|
1b80c284170fd3f547cc45f4700ce169f3f99641
|
[
"MIT"
] | 9 |
2015-01-23T11:37:30.000Z
|
2018-10-06T11:15:07.000Z
|
# -*- coding: utf-8 -*-
# Copyright © 2012-2014 by its contributors. See AUTHORS for details.
# Distributed under the MIT/X11 software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
# Python 2 and 3 compatibility utilities
import six
from blist import sorteddict
from recordtype import recordtype
from .authtree import MemoryPatriciaAuthTree
from .core import Output
from .hash import hash256
from .mixins import SerializableMixin
from .serialize import BigCompactSize, LittleInteger, VarInt
from .tools import compress_amount, decompress_amount
__all__ = (
'UnspentTransaction',
'OutPoint',
'Coin',
'BaseValidationIndex',
'MemoryValidationIndex',
'ContractOutPoint',
'ContractCoin',
'BaseContractIndex',
'MemoryContractIndex',
)
# ===----------------------------------------------------------------------===
from .script import ScriptPickler
class UnspentTransaction(SerializableMixin, sorteddict):
"""Pruned version of core.Transaction: only retains metadata and unspent
transaction outputs.
Serialized format:
- VARINT(version)
- VARINT(code)
- unspentness bitvector, for outputs[2] and further; least significant
byte first
- the non-spent, compressed TransactionOutputs
- VARINT(height)
- VARINT(reference_height)
The code value consists of:
- bit 1: outputs[0] is not spent
- bit 2: outputs[1] is not spent
- bit 3: outputs[2] is not spent
- The higher bits encode N, the number of non-zero bytes in the following
bitvector.
    - In case bit 1, bit 2 and bit 3 are all unset, they encode N-1, as
there must be at least one non-spent output.
Example: 0102835800816115944e077fe7c803cfa57f29b36bf87c1d358bb85e
<><><--------------------------------------------><---->
| \ | /
version code outputs[1] height
- version = 1
- code = 2 (outputs[1] is not spent, and 0 non-zero bytes of bitvector follow)
- unspentness bitvector: as 0 non-zero bytes follow, it has length 0
- outputs[1]: 835800816115944e077fe7c803cfa57f29b36bf87c1d35
* 8358: compact amount representation for 60000000000 (600 BTC)
* 00: special txout type pay-to-pubkey-hash
* 816115944e077fe7c803cfa57f29b36bf87c1d35: address uint160
- height = 203998
Example: 0208044086ef97d5790061b01caab50f1b8e9c50a5057eb43c2d9563a4ee...
<><><--><-------------------------------------------------->
/ | \ |
version code unspentness outputs[4]
...bbd123008c988f1a4a4de2161e0f50aac7f17e7f9555caa486af3b8668
<----------------------------------------------><----><-->
| / |
outputs[16] height reference_height
- version = 2
- code = 8: neither outputs[0], outputs[1], nor outputs[2] are unspent, 2
      (1, +1 because bit 1, bit 2 and bit 3 are all unset) non-zero bitvector bytes
follow.
- unspentness bitvector: bits 1 (0x02) and 13 (0x2000) are set, so
outputs[1+3] and outputs[13+3] are unspent
    - outputs[4]: 86ef97d5790061b01caab50f1b8e9c50a5057eb43c2d9563a4ee
* 86ef97d579: compact amount representation for 234925952 (2.35 BTC)
* 00: special txout type pay-to-pubkey-hash
* 61b01caab50f1b8e9c50a5057eb43c2d9563a4ee: address uint160
- outputs[16]: bbd123008c988f1a4a4de2161e0f50aac7f17e7f9555caa4
* bbd123: compact amount representation for 110397 (0.001 BTC)
* 00: special txout type pay-to-pubkey-hash
* 8c988f1a4a4de2161e0f50aac7f17e7f9555caa4: address uint160
- height = 120891
- reference_height = 1000
"""
# We only need one script pickler, which every instance of UnspentTransaction
# can use (there's no concurrency issues with picklers, and it needs to be
# available to the class anyway for deserialize).
_pickler = ScriptPickler()
def __init__(self, *args, **kwargs):
# Since we behave like a dictionary object, we implement the copy
# constructor, which requires copying meta information not contained
# within the dictionary itself.
if args and all(hasattr(args[0], x) for x in
('version', 'height', 'reference_height')):
other = args[0]
else:
other = None
# You can either specify the transaction, another UnspentTransaction
# object, or the metadata directly. Choose one.
a = 'transaction' in kwargs
b = other is not None
c = any(x in kwargs for x in ('version', 'reference_height'))
if a + b + c >= 2: # <-- yes, you can do this
raise TypeError(u"instantiate by either specifying the "
u"transaction directly, another %s, or its individual "
u"metadata; choose one" % self.__class__.__name__)
# Extract captured parameters from kwargs, starting with the transaction
# because its metadata are used as the default.
transaction = kwargs.pop('transaction', None)
if other is None:
other = transaction
version = kwargs.pop('version', getattr(other, 'version', 1))
height = kwargs.pop('height', getattr(other, 'height', 0))
# Reference heights are added with transaction version=2, so we do
# not extract that parameter unless version=2.
reference_height = getattr(other, 'reference_height', 0)
if version in (2,):
reference_height = kwargs.pop('reference_height', reference_height)
# Perform construction of the dictionary object (our superclass)
super(UnspentTransaction, self).__init__(*args, **kwargs)
# Store metadata
self.version = version
self.height = height
self.reference_height = reference_height
# Add the transaction's outputs only if outputs are not separately
# specified (as is typically done if it is known in advance which
# outputs are not spent at time of creation).
if transaction is not None and not self:
for idx,output in enumerate(transaction.outputs):
self[idx] = output
def serialize(self):
# code&0x1: outputs[0] unspent
# code&0x2: outputs[1] unspent
# code&0x4: outputs[2] unspent
# code>>3: N, the minimal length of bitvector in bytes, or N-1 if
# outputs[0], outputs[1], and outputs[1] are all spent
bitvector = 0
for idx in six.iterkeys(self):
bitvector |= 1 << idx
if not bitvector:
raise TypeError()
code = bitvector & 0x7
bitvector >>= 3
bitvector = LittleInteger(bitvector).serialize()
bitvector_len = len(bitvector)
if not code:
bitvector_len -= 1
code |= bitvector_len << 3
result = VarInt(self.version).serialize()
result += VarInt(code).serialize()
result += bitvector
for output in six.itervalues(self):
result += VarInt(compress_amount(output.amount)).serialize()
result += self._pickler.dumps(output.contract)
result += VarInt(self.height).serialize()
if self.version in (2,):
result += VarInt(self.reference_height).serialize()
return result
@classmethod
def deserialize(cls, file_):
output_class = getattr(cls, 'get_output_class', lambda:
getattr(cls, 'output_class', Output))()
kwargs = {}
kwargs['version'] = VarInt.deserialize(file_)
# See description of code, bitvector above.
code, bitvector = VarInt.deserialize(file_), 0
bitvector |= code & 0x7
code >>= 3
if not bitvector:
code += 1
if code:
bitvector |= LittleInteger.deserialize(file_, code) << 3
idx, items = 0, []
while bitvector:
if bitvector & 0x1:
items.append(
(idx, output_class(
decompress_amount(VarInt.deserialize(file_)),
cls._pickler.load(file_))))
idx, bitvector = idx + 1, bitvector >> 1
kwargs['height'] = VarInt.deserialize(file_)
if kwargs['version'] in (2,):
kwargs['reference_height'] = VarInt.deserialize(file_)
return cls(items, **kwargs)
def __eq__(self, other):
# Compare metadata first, as it's probably less expensive
if any((self.height != other.height,
self.version != other.version)):
return False
if self.version in (2,) and self.reference_height != other.reference_height:
return False
return super(UnspentTransaction, self).__eq__(other)
__ne__ = lambda a,b:not a==b
def __repr__(self):
return '%s%s, version=%d, height=%d, reference_height=%d)' % (
self.__class__.__name__,
super(UnspentTransaction, self).__repr__()[10:-1],
self.version,
self.height,
self.reference_height)
# ===----------------------------------------------------------------------===
OutPoint = recordtype('OutPoint', ['hash', 'index'])
def _serialize_outpoint(self):
parts = list()
parts.append(hash256.serialize(self.hash))
if self.index == -1:
parts.append(b'\xfe\xff\xff\xff\xff')
else:
parts.append(BigCompactSize(self.index).serialize())
return b''.join(parts)
OutPoint.serialize = _serialize_outpoint
def _deserialize_outpoint(cls, file_):
kwargs = dict()
kwargs['hash'] = hash256.deserialize(file_)
kwargs['index'] = BigCompactSize.deserialize(file_)
return cls(**kwargs)
OutPoint.deserialize = classmethod(_deserialize_outpoint)
def _repr_outpoint(self):
return '%s(hash=%064x, index=%d)' % (
self.__class__.__name__, self.hash, self.index==2**32-1 and -1 or self.index)
OutPoint.__repr__ = _repr_outpoint
Coin = recordtype('Coin',
['version', 'amount', 'contract', 'height', 'reference_height'])
Coin._pickler = ScriptPickler()
def _serialize_coin(self):
parts = list()
parts.append(VarInt(self.version).serialize())
parts.append(VarInt(compress_amount(self.amount)).serialize())
parts.append(self._pickler.dumps(self.contract.serialize()))
parts.append(VarInt(self.height).serialize())
if self.version in (2,):
parts.append(VarInt(self.reference_height).serialize())
return b''.join(parts)
Coin.serialize = _serialize_coin
def _deserialize_coin(cls, file_):
kwargs = dict()
kwargs['version'] = VarInt.deserialize(file_)
kwargs['amount'] = decompress_amount(VarInt.deserialize(file_))
kwargs['contract'] = cls._pickler.load(file_)
kwargs['height'] = VarInt.deserialize(file_)
if kwargs['version'] in (2,):
kwargs['reference_height'] = VarInt.deserialize(file_)
return cls(**kwargs)
Coin.deserialize = classmethod(_deserialize_coin)
def _repr_coin(self):
parts = list()
parts.append('version=%d' % self.version)
parts.append('amount=%d' % self.amount)
parts.append('contract=%s' % repr(self.contract))
parts.append('height=%d' % self.height)
if self.version in (2,):
parts.append('reference_height=%d' % self.reference_height)
return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
Coin.__repr__ = _repr_coin
class BaseValidationIndex(object):
key_class = OutPoint
value_class = Coin
class MemoryValidationIndex(BaseValidationIndex, MemoryPatriciaAuthTree):
pass
# ===----------------------------------------------------------------------===
ContractOutPoint = recordtype('ContractOutPoint', ['contract', 'hash', 'index'])
ContractOutPoint._pickler = ScriptPickler()
def _serialize_contract_outpoint(self):
return b''.join([self._pickler.dumps(self.contract.serialize()),
hash256.serialize(self.hash),
BigCompactSize(self.index).serialize()])
ContractOutPoint.serialize = _serialize_contract_outpoint
def _deserialize_contract_outpoint(cls, file_):
kwargs = dict()
kwargs['contract'] = cls._pickler.load(file_)
kwargs['hash'] = hash256.deserialize(file_)
kwargs['index'] = BigCompactSize.deserialize(file_)
return cls(**kwargs)
ContractOutPoint.deserialize = classmethod(_deserialize_contract_outpoint)
def _repr_contract_outpoint(self):
return '%s(contract=%s, hash=%064x, index=%d)' % (
self.__class__.__name__, repr(self.contract), self.hash, self.index)
ContractOutPoint.__repr__ = _repr_contract_outpoint
ContractCoin = recordtype('ContractCoin',
['version', 'amount', 'height', 'reference_height'])
def _serialize_contract_coin(self):
parts = list()
parts.append(VarInt(self.version).serialize())
parts.append(VarInt(compress_amount(self.amount)).serialize())
parts.append(VarInt(self.height).serialize())
if self.version in (2,):
parts.append(VarInt(self.reference_height).serialize())
return b''.join(parts)
ContractCoin.serialize = _serialize_contract_coin
def _deserialize_contract_coin(cls, file_):
kwargs = dict()
kwargs['version'] = VarInt.deserialize(file_)
    # Read fields in the order written by _serialize_contract_coin: amount, then height
    kwargs['amount'] = decompress_amount(VarInt.deserialize(file_))
    kwargs['height'] = VarInt.deserialize(file_)
if kwargs['version'] in (2,):
kwargs['reference_height'] = VarInt.deserialize(file_)
return cls(**kwargs)
ContractCoin.deserialize = classmethod(_deserialize_contract_coin)
def _repr_contract_coin(self):
parts = list()
parts.append('version=%d' % self.version)
parts.append('amount=%d' % self.amount)
parts.append('height=%d' % self.height)
if self.version in (2,):
parts.append('reference_height=%d' % self.reference_height)
return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
ContractCoin.__repr__ = _repr_contract_coin
class BaseContractIndex(object):
key_class = ContractOutPoint
value_class = ContractCoin
class MemoryContractIndex(BaseContractIndex, MemoryPatriciaAuthTree):
pass
# End of File
| 41.430199 | 88 | 0.624948 |
79481879fa2d2f8394dc5f19bc539efa1ac76706
| 5,157 |
py
|
Python
|
craid/eddb/util/dataUpdate/MakeSmolFiles.py
|
HausReport/ClubRaiders
|
88bd64d2512302ca2b391b48979b6e88b092eb92
|
[
"BSD-3-Clause"
] | null | null | null |
craid/eddb/util/dataUpdate/MakeSmolFiles.py
|
HausReport/ClubRaiders
|
88bd64d2512302ca2b391b48979b6e88b092eb92
|
[
"BSD-3-Clause"
] | 2 |
2020-05-28T13:30:08.000Z
|
2020-06-02T14:12:04.000Z
|
craid/eddb/util/dataUpdate/MakeSmolFiles.py
|
HausReport/ClubRaiders
|
88bd64d2512302ca2b391b48979b6e88b092eb92
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2020 Club Raiders Project
# https://github.com/HausReport/ClubRaiders
#
# SPDX-License-Identifier: BSD-3-Clause
import gzip
import logging
import os
import shutil
import tempfile
import traceback
from shutil import copyfile
from typing import Dict, List, Set
import ujson
from craid.eddb.loader.strategy.DataLoader import DataLoader
from craid.eddb.loader.strategy.EDDBLoader import LoadDataFromEDDB
#
# Note: For the Git part, see: https://gitpython.readthedocs.io/en/stable/reference.html#module-git.cmd
#
# Note: For the sleep part, see https://stackoverflow.com/questions/2031111/in-python-how-can-i-put-a-thread-to-sleep-until-a-specific-time
#
# Note: Heroku version of cron: https://devcenter.heroku.com/articles/clock-processes-python
#
def munchFile(keys: Set[int], xinName: str):
tmp: List[Dict] = []
myLoader: DataLoader = LoadDataFromEDDB()
inFile = myLoader.find_data_file(xinName)
with gzip.open(inFile, 'rb') as f:
for line in f:
facLine = ujson.loads(line)
if facLine['id'] in keys:
tmp.append(facLine)
outName = "smol-" + xinName + ".gz"
tmpDir = tempfile.gettempdir()
outFile = os.path.join(tmpDir, outName)
with gzip.open(outFile, 'wt', encoding='utf-8') as file:
foo: Dict
for foo in tmp:
ujson.dump(foo, file)
file.write('\n')
#gitFile = os.path.join("..", "..", "..", "..", "data", outName)
gitFile = os.path.join("data", outName)
copyfile(outFile, gitFile)
def deleteOldFiles():
keyFiles = ['keys-club-faction-keys.pkl', 'keys-club-station-keys.pkl', 'keys-club-system-keys.pkl',
'keys-factions_of_interest_keys.pkl', 'keys-factions-of-interest-keys.pkl']
eFiles = ['factions.jsonl', 'stations.jsonl', 'systems_populated.jsonl']
#
# Step 0: Clean recovery directory
#
tmpDir = tempfile.gettempdir()
recoverDir = os.path.join(tmpDir, "crec")
if not os.path.exists(recoverDir):
os.makedirs(recoverDir)
else:
clearRecoveryFolder()
#
# Step 1: copy old systems file to new old-systems file
#
inFile = os.path.join(tmpDir, 'smol-systems_populated.jsonl.gz')
outFile = os.path.join(tmpDir, 'smol-sys-old.jsonl.gz')
recFile = os.path.join(recoverDir, 'smol-sys-old.jsonl.gz')
if os.path.exists(outFile):
copyfile(outFile, recFile)
if os.path.exists(inFile):
copyfile(inFile, outFile)
#
# Step 2: generate all filename permutations
#
allFiles = keyFiles
for fName in eFiles:
allFiles.append(fName)
allFiles.append(fName + ".gz")
allFiles.append("smol-" + fName + ".gz")
allFiles.append("smol-" + fName)
#
# Step 3: delete the files (move to recovery dir)
#
for fName in allFiles:
tmpDir = tempfile.gettempdir()
outFile = os.path.join(tmpDir, fName)
recFile = os.path.join(recoverDir, fName)
if os.path.exists(outFile):
logging.info("removing: " + outFile)
try:
shutil.copy2(outFile, recFile) # NOTE: copy2 to preserve file modification time
os.remove(outFile)
except Exception as e:
# FIXME: not really sure how missing file problem can happen here
# but according to the logs it can happen
traceback.print_exc()
logging.error(str(e))
def unDeleteOldFiles():
tmpDir = tempfile.gettempdir()
recoveryDir = os.path.join(tmpDir, "crec")
for filename in os.listdir(recoveryDir):
fromFile = os.path.join(recoveryDir, filename)
toFile = os.path.join(tmpDir, filename)
shutil.copy2(fromFile, toFile) # NOTE: copy2 to preserve file modification time
def clearRecoveryFolder():
tmpDir = tempfile.gettempdir()
folder = os.path.join(tmpDir, "crec")
for filename in os.listdir(folder):
file_path = os.path.join(folder, filename)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
print('Failed to delete %s. Reason: %s' % (file_path, e))
# if __name__ == '__main__':
# #
# # Fire up logger
# #
# logging.getLogger().addHandler(logging.StreamHandler())
# logging.getLogger().level = logging.DEBUG
#
# #
# # Get rid of old files
# #
# deleteOldFiles()
#
# # need to add exceptions to DataProducer.getDataArrays and handle here
#
# # download large files from eddb
# DataProducer.getDataArrays(writeKeyFiles=True, useEddb=True)
#
# # load key files & munch
# club_faction_keys = loadKeys("factions-of-interest-keys")
# munchFile(club_faction_keys, 'factions.jsonl')
# club_system_keys = loadKeys('club-system-keys')
# munchFile(club_system_keys, 'systems_populated.jsonl')
# club_station_keys = loadKeys("club-station-keys")
# munchFile(club_station_keys, 'stations.jsonl')
# inName = 'factions.jsonl'
| 32.433962 | 139 | 0.64301 |
794818a57f7b215a522fb5ce0e020ea2257e2ce6
| 3,191 |
py
|
Python
|
data/migrate/__init__.py
|
cnobile2012/inventory
|
bc09ccdead39a5cd758088dbfe5c56dec43c7e29
|
[
"MIT"
] | 10 |
2015-02-16T17:12:44.000Z
|
2021-06-04T18:14:01.000Z
|
data/migrate/__init__.py
|
cnobile2012/inventory
|
bc09ccdead39a5cd758088dbfe5c56dec43c7e29
|
[
"MIT"
] | 1 |
2021-09-20T01:17:38.000Z
|
2021-09-20T01:17:50.000Z
|
data/migrate/__init__.py
|
cnobile2012/inventory
|
bc09ccdead39a5cd758088dbfe5c56dec43c7e29
|
[
"MIT"
] | 5 |
2015-09-09T02:01:33.000Z
|
2021-05-29T09:09:14.000Z
|
# -*- coding: utf-8 -*-
#
# __init__.py
#
import logging
from django.contrib.auth import get_user_model
try:
from inventory.projects.models import Project, Membership, InventoryType
except:
pass
UserModel = get_user_model()
def setup_logger(name='root', fullpath=None, fmt=None, level=logging.INFO):
FORMAT = ("%(asctime)s %(levelname)s %(module)s %(funcName)s "
"[line:%(lineno)d] %(message)s")
if not fmt: fmt = FORMAT
    # Turn off logging from the django db backend.
backends = logging.getLogger('django.db.backends')
backends.setLevel(logging.WARNING)
# Setup logger.
logger = logging.getLogger(name)
logger.setLevel(level=level)
handler = logging.FileHandler(fullpath)
formatter = logging.Formatter(fmt)
handler.setFormatter(formatter)
logger.addHandler(handler)
#print(logger.getEffectiveLevel())
return logger
class MigrateBase:
_DEFAULT_USER = 'cnobile'
_INVENTORY_NAME = "Electronics"
_PROJECT_NAME = "Carl's Electronics Inventory"
_LD_NAME = "Home Inventory Location Formats"
_LD_DESC = "My DIY Inventory."
PROJECT_OWNER = Membership.ROLE_MAP[Membership.PROJECT_OWNER]
def __init__(self, log):
self._log = log
def get_user(self, username=_DEFAULT_USER):
user = UserModel.objects.filter(username=username, is_active=True)
if user.count():
user = user[0]
else:
user = None
self._log.info("Found user: %s", user)
return user
def _create_inventory_type(self):
user = self.get_user()
name = self._INVENTORY_NAME
kwargs = {}
kwargs['description'] = ("Inventory for electronic parts and "
"related items.")
kwargs['creator'] = user
kwargs['updater'] = user
in_type, created = InventoryType.objects.get_or_create(
name=name, defaults=kwargs)
return in_type
def _create_project(self):
if not self._options.noop:
user = self.get_user()
name = self._PROJECT_NAME
kwargs = {}
kwargs['inventory_type'] = self._create_inventory_type()
kwargs['creator'] = user
kwargs['updater'] = user
project, created = Project.objects.get_or_create(
name=name, defaults=kwargs)
members = [
{'user': user, 'role_text': self.PROJECT_OWNER}
]
project.process_members(members)
return project
def _fix_boolean(self, value):
value = value.strip()
result = value
if value.lower() == 'true':
result = True
elif value.lower() == 'false':
result = False
return result
def _fix_numeric(self, value):
value = value.strip()
result = ''
if value.isdigit():
result = int(value)
return result
def _yes_no(self, value):
value = value.strip().lower()
if value == 'false':
value = 0
elif value == 'true':
value = 1
else:
value = 0
return value
| 27.042373 | 76 | 0.590411 |
7948192666bb8d105d2b1ba0e08f82902f5d04a4
| 1,485 |
py
|
Python
|
setup.py
|
timgates42/django-meta
|
2fd32eab30486daeee27e8a9500045d4233996e5
|
[
"BSD-4-Clause"
] | null | null | null |
setup.py
|
timgates42/django-meta
|
2fd32eab30486daeee27e8a9500045d4233996e5
|
[
"BSD-4-Clause"
] | null | null | null |
setup.py
|
timgates42/django-meta
|
2fd32eab30486daeee27e8a9500045d4233996e5
|
[
"BSD-4-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from setuptools import setup
import meta
setup(
name='django-meta',
description='Pluggable app for handling webpage meta tags and OpenGraph '
'properties',
long_description=open('README.rst').read(),
version=meta.__version__,
packages=['meta', 'meta.templatetags', 'meta_mixin'],
package_data={
'meta': ['templates/*.html', 'templates/meta_mixin/*.html', 'templates/meta/*.html'],
},
author='Monwara LLC',
maintainer='Nephila',
author_email='branko@monwara.com',
maintainer_email='info@nephila.it',
url='https://github.com/nephila/django-meta',
license='BSD',
install_requires=[
'six',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.0',
'Framework :: Django :: 2.1',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.0',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
)
| 33 | 93 | 0.580471 |
794819a5da6a61ff4abf24aec0df8f3da81d3038
| 79,379 |
py
|
Python
|
pineboolib/fllegacy/systype.py
|
juanjosepablos/pineboo
|
f6ce515aec6e0139821bb9c1d62536d9fb50dae4
|
[
"MIT"
] | null | null | null |
pineboolib/fllegacy/systype.py
|
juanjosepablos/pineboo
|
f6ce515aec6e0139821bb9c1d62536d9fb50dae4
|
[
"MIT"
] | 1 |
2017-10-30T22:00:48.000Z
|
2017-11-11T19:34:32.000Z
|
pineboolib/fllegacy/systype.py
|
juanjosepablos/pineboo
|
f6ce515aec6e0139821bb9c1d62536d9fb50dae4
|
[
"MIT"
] | 1 |
2017-10-30T20:16:38.000Z
|
2017-10-30T20:16:38.000Z
|
"""Systype module."""
import traceback
import os
import os.path
import sys
import re
from PyQt5 import QtCore, QtWidgets, QtGui, QtXml
from pineboolib.core.error_manager import error_manager
from pineboolib.core.system import System
from pineboolib.core.utils import utils_base, logging
from pineboolib.core import settings, decorators
from pineboolib import application
from pineboolib.application import types, process
from pineboolib.application.database import pnsqlcursor, pnsqlquery
from pineboolib.application.database import utils as utils_db
from pineboolib.application.packager import pnunpacker
from pineboolib.application.qsatypes import sysbasetype
from .aqsobjects.aqs import AQS
from .aqsobjects import aqsql
from . import flutil
from . import flapplication
from . import flvar
from pineboolib.q3widgets.dialog import Dialog
from pineboolib.q3widgets.qbytearray import QByteArray
from pineboolib.q3widgets.messagebox import MessageBox
from pineboolib.q3widgets.qtextedit import QTextEdit
from pineboolib.q3widgets.qlabel import QLabel
from pineboolib.q3widgets.qdialog import QDialog
from pineboolib.q3widgets.qvboxlayout import QVBoxLayout
from pineboolib.q3widgets.qhboxlayout import QHBoxLayout
from pineboolib.q3widgets.qpushbutton import QPushButton
from pineboolib.q3widgets.filedialog import FileDialog
from typing import cast, Optional, List, Any, Dict, Callable, TYPE_CHECKING
from pineboolib.fllegacy import flfielddb, fltabledb
if TYPE_CHECKING:
from pineboolib.interfaces import iconnection, isqlcursor
logger = logging.getLogger("fllegacy.systype")
class AQTimer(QtCore.QTimer):
"""AQTimer class."""
pass
class AQGlobalFunctions_class(QtCore.QObject):
"""AQSGlobalFunction class."""
functions_ = types.Array()
mappers_: QtCore.QSignalMapper
def __init__(self):
"""Initialize."""
super().__init__()
self.mappers_ = QtCore.QSignalMapper()
def set(self, function_name: str, global_function: Callable) -> None:
"""Set a new global function."""
self.functions_[function_name] = global_function
def get(self, function_name: str) -> Callable:
"""Return a global function specified by name."""
return self.functions_[function_name]
def exec_(self, function_name: str) -> None:
"""Execute a function specified by name."""
fn = self.functions_[function_name]
if fn is not None:
fn()
def mapConnect(self, obj: QtWidgets.QWidget, signal: str, function_name: str) -> None:
"""Add conection to map."""
self.mappers_.mapped[str].connect(self.exec_) # type: ignore
sg_name = re.sub(r" *\(.*\)", "", signal)
sg = getattr(obj, sg_name, None)
if sg is not None:
sg.connect(self.mappers_.map)
self.mappers_.setMapping(obj, function_name)
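# Hedged usage sketch for AQGlobalFunctions_class (illustrative only, not
# executed here); the function name and the widget are assumptions:
#
#   aq_globals = AQGlobalFunctions_class()
#   aq_globals.set("sayHello", lambda: print("hello"))
#   aq_globals.exec_("sayHello")                      # calls the stored function
#   # aq_globals.mapConnect(button, "clicked()", "sayHello")  # wire to a signal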
class SysType(sysbasetype.SysBaseType):
"""SysType class."""
time_user_ = QtCore.QDateTime.currentDateTime()
AQTimer = AQTimer
AQGlobalFunctions = AQGlobalFunctions_class()
@classmethod
def translate(self, *args) -> str:
"""Translate a text."""
from pineboolib.core import translate
group = args[0] if len(args) == 2 else "scripts"
text = args[1] if len(args) == 2 else args[0]
if text == "MetaData":
group, text = text, group
text = text.replace(" % ", " %% ")
return translate.translate(group, text)
def printTextEdit(self, editor: QtWidgets.QTextEdit):
"""Print text from a textEdit."""
flapplication.aqApp.printTextEdit(editor)
def dialogGetFileImage(self) -> Optional[str]:
"""Show a file dialog and return a file name."""
return flapplication.aqApp.dialogGetFileImage()
def toXmlReportData(self, qry: "pnsqlquery.PNSqlQuery") -> "QtXml.QDomDocument":
"""Return xml from a query."""
return flapplication.aqApp.toXmlReportData(qry)
def showDocPage(self, url_: str) -> None:
"""Show externa file."""
return flapplication.aqApp.showDocPage(url_)
def toPixmap(self, value_: str) -> QtGui.QPixmap:
"""Create a QPixmap from a text."""
return flapplication.aqApp.toPixmap(value_)
def setMultiLang(self, enable_: bool, lang_id_: str) -> None:
"""
Change multilang status.
        @param enable, Boolean with the new status
        @param langid, Identifier of the language to activate
"""
return flapplication.aqApp.setMultiLang(enable_, lang_id_)
def fromPixmap(self, pix_: QtGui.QPixmap) -> str:
"""Return a text from a QPixmap."""
return flapplication.aqApp.fromPixmap(pix_)
def popupWarn(self, msg_warn: str, script_calls: List[Any] = []) -> None:
"""Show a warning popup."""
flapplication.aqApp.popupWarn(msg_warn, script_calls)
def openMasterForm(self, action_name_: str, pix_: Optional[QtGui.QPixmap] = None) -> None:
"""Open default form from a action."""
flapplication.aqApp.openMasterForm(action_name_, pix_)
def scalePixmap(
self, pix_: QtGui.QPixmap, w_: int, h_: int, mode_: QtCore.Qt.AspectRatioMode
) -> QtGui.QImage:
"""Return QImage scaled from a QPixmap."""
return flapplication.aqApp.scalePixmap(pix_, w_, h_, mode_)
@classmethod
def transactionLevel(self) -> int:
"""Return transaction level."""
return application.PROJECT.conn_manager.useConn("default").transactionLevel()
@classmethod
def installACL(self, idacl) -> None:
"""Install a acl."""
from pineboolib.application.acls import pnaccesscontrollists
acl_ = pnaccesscontrollists.PNAccessControlLists()
if acl_:
acl_.install_acl(idacl)
@classmethod
def updateAreas(self) -> None:
"""Update areas in mdi."""
mw = flapplication.aqApp.mainWidget()
if hasattr(mw, "initToolBox"):
mw.initToolBox()
@classmethod
def reinit(self) -> None:
"""Call reinit script."""
flapplication.aqApp.reinit()
@classmethod
def modMainWidget(self, id_module_: str) -> Optional[QtWidgets.QWidget]:
"""Set module MainWinget."""
return flapplication.aqApp.modMainWidget(id_module_)
@classmethod
def setCaptionMainWidget(self, title: str) -> None:
"""Set caption in the main widget."""
flapplication.aqApp.setCaptionMainWidget(title)
@staticmethod
def execQSA(fileQSA=None, args=None) -> None:
"""Execute a QS file."""
from pineboolib.application import types
try:
with open(fileQSA, "r") as file:
fn = types.function(file.read())
fn(args)
except Exception:
e = traceback.format_exc()
logger.warning(e)
return
@staticmethod
def dumpDatabase() -> None:
"""Launch dump database."""
aqDumper = AbanQDbDumper()
aqDumper.init()
@staticmethod
def terminateChecksLocks(sqlCursor: "isqlcursor.ISqlCursor" = None) -> None:
"""Set check risk locks to False in a cursor."""
if sqlCursor is not None:
sqlCursor.checkRisksLocks(True)
@classmethod
def statusDbLocksDialog(self, locks: Optional[List[str]] = None) -> None:
"""Show Database locks status."""
diag = Dialog()
txtEdit = QTextEdit()
diag.caption = self.translate(u"scripts", u"Bloqueos de la base de datos")
diag.setWidth(500)
html = u'<html><table border="1">'
if locks is not None and len(locks):
j = 0
item = u""
fields = locks[0].split(u"@")
closeInfo = False
closeRecord = False
headInfo = u'<table border="1"><tr>'
i = 0
while_pass = True
while i < len(fields):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
headInfo += utils_base.ustr(u"<td><b>", fields[i], u"</b></td>")
i += 1
while_pass = True
try:
i < len(fields)
except Exception:
break
headInfo += u"</tr>"
headRecord = utils_base.ustr(
u'<table border="1"><tr><td><b>',
self.translate(u"scripts", u"Registro bloqueado"),
u"</b></td></tr>",
)
i = 1
while_pass = True
while i < len(locks):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
item = locks[i]
if item[0:2] == u"##":
if closeInfo:
html += u"</table>"
if not closeRecord:
html += headRecord
html += utils_base.ustr(
u"<tr><td>", item[(len(item) - (len(item) - 2)) :], u"</td></tr>"
)
closeRecord = True
closeInfo = False
else:
if closeRecord:
html += u"</table>"
if not closeInfo:
html += headInfo
html += u"<tr>"
fields = item.split(u"@")
j = 0
while_pass = True
while j < len(fields):
if not while_pass:
j += 1
while_pass = True
continue
while_pass = False
html += utils_base.ustr(u"<td>", fields[j], u"</td>")
j += 1
while_pass = True
try:
j < len(fields)
except Exception:
break
html += u"</tr>"
closeRecord = False
closeInfo = True
i += 1
while_pass = True
try:
i < len(locks)
except Exception:
break
html += u"</table></table></html>"
txtEdit.text = html
diag.add(txtEdit)
diag.exec_()
@classmethod
def mvProjectXml(self) -> QtXml.QDomDocument:
"""Extract a module defition to a QDomDocument."""
doc_ret_ = QtXml.QDomDocument()
str_xml_ = utils_db.sql_select(u"flupdates", u"modulesdef", "actual")
if not str_xml_:
return doc_ret_
doc = QtXml.QDomDocument()
if not doc.setContent(str_xml_):
return doc_ret_
str_xml_ = u""
nodes = doc.childNodes()
i = 0
while_pass = True
while i < len(nodes):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
it = nodes.item(i)
if it.isComment():
data = it.toComment().data()
if not data == "" and data.startswith(u"<mvproject "):
str_xml_ = data
break
i += 1
while_pass = True
try:
i < len(nodes)
except Exception:
break
if str_xml_ == "":
return doc_ret_
doc_ret_.setContent(str_xml_)
return doc_ret_
@classmethod
def mvProjectModules(self) -> types.Array:
"""Return modules defitions Dict."""
ret = types.Array()
doc = self.mvProjectXml()
mods = doc.elementsByTagName(u"module")
i = 0
while_pass = True
while i < len(mods):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
it = mods.item(i).toElement()
mod = {"name": (it.attribute(u"name")), "version": (it.attribute(u"version"))}
if len(mod["name"]) == 0:
continue
ret[mod["name"]] = mod
i += 1
while_pass = True
try:
i < len(mods)
except Exception:
break
return ret
@classmethod
def mvProjectExtensions(self) -> types.Array:
"""Return project extensions Dict."""
ret = types.Array()
doc = self.mvProjectXml()
exts = doc.elementsByTagName(u"extension")
i = 0
while_pass = True
while i < len(exts):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
it = exts.item(i).toElement()
ext = {"name": (it.attribute(u"name")), "version": (it.attribute(u"version"))}
if len(ext["name"]) == 0:
continue
ret[ext["name"]] = ext
i += 1
while_pass = True
try:
i < len(exts)
except Exception:
break
return ret
@classmethod
def calculateShaGlobal(self) -> str:
"""Return sha global value."""
v = ""
qry = pnsqlquery.PNSqlQuery()
qry.setSelect(u"sha")
qry.setFrom(u"flfiles")
if qry.exec_() and qry.first():
v = utils_base.sha1(str(qry.value(0)))
while qry.next():
v = utils_base.sha1(v + str(qry.value(0)))
else:
v = utils_base.sha1("")
return v
@classmethod
def registerUpdate(self, input_: Any = None) -> None:
"""Install a package."""
if not input_:
return
unpacker = pnunpacker.PNUnpacker(input_)
errors = unpacker.errorMessages()
if len(errors) != 0:
msg = self.translate(u"Hubo los siguientes errores al intentar cargar los módulos:")
msg += u"\n"
i = 0
while_pass = True
while i < len(errors):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
msg += utils_base.ustr(errors[i], u"\n")
i += 1
while_pass = True
try:
i < len(errors)
except Exception:
break
self.errorMsgBox(msg)
return
unpacker.jump()
unpacker.jump()
unpacker.jump()
now = str(types.Date())
file = types.File(input_)
fileName = file.name
modulesDef = self.toUnicode(unpacker.getText(), u"utf8")
filesDef = self.toUnicode(unpacker.getText(), u"utf8")
shaGlobal = self.calculateShaGlobal()
aqsql.AQSql.update(u"flupdates", [u"actual"], [False], "1=1")
aqsql.AQSql.insert(
u"flupdates",
[u"fecha", u"hora", u"nombre", u"modulesdef", u"filesdef", u"shaglobal"],
[
now[: now.find("T")],
str(now)[(len(str(now)) - (8)) :],
fileName,
modulesDef,
filesDef,
shaGlobal,
],
)
@classmethod
def warnLocalChanges(self, changes: Optional[Dict[str, Any]] = None) -> bool:
"""Show local changes warning."""
if changes is None:
changes = self.localChanges()
if changes["size"] == 0:
return True
diag = QDialog()
diag.caption = self.translate(u"Detectados cambios locales")
diag.setModal(True)
txt = u""
txt += self.translate(u"¡¡ CUIDADO !! DETECTADOS CAMBIOS LOCALES\n\n")
txt += self.translate(u"Se han detectado cambios locales en los módulos desde\n")
txt += self.translate(u"la última actualización/instalación de un paquete de módulos.\n")
txt += self.translate(u"Si continua es posible que estos cambios sean sobreescritos por\n")
txt += self.translate(u"los cambios que incluye el paquete que quiere cargar.\n\n")
txt += u"\n\n"
txt += self.translate(u"Registro de cambios")
lay = QVBoxLayout(diag)
# lay.setMargin(6)
# lay.setSpacing(6)
lbl = QLabel(diag)
lbl.setText(txt)
lbl.setAlignment(cast(QtCore.Qt.Alignment, QtCore.Qt.AlignTop))
lay.addWidget(lbl)
ted = QTextEdit(diag)
ted.setTextFormat(QTextEdit.LogText)
ted.setAlignment(cast(QtCore.Qt.Alignment, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter))
ted.append(self.reportChanges(changes))
lay.addWidget(ted)
lbl2 = QLabel(diag)
lbl2.setText(self.translate("¿Que desea hacer?"))
lbl2.setAlignment(cast(QtCore.Qt.Alignment, QtCore.Qt.AlignTop))
lay.addWidget(lbl2)
lay2 = QHBoxLayout()
# lay2.setMargin(6)
# lay2.setSpacing(6)
lay.addLayout(lay2)
pbCancel = QPushButton(diag)
pbCancel.setText(self.translate(u"Cancelar"))
pbAccept = QPushButton(diag)
pbAccept.setText(self.translate(u"continue"))
lay2.addWidget(pbCancel)
lay2.addWidget(pbAccept)
application.connections.connect(pbAccept, "clicked()", diag, "accept()")
application.connections.connect(pbCancel, "clicked()", diag, "reject()")
return False if (diag.exec_() == 0) else True
@classmethod
def xmlFilesDefBd(self) -> QtXml.QDomDocument:
"""Return a QDomDocument with files definition."""
doc = QtXml.QDomDocument(u"files_def")
root = doc.createElement(u"files")
doc.appendChild(root)
qry = pnsqlquery.PNSqlQuery()
qry.setSelect(u"idmodulo,nombre,contenido")
qry.setFrom(u"flfiles")
if not qry.exec_():
return doc
shaSum = u""
shaSumTxt = u""
shaSumBin = u""
while qry.next():
idMod = str(qry.value(0))
if idMod == u"sys":
continue
fName = str(qry.value(1))
ba = QByteArray()
ba.string = self.fromUnicode(str(qry.value(2)), u"iso-8859-15")
sha = ba.sha1()
nf = doc.createElement(u"file")
root.appendChild(nf)
ne = doc.createElement(u"module")
nf.appendChild(ne)
nt = doc.createTextNode(idMod)
ne.appendChild(nt)
ne = doc.createElement(u"name")
nf.appendChild(ne)
nt = doc.createTextNode(fName)
ne.appendChild(nt)
if self.textPacking(fName):
ne = doc.createElement(u"text")
nf.appendChild(ne)
nt = doc.createTextNode(fName)
ne.appendChild(nt)
ne = doc.createElement(u"shatext")
nf.appendChild(ne)
nt = doc.createTextNode(sha)
ne.appendChild(nt)
ba = QByteArray()
ba.string = shaSum + sha
shaSum = ba.sha1()
ba = QByteArray()
ba.string = shaSumTxt + sha
shaSumTxt = ba.sha1()
# try:
# if self.binaryPacking(fName):
# ne = doc.createElement(u"binary")
# nf.appendChild(ne)
# nt = doc.createTextNode(utils_base.ustr(fName, u".qso"))
# ne.appendChild(nt)
# sha = AQS.sha1(qry.value(3))
# ne = doc.createElement(u"shabinary")
# nf.appendChild(ne)
# nt = doc.createTextNode(sha)
# ne.appendChild(nt)
# ba = QByteArray()
# ba.string = shaSum + sha
# shaSum = ba.sha1()
# ba = QByteArray()
# ba.string = shaSumBin + sha
# shaSumBin = ba.sha1()
# except Exception:
# e = traceback.format_exc()
# logger.error(e)
qry = pnsqlquery.PNSqlQuery()
qry.setSelect(u"idmodulo,icono")
qry.setFrom(u"flmodules")
if qry.exec_():
while qry.next():
idMod = str(qry.value(0))
if idMod == u"sys":
continue
fName = utils_base.ustr(idMod, u".xpm")
ba = QByteArray()
ba.string = str(qry.value(1))
sha = ba.sha1()
nf = doc.createElement(u"file")
root.appendChild(nf)
ne = doc.createElement(u"module")
nf.appendChild(ne)
nt = doc.createTextNode(idMod)
ne.appendChild(nt)
ne = doc.createElement(u"name")
nf.appendChild(ne)
nt = doc.createTextNode(fName)
ne.appendChild(nt)
if self.textPacking(fName):
ne = doc.createElement(u"text")
nf.appendChild(ne)
nt = doc.createTextNode(fName)
ne.appendChild(nt)
ne = doc.createElement(u"shatext")
nf.appendChild(ne)
nt = doc.createTextNode(sha)
ne.appendChild(nt)
ba = QByteArray()
ba.string = shaSum + sha
shaSum = ba.sha1()
ba = QByteArray()
ba.string = shaSumTxt + sha
shaSumTxt = ba.sha1()
ns = doc.createElement(u"shasum")
ns.appendChild(doc.createTextNode(shaSum))
root.appendChild(ns)
ns = doc.createElement(u"shasumtxt")
ns.appendChild(doc.createTextNode(shaSumTxt))
root.appendChild(ns)
ns = doc.createElement(u"shasumbin")
ns.appendChild(doc.createTextNode(shaSumBin))
root.appendChild(ns)
return doc
@classmethod
def loadModules(self, input_: Optional[Any] = None, warnBackup: bool = True):
"""Load modules from a package."""
if input_ is None:
dir_ = types.Dir(self.installPrefix())
dir_.setCurrent()
path_tuple = QtWidgets.QFileDialog.getOpenFileName(
QtWidgets.QApplication.focusWidget(),
u"Eneboo/AbanQ Packages",
self.translate(u"scripts", u"Seleccionar Fichero"),
"*.eneboopkg",
)
input_ = path_tuple[0]
if input_:
self.loadAbanQPackage(input_, warnBackup)
@classmethod
def loadAbanQPackage(self, input_: str, warnBackup: bool = True):
"""Load and process a Abanq/Eneboo package."""
if warnBackup and self.interactiveGUI():
txt = u""
txt += self.translate(u"Asegúrese de tener una copia de seguridad de todos los datos\n")
txt += self.translate(u"y de que no hay ningun otro usuario conectado a la base de\n")
txt += self.translate(u"datos mientras se realiza la carga.\n\n")
txt += u"\n\n"
txt += self.translate(u"¿Desea continuar?")
if MessageBox.Yes != MessageBox.warning(txt, MessageBox.No, MessageBox.Yes):
return
if input_:
ok = True
changes = self.localChanges()
if changes["size"] != 0:
if not self.warnLocalChanges(changes):
return
if ok:
unpacker = pnunpacker.PNUnpacker(input_)
errors = unpacker.errorMessages()
if len(errors) != 0:
msg = self.translate(
u"Hubo los siguientes errores al intentar cargar los módulos:"
)
msg += u"\n"
i = 0
while_pass = True
while i < len(errors):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
msg += utils_base.ustr(errors[i], u"\n")
i += 1
while_pass = True
try:
i < len(errors)
except Exception:
break
self.errorMsgBox(msg)
ok = False
unpacker.jump()
unpacker.jump()
unpacker.jump()
if ok:
ok = self.loadModulesDef(unpacker)
if ok:
ok = self.loadFilesDef(unpacker)
if not ok:
self.errorMsgBox(
self.translate(u"No se ha podido realizar la carga de los módulos.")
)
else:
self.registerUpdate(input_)
self.infoMsgBox(self.translate(u"La carga de módulos se ha realizado con éxito."))
self.reinit()
tmpVar = flvar.FLVar()
tmpVar.set(u"mrproper", u"dirty")
@classmethod
def loadFilesDef(self, un: Any) -> bool:
"""Load files definition from a package to a QDomDocument."""
filesDef = self.toUnicode(un.getText(), u"utf8")
doc = QtXml.QDomDocument()
if not doc.setContent(filesDef):
self.errorMsgBox(
self.translate(u"Error XML al intentar cargar la definición de los ficheros.")
)
return False
ok = True
root = doc.firstChild()
files = root.childNodes()
flutil.FLUtil.createProgressDialog(self.translate(u"Registrando ficheros"), len(files))
i = 0
while_pass = True
while i < len(files):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
it = files.item(i)
fil = {
"id": it.namedItem(u"name").toElement().text(),
"skip": it.namedItem(u"skip").toElement().text(),
"module": it.namedItem(u"module").toElement().text(),
"text": it.namedItem(u"text").toElement().text(),
"shatext": it.namedItem(u"shatext").toElement().text(),
"binary": it.namedItem(u"binary").toElement().text(),
"shabinary": it.namedItem(u"shabinary").toElement().text(),
}
flutil.FLUtil.setProgress(i)
flutil.FLUtil.setLabelText(
utils_base.ustr(self.translate(u"Registrando fichero"), u" ", fil["id"])
)
if len(fil["id"]) == 0 or fil["skip"] == u"true":
continue
if not self.registerFile(fil, un):
self.errorMsgBox(
utils_base.ustr(
self.translate(u"Error registrando el fichero"), u" ", fil["id"]
)
)
ok = False
break
i += 1
while_pass = True
try:
i < len(files)
except Exception:
break
flutil.FLUtil.destroyProgressDialog()
return ok
@classmethod
def registerFile(self, fil: Dict[str, Any], un: Any) -> bool:
"""Register a file in the database."""
if fil["id"].endswith(u".xpm"):
cur = pnsqlcursor.PNSqlCursor(u"flmodules")
if not cur.select(utils_base.ustr(u"idmodulo='", fil["module"], u"'")):
return False
if not cur.first():
return False
cur.setModeAccess(aqsql.AQSql.Edit)
cur.refreshBuffer()
cur.setValueBuffer(u"icono", un.getText())
return cur.commitBuffer()
cur = pnsqlcursor.PNSqlCursor(u"flfiles")
if not cur.select(utils_base.ustr(u"nombre='", fil["id"], u"'")):
return False
cur.setModeAccess((aqsql.AQSql.Edit if cur.first() else aqsql.AQSql.Insert))
cur.refreshBuffer()
cur.setValueBuffer(u"nombre", fil["id"])
cur.setValueBuffer(u"idmodulo", fil["module"])
cur.setValueBuffer(u"sha", fil["shatext"])
if len(fil["text"]) > 0:
if fil["id"].endswith(u".qs"):
cur.setValueBuffer(u"contenido", self.toUnicode(un.getText(), u"iso-8859-15"))
else:
cur.setValueBuffer(u"contenido", un.getText())
if len(fil["binary"]) > 0:
un.getBinary()
return cur.commitBuffer()
@classmethod
def checkProjectName(self, proName: str) -> bool:
"""Return if te project name is valid."""
if not proName or proName is None:
proName = u""
dbProName = flutil.FLUtil.readDBSettingEntry(u"projectname")
if not dbProName:
dbProName = u""
if proName == dbProName:
return True
if not proName == "" and dbProName == "":
return flutil.FLUtil.writeDBSettingEntry(u"projectname", proName)
txt = u""
txt += self.translate(u"¡¡ CUIDADO !! POSIBLE INCOHERENCIA EN LOS MÓDULOS\n\n")
txt += self.translate(u"Está intentando cargar un proyecto o rama de módulos cuyo\n")
txt += self.translate(u"nombre difiere del instalado actualmente en la base de datos.\n")
txt += self.translate(u"Es posible que la estructura de los módulos que quiere cargar\n")
txt += self.translate(
u"sea completamente distinta a la instalada actualmente, y si continua\n"
)
txt += self.translate(
u"podría dañar el código, datos y la estructura de tablas de Eneboo.\n\n"
)
txt += self.translate(u"- Nombre del proyecto instalado: %s\n") % (str(dbProName))
txt += self.translate(u"- Nombre del proyecto a cargar: %s\n\n") % (str(proName))
txt += u"\n\n"
if not self.interactiveGUI():
logger.warning(txt)
return False
txt += self.translate(u"¿Desea continuar?")
return MessageBox.Yes == MessageBox.warning(
txt, MessageBox.No, MessageBox.Yes, MessageBox.NoButton, u"AbanQ"
)
@classmethod
def loadModulesDef(self, un: Any) -> bool:
"""Return QDomDocument with modules definition."""
modulesDef = self.toUnicode(un.getText(), u"utf8")
doc = QtXml.QDomDocument()
if not doc.setContent(modulesDef):
self.errorMsgBox(
self.translate(u"Error XML al intentar cargar la definición de los módulos.")
)
return False
root = doc.firstChild()
if not self.checkProjectName(root.toElement().attribute(u"projectname", u"")):
return False
ok = True
modules = root.childNodes()
flutil.FLUtil.createProgressDialog(self.translate(u"Registrando módulos"), len(modules))
i = 0
while_pass = True
while i < len(modules):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
it = modules.item(i)
mod = {
"id": it.namedItem(u"name").toElement().text(),
"alias": self.trTagText(it.namedItem(u"alias").toElement().text()),
"area": it.namedItem(u"area").toElement().text(),
"areaname": self.trTagText(it.namedItem(u"areaname").toElement().text()),
"version": it.namedItem(u"version").toElement().text(),
}
flutil.FLUtil.setProgress(i)
flutil.FLUtil.setLabelText(
utils_base.ustr(self.translate(u"Registrando módulo"), u" ", mod["id"])
)
if not self.registerArea(mod) or not self.registerModule(mod):
self.errorMsgBox(
utils_base.ustr(self.translate(u"Error registrando el módulo"), u" ", mod["id"])
)
ok = False
break
i += 1
while_pass = True
try:
i < len(modules)
except Exception:
break
flutil.FLUtil.destroyProgressDialog()
return ok
@classmethod
def registerArea(self, mod: Dict[str, Any]) -> bool:
"""Return True if the area is created or False."""
cur = pnsqlcursor.PNSqlCursor(u"flareas")
if not cur.select(utils_base.ustr(u"idarea='", mod["area"], u"'")):
return False
cur.setModeAccess((aqsql.AQSql.Edit if cur.first() else aqsql.AQSql.Insert))
cur.refreshBuffer()
cur.setValueBuffer(u"idarea", mod["area"])
cur.setValueBuffer(u"descripcion", mod["areaname"])
return cur.commitBuffer()
@classmethod
def registerModule(self, mod: Dict[str, Any]) -> bool:
"""Return True if the module is created or False."""
cur = pnsqlcursor.PNSqlCursor(u"flmodules")
if not cur.select(utils_base.ustr(u"idmodulo='", mod["id"], u"'")):
return False
cur.setModeAccess((aqsql.AQSql.Edit if cur.first() else aqsql.AQSql.Insert))
cur.refreshBuffer()
cur.setValueBuffer(u"idmodulo", mod["id"])
cur.setValueBuffer(u"idarea", mod["area"])
cur.setValueBuffer(u"descripcion", mod["alias"])
cur.setValueBuffer(u"version", mod["version"])
return cur.commitBuffer()
@classmethod
def questionMsgBox(
self,
msg: str,
keyRemember: str,
txtRemember: str,
forceShow: bool,
txtCaption: str,
txtYes: str,
txtNo: str,
) -> Any:
"""Return a messagebox result."""
key = u"QuestionMsgBox/"
valRemember = False
if keyRemember:
valRemember = settings.settings.value(key + keyRemember)
if valRemember and not forceShow:
return MessageBox.Yes
if not self.interactiveGUI():
return True
diag = QDialog()
diag.caption = txtCaption if txtCaption else u"Eneboo"
diag.setModal(True)
lay = QVBoxLayout(diag)
# lay.setMargin(6)
lay.setSpacing(6)
lay2 = QHBoxLayout(diag)
# lay2.setMargin(6)
lay2.setSpacing(6)
lblPix = QLabel(diag)
pixmap = AQS.pixmap_fromMimeSource(u"help_index.png")
if pixmap:
lblPix.setPixmap(pixmap)
lblPix.setAlignment(AQS.AlignTop)
lay2.addWidget(lblPix)
lbl = QLabel(diag)
lbl.setText(msg)
lbl.setAlignment(cast(QtCore.Qt.Alignment, AQS.AlignTop | AQS.WordBreak))
lay2.addWidget(lbl)
lay3 = QHBoxLayout(diag)
# lay3.setMargin(6)
lay3.setSpacing(6)
pbYes = QPushButton(diag)
pbYes.setText(txtYes if txtYes else self.translate(u"Sí"))
pbNo = QPushButton(diag)
pbNo.setText(txtNo if txtNo else self.translate(u"No"))
lay3.addWidget(pbYes)
lay3.addWidget(pbNo)
application.connections.connect(pbYes, u"clicked()", diag, u"accept()")
application.connections.connect(pbNo, u"clicked()", diag, u"reject()")
chkRemember = None
if keyRemember and txtRemember:
# from pineboolib.q3widgets.qcheckbox import QCheckBox
chkRemember = QtWidgets.QCheckBox(txtRemember, diag)
chkRemember.setChecked(valRemember)
lay.addWidget(chkRemember)
ret = MessageBox.No if (diag.exec_() == 0) else MessageBox.Yes
if chkRemember is not None:
settings.settings.set_value(key + keyRemember, chkRemember.isChecked())
return ret
@classmethod
def exportModules(self) -> None:
"""Export modules."""
dirBasePath = FileDialog.getExistingDirectory(types.Dir.home)
if not dirBasePath:
return
dataBaseName = application.PROJECT.conn_manager.mainConn()._db_name
dirBasePath = types.Dir.cleanDirPath(
utils_base.ustr(
dirBasePath, u"/modulos_exportados_", dataBaseName[dataBaseName.rfind(u"/") + 1 :]
)
)
dir = types.Dir()
if not dir.fileExists(dirBasePath):
try:
dir.mkdir(dirBasePath)
except Exception:
e = traceback.format_exc()
self.errorMsgBox(utils_base.ustr(u"", e))
return
else:
self.warnMsgBox(
dirBasePath + self.translate(u" ya existe,\ndebe borrarlo antes de continuar")
)
return
qry = pnsqlquery.PNSqlQuery()
qry.setSelect(u"idmodulo")
qry.setFrom(u"flmodules")
if not qry.exec_() or qry.size() == 0:
return
p = 0
flutil.FLUtil.createProgressDialog(self.translate(u"Exportando módulos"), qry.size() - 1)
while qry.next():
idMod = qry.value(0)
if idMod == u"sys":
continue
flutil.FLUtil.setLabelText(idMod)
p += 1
flutil.FLUtil.setProgress(p)
try:
self.exportModule(idMod, dirBasePath)
except Exception:
e = traceback.format_exc()
flutil.FLUtil.destroyProgressDialog()
self.errorMsgBox(utils_base.ustr(u"", e))
return
dbProName = flutil.FLUtil.readDBSettingEntry(u"projectname")
if not dbProName:
dbProName = u""
if not dbProName == "":
doc = QtXml.QDomDocument()
tag = doc.createElement(u"mvproject")
tag.toElement().setAttribute(u"name", dbProName)
doc.appendChild(tag)
try:
types.FileStatic.write(
utils_base.ustr(dirBasePath, u"/mvproject.xml"), doc.toString(2)
)
except Exception:
e = traceback.format_exc()
flutil.FLUtil.destroyProgressDialog()
self.errorMsgBox(utils_base.ustr(u"", e))
return
flutil.FLUtil.destroyProgressDialog()
self.infoMsgBox(self.translate(u"Módulos exportados en:\n") + dirBasePath)
@classmethod
def xmlModule(self, idMod: str) -> QtXml.QDomDocument:
"""Return xml data from a module."""
qry = pnsqlquery.PNSqlQuery()
qry.setSelect(u"descripcion,idarea,version")
qry.setFrom(u"flmodules")
qry.setWhere(utils_base.ustr(u"idmodulo='", idMod, u"'"))
doc = QtXml.QDomDocument(u"MODULE")
if not qry.exec_() or not qry.next():
return doc
tagMod = doc.createElement(u"MODULE")
doc.appendChild(tagMod)
tag = doc.createElement(u"name")
tag.appendChild(doc.createTextNode(idMod))
tagMod.appendChild(tag)
trNoop = u'QT_TRANSLATE_NOOP("Eneboo","%s")'
tag = doc.createElement(u"alias")
tag.appendChild(doc.createTextNode(trNoop % qry.value(0)))
tagMod.appendChild(tag)
idArea = qry.value(1)
tag = doc.createElement(u"area")
tag.appendChild(doc.createTextNode(idArea))
tagMod.appendChild(tag)
areaName = utils_db.sql_select(
u"flareas", u"descripcion", utils_base.ustr(u"idarea='", idArea, u"'")
)
tag = doc.createElement(u"areaname")
tag.appendChild(doc.createTextNode(trNoop % areaName))
tagMod.appendChild(tag)
tag = doc.createElement(u"entryclass")
tag.appendChild(doc.createTextNode(idMod))
tagMod.appendChild(tag)
tag = doc.createElement(u"version")
tag.appendChild(doc.createTextNode(qry.value(2)))
tagMod.appendChild(tag)
tag = doc.createElement(u"icon")
tag.appendChild(doc.createTextNode(utils_base.ustr(idMod, u".xpm")))
tagMod.appendChild(tag)
return doc
@classmethod
def fileWriteIso(self, file_name: str, content: str) -> None:
"""Write data into a file with ISO-8859-15 encode."""
# from PyQt5.QtCore import QtCore.QTextStream
fileISO = types.File(file_name, "ISO8859-15")
fileISO.write(content.encode("ISO8859-15", "ignore"))
# if not fileISO.open(types.File.WriteOnly):
# logger.warning(utils_base.ustr(u"Error abriendo fichero ", fileName, u" para escritura"))
# return False
# tsISO = QtCore.QTextStream(fileISO)
# tsISO.setCodec(AQS.TextCodec_codecForName(u"ISO8859-15"))
# tsISO.opIn(content)
fileISO.close()
@classmethod
def fileWriteUtf8(self, file_name: str, content: str) -> None:
"""Write data into a file with UTF-8 encode."""
# from PyQt5.QtCore import QtCore.QTextStream
fileUTF = types.File(file_name, "UTF-8")
fileUTF.write(content)
# if not fileUTF.open(types.File.WriteOnly):
# logger.warning(utils_base.ustr(u"Error abriendo fichero ", fileName, u" para escritura"))
# return False
# tsUTF = QtCore.QTextStream(fileUTF.ioDevice)
# tsUTF.setCodec(AQS.TextCodec_codecForName(u"utf8"))
# tsUTF.opIn(content)
fileUTF.close()
@classmethod
def exportModule(self, idMod: str, dirBasePath: str) -> None:
"""Export a module to a directory."""
dir = types.Dir()
dirPath = types.Dir.cleanDirPath(utils_base.ustr(dirBasePath, u"/", idMod))
if not dir.fileExists(dirPath):
dir.mkdir(dirPath)
if not dir.fileExists(utils_base.ustr(dirPath, u"/forms")):
dir.mkdir(utils_base.ustr(dirPath, u"/forms"))
if not dir.fileExists(utils_base.ustr(dirPath, u"/scripts")):
dir.mkdir(utils_base.ustr(dirPath, u"/scripts"))
if not dir.fileExists(utils_base.ustr(dirPath, u"/queries")):
dir.mkdir(utils_base.ustr(dirPath, u"/queries"))
if not dir.fileExists(utils_base.ustr(dirPath, u"/tables")):
dir.mkdir(utils_base.ustr(dirPath, u"/tables"))
if not dir.fileExists(utils_base.ustr(dirPath, u"/reports")):
dir.mkdir(utils_base.ustr(dirPath, u"/reports"))
if not dir.fileExists(utils_base.ustr(dirPath, u"/translations")):
dir.mkdir(utils_base.ustr(dirPath, u"/translations"))
xmlMod = self.xmlModule(idMod)
self.fileWriteIso(utils_base.ustr(dirPath, u"/", idMod, u".mod"), xmlMod.toString(2))
xpmMod = utils_db.sql_select(
u"flmodules", u"icono", utils_base.ustr(u"idmodulo='", idMod, u"'")
)
self.fileWriteIso(utils_base.ustr(dirPath, u"/", idMod, u".xpm"), xpmMod)
qry = pnsqlquery.PNSqlQuery()
qry.setSelect(u"nombre,contenido")
qry.setFrom(u"flfiles")
qry.setWhere(utils_base.ustr(u"idmodulo='", idMod, u"'"))
if not qry.exec_() or qry.size() == 0:
return
while qry.next():
name = qry.value(0)
content = qry.value(1)
type = name[(len(name) - (len(name) - name.rfind(u"."))) :]
if content == "":
continue
s02_when = type
s02_do_work, s02_work_done = False, False
if s02_when == u".xml":
s02_do_work, s02_work_done = True, True
if s02_do_work:
self.fileWriteIso(utils_base.ustr(dirPath, u"/", name), content)
s02_do_work = False # BREAK
if s02_when == u".ui":
s02_do_work, s02_work_done = True, True
if s02_do_work:
self.fileWriteIso(utils_base.ustr(dirPath, u"/forms/", name), content)
s02_do_work = False # BREAK
if s02_when == u".qs":
s02_do_work, s02_work_done = True, True
if s02_do_work:
self.fileWriteIso(utils_base.ustr(dirPath, u"/scripts/", name), content)
s02_do_work = False # BREAK
if s02_when == u".qry":
s02_do_work, s02_work_done = True, True
if s02_do_work:
self.fileWriteIso(utils_base.ustr(dirPath, u"/queries/", name), content)
s02_do_work = False # BREAK
if s02_when == u".mtd":
s02_do_work, s02_work_done = True, True
if s02_do_work:
self.fileWriteIso(utils_base.ustr(dirPath, u"/tables/", name), content)
s02_do_work = False # BREAK
if s02_when == u".kut":
s02_do_work, s02_work_done = True, True
if s02_do_work:
pass
if s02_when == u".ar":
s02_do_work, s02_work_done = True, True
if s02_do_work:
pass
if s02_when == u".jrxml":
s02_do_work, s02_work_done = True, True
if s02_do_work:
pass
if s02_when == u".svg":
s02_do_work, s02_work_done = True, True
if s02_do_work:
self.fileWriteIso(utils_base.ustr(dirPath, u"/reports/", name), content)
s02_do_work = False # BREAK
if s02_when == u".ts":
s02_do_work, s02_work_done = True, True # noqa
if s02_do_work:
self.fileWriteIso(utils_base.ustr(dirPath, u"/translations/", name), content)
s02_do_work = False # BREAK
@classmethod
def importModules(self, warnBackup: bool = True) -> None:
"""Import modules from a directory."""
if warnBackup and self.interactiveGUI():
txt = u""
txt += self.translate(u"Asegúrese de tener una copia de seguridad de todos los datos\n")
txt += self.translate(u"y de que no hay ningun otro usuario conectado a la base de\n")
txt += self.translate(u"datos mientras se realiza la importación.\n\n")
txt += self.translate(u"Obtenga soporte en")
txt += u" http://www.infosial.com\n(c) InfoSiAL S.L."
txt += u"\n\n"
txt += self.translate(u"¿Desea continuar?")
if MessageBox.Yes != MessageBox.warning(txt, MessageBox.No, MessageBox.Yes):
return
key = utils_base.ustr(u"scripts/sys/modLastDirModules_", self.nameBD())
dirAnt = settings.settings.value(key)
dirMods = FileDialog.getExistingDirectory(
str(dirAnt) if dirAnt else None, self.translate(u"Directorio de Módulos")
)
if not dirMods:
return
dirMods = types.Dir.cleanDirPath(dirMods)
dirMods = types.Dir.convertSeparators(dirMods)
QtCore.QDir.setCurrent(dirMods) # change current directory
listFilesMod = self.selectModsDialog(flutil.FLUtil.findFiles(dirMods, u"*.mod", False))
flutil.FLUtil.createProgressDialog(self.translate(u"Importando"), len(listFilesMod))
flutil.FLUtil.setProgress(1)
i = 0
while_pass = True
while i < len(listFilesMod):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
flutil.FLUtil.setLabelText(listFilesMod[i])
flutil.FLUtil.setProgress(i)
if not self.importModule(listFilesMod[i]):
self.errorMsgBox(self.translate(u"Error al cargar el módulo:\n") + listFilesMod[i])
break
i += 1
while_pass = True
try:
i < len(listFilesMod)
except Exception:
break
flutil.FLUtil.destroyProgressDialog()
flutil.FLUtil.writeSettingEntry(key, dirMods)
self.infoMsgBox(self.translate(u"Importación de módulos finalizada."))
AQTimer.singleShot(0, self.reinit)
@classmethod
def selectModsDialog(self, listFilesMod: List = []) -> types.Array:
"""Select modules dialog."""
dialog = Dialog()
dialog.okButtonText = self.translate(u"Aceptar")
dialog.cancelButtonText = self.translate(u"Cancelar")
bgroup = QtWidgets.QGroupBox()
bgroup.setTitle(self.translate(u"Seleccione módulos a importar"))
dialog.add(bgroup)
res = types.Array()
cB = types.Array()
i = 0
while_pass = True
while i < len(listFilesMod):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
cB[i] = QtWidgets.QCheckBox()
# bgroup.add(cB[i])
cB[i].text = listFilesMod[i]
cB[i].checked = True
i += 1
while_pass = True
try:
i < len(listFilesMod)
except Exception:
break
idx = 0
if self.interactiveGUI() and dialog.exec_():
i = 0
while_pass = True
while i < len(listFilesMod):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
if cB[i].checked:
res[idx] = listFilesMod[i]
idx += 1
i += 1
while_pass = True
try:
i < len(listFilesMod)
except Exception:
break
return res
@classmethod
def importModule(self, modPath: str) -> bool:
"""Import a module specified by name."""
try:
with open(modPath, "r", encoding="ISO8859-15") as fileMod:
contentMod = fileMod.read()
except Exception:
e = traceback.format_exc()
self.errorMsgBox(utils_base.ustr(self.translate(u"Error leyendo fichero."), u"\n", e))
return False
mod_folder = os.path.dirname(modPath)
mod = None
xmlMod = QtXml.QDomDocument()
if xmlMod.setContent(contentMod):
nodeMod = xmlMod.namedItem(u"MODULE")
if not nodeMod:
self.errorMsgBox(self.translate(u"Error en la carga del fichero xml .mod"))
return False
mod = {
"id": (nodeMod.namedItem(u"name").toElement().text()),
"alias": (self.trTagText(nodeMod.namedItem(u"alias").toElement().text())),
"area": (nodeMod.namedItem(u"area").toElement().text()),
"areaname": (self.trTagText(nodeMod.namedItem(u"areaname").toElement().text())),
"version": (nodeMod.namedItem(u"version").toElement().text()),
}
if not self.registerArea(mod) or not self.registerModule(mod):
self.errorMsgBox(
utils_base.ustr(self.translate(u"Error registrando el módulo"), u" ", mod["id"])
)
return False
if not self.importFiles(mod_folder, u"*.xml", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.ui", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.qs", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.qry", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.mtd", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.kut", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.ar", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.jrxml", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.svg", mod["id"]):
return False
if not self.importFiles(mod_folder, u"*.ts", mod["id"]):
return False
else:
self.errorMsgBox(self.translate(u"Error en la carga del fichero xml .mod"))
return False
return True
@classmethod
def importFiles(self, dir_path_: str, ext: str, id_module_: str) -> bool:
"""Import files with a exension from a path."""
ok = True
util = flutil.FLUtil()
list_files_ = util.findFiles(dir_path_, ext, False)
util.createProgressDialog(self.translate(u"Importando"), len(list_files_))
util.setProgress(1)
i = 0
while_pass = True
while i < len(list_files_):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
util.setLabelText(list_files_[i])
util.setProgress(i)
if not self.importFile(list_files_[i], id_module_):
self.errorMsgBox(self.translate(u"Error al cargar :\n") + list_files_[i])
ok = False
break
i += 1
while_pass = True
try:
i < len(list_files_)
except Exception:
break
util.destroyProgressDialog()
return ok
@classmethod
def importFile(self, file_path_: str, id_module_: str) -> bool:
"""Import a file from a path."""
file = types.File(file_path_)
content = u""
try:
file.open(types.File.ReadOnly)
content = str(file.read())
except Exception:
e = traceback.format_exc()
self.errorMsgBox(utils_base.ustr(self.translate(u"Error leyendo fichero."), u"\n", e))
return False
ok = True
name = file.name
if (
not flutil.FLUtil.isFLDefFile(content)
and not name.endswith(u".qs")
and not name.endswith(u".ar")
and not name.endswith(u".svg")
) or name.endswith(u"untranslated.ts"):
return ok
cur = pnsqlcursor.PNSqlCursor(u"flfiles")
cur.select(utils_base.ustr(u"nombre = '", name, u"'"))
if not cur.first():
if name.endswith(u".ar"):
if not self.importReportAr(file_path_, id_module_, content):
return True
cur.setModeAccess(aqsql.AQSql.Insert)
cur.refreshBuffer()
cur.setValueBuffer(u"nombre", name)
cur.setValueBuffer(u"idmodulo", id_module_)
ba = QByteArray()
ba.string = content
cur.setValueBuffer(u"sha", ba.sha1())
cur.setValueBuffer(u"contenido", content)
ok = cur.commitBuffer()
else:
cur.setModeAccess(aqsql.AQSql.Edit)
cur.refreshBuffer()
ba = QByteArray()
ba.string = content
shaCnt = ba.sha1()
if cur.valueBuffer(u"sha") != shaCnt:
contenidoCopia = cur.valueBuffer(u"contenido")
cur.setModeAccess(aqsql.AQSql.Insert)
cur.refreshBuffer()
d = types.Date()
cur.setValueBuffer(u"nombre", name + str(d))
cur.setValueBuffer(u"idmodulo", id_module_)
cur.setValueBuffer(u"contenido", contenidoCopia)
cur.commitBuffer()
cur.select(utils_base.ustr(u"nombre = '", name, u"'"))
cur.first()
cur.setModeAccess(aqsql.AQSql.Edit)
cur.refreshBuffer()
cur.setValueBuffer(u"idmodulo", id_module_)
cur.setValueBuffer(u"sha", shaCnt)
cur.setValueBuffer(u"contenido", content)
ok = cur.commitBuffer()
if name.endswith(u".ar"):
if not self.importReportAr(file_path_, id_module_, content):
return True
return ok
@classmethod
def importReportAr(self, file_path_: str, id_module_: str, content: str) -> bool:
"""Import a report file, convert and install."""
from pineboolib.application.safeqsa import SafeQSA
if not self.isLoadedModule(u"flar2kut"):
return False
if settings.settings.value(u"scripts/sys/conversionAr") != u"true":
return False
content = self.toUnicode(content, u"UTF-8")
content = SafeQSA.root_module("flar2kut").iface.pub_ar2kut(content)
file_path_ = utils_base.ustr(file_path_[0 : len(file_path_) - 3], u".kut")
if content:
localEnc = settings.settings.value(u"scripts/sys/conversionArENC")
if not localEnc:
localEnc = u"ISO-8859-15"
content = self.fromUnicode(content, localEnc)
f = types.FileStatic()
try:
f.write(file_path_, content)
except Exception:
e = traceback.format_exc()
self.errorMsgBox(
utils_base.ustr(self.translate(u"Error escribiendo fichero."), u"\n", e)
)
return False
return self.importFile(file_path_, id_module_)
return False
@classmethod
def runTransaction(self, f: Callable, oParam: Dict[str, Any]) -> Any:
"""Run a Transaction."""
roll_back_: bool = False
error_msg_: str = ""
valor_: Any
db_ = application.PROJECT.conn_manager.useConn("default")
transaction_level_ = db_.transactionLevel()
# Create Transaction.
if transaction_level_ == 0:
db_.transaction()
else:
db_.savePoint(transaction_level_)
db_._transaction += 1
if self.interactiveGUI():
QtWidgets.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
try:
valor_ = f(oParam)
if "errorMsg" in oParam:
error_msg_ = oParam["errorMsg"]
if not valor_:
roll_back_ = True
except Exception:
e = traceback.format_exc(limit=-6, chain=False)
roll_back_ = True
valor_ = False
if error_msg_ == "":
error_msg_ = self.translate("Error al ejecutar la función")
error_msg_ = "%s:\n%s" % (error_msg_, error_manager(e))
db_._transaction -= 1
if roll_back_: # do RollBack
if error_msg_ != "":
self.warnMsgBox(error_msg_)
if transaction_level_ == 0:
db_.rollbackTransaction()
else:
db_.rollbackSavePoint(transaction_level_)
else: # do Commit
if transaction_level_ == 0:
db_.commit()
else:
db_.releaseSavePoint(transaction_level_)
if self.interactiveGUI():
AQS.Application_restoreOverrideCursor()
return valor_
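    # Hedged usage sketch for runTransaction (illustrative, not executed); the
    # callable and the keys used in `oParam` are assumptions:
    #
    #   def do_work(params):
    #       # perform cursor operations here; return True on success
    #       return True
    #
    #   ok = SysType.runTransaction(do_work, {"errorMsg": ""})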
@classmethod
def search_git_updates(self, url: str) -> None:
"""Search updates of pineboo."""
if not os.path.exists(utils_base.filedir("../.git")):
return
if not url:
url = settings.settings.value(
"ebcomportamiento/git_updates_repo", "https://github.com/Aulla/pineboo.git"
)
command = "git status %s" % url
pro = process.Process()
pro.execute(command)
if pro.stdout is None:
return
# print("***", pro.stdout)
if pro.stdout.find("git pull") > -1:
if MessageBox.Yes != MessageBox.warning(
"Hay nuevas actualizaciones disponibles para Pineboo. ¿Desea actualizar?",
MessageBox.No,
MessageBox.Yes,
):
return
pro.execute("git pull %s" % url)
MessageBox.information(
"Pineboo se va a reiniciar ahora",
MessageBox.Ok,
MessageBox.NoButton,
MessageBox.NoButton,
u"Eneboo",
)
# os.execl(executable, os.path.abspath(__file__)) #FIXME
@classmethod
def qsaExceptions(self):
"""Return QSA exceptions found."""
return flapplication.aqApp.db().qsaExceptions()
@classmethod
@decorators.NotImplementedWarn
def serverTime(self) -> str:
"""Return time from database."""
# FIXME: QSqlSelectCursor is not defined. Was an internal of Qt3.3
return ""
# db = aqApp.db().db()
# sql = u"select current_time"
# ahora = None
# q = QSqlSelectCursor(sql, db)
# if q.isActive() and q.next():
# ahora = q.value(0)
# return ahora
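        # A possible reimplementation sketch (an assumption, untested) using the
        # pnsqlquery API already imported by this module:
        #   qry = pnsqlquery.PNSqlQuery()
        #   if qry.exec_("select current_time") and qry.first():
        #       return str(qry.value(0))
        #   return ""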
@classmethod
def localChanges(self) -> Dict[str, Any]:
"""Return xml with local changes."""
ret = {}
ret[u"size"] = 0
strXmlUpt = utils_db.sql_select("flupdates", "filesdef", "actual='true'")
if not strXmlUpt:
return ret
docUpt = QtXml.QDomDocument()
if not docUpt.setContent(strXmlUpt):
self.errorMsgBox(
self.translate(u"Error XML al intentar cargar la definición de los ficheros.")
)
return ret
docBd = self.xmlFilesDefBd()
ret = self.diffXmlFilesDef(docBd, docUpt)
return ret
@classmethod
def interactiveGUI(self):
"""Return interactiveGUI."""
return flapplication.aqApp.db().mainConn().interactiveGUI()
@classmethod
def getWidgetList(self, container: str, control_name: str) -> str:
"""Get widget list from a widget."""
obj_class: Any = None
if control_name == "FLFieldDB":
obj_class = flfielddb.FLFieldDB
elif control_name == "FLTableDB":
obj_class = fltabledb.FLTableDB
elif control_name == "Button":
control_name = "QPushButton"
if obj_class is None:
obj_class = getattr(QtWidgets, control_name, None)
if obj_class is None:
raise Exception("obj_class is empty!")
w = None
a = None
conn = application.PROJECT._conn_manager
if conn is None:
raise Exception("conn is empty!")
if container[0:10] == "formRecord":
action_ = container[10:]
a = conn.manager().action(action_)
if a.formRecord():
w = conn.managerModules().createFormRecord(a)
elif container[0:10] == "formSearch":
action_ = container[10:]
a = conn.manager().action(action_)
if a.form():
w = conn.managerModules().createForm(a)
else:
action_ = container[4:]
a = conn.manager().action(action_)
if a.form():
w = conn.managerModules().createForm(a)
if w is None:
return ""
object_list = w.findChildren(obj_class)
retorno_: str = ""
for obj in object_list:
name_ = obj.objectName()
if name_ == "":
continue
if control_name == "FLFieldDB":
field_table_ = cast(flfielddb.FLFieldDB, obj).tableName()
if field_table_ and field_table_ != a.table():
continue
retorno_ += "%s/%s*" % (name_, cast(flfielddb.FLFieldDB, obj).fieldName())
elif control_name == "FLTableDB":
retorno_ += "%s/%s*" % (name_, cast(fltabledb.FLTableDB, obj).tableName())
elif control_name in ["QPushButton", "Button"]:
if name_ in ["pushButtonDB", "pbAux", "qt_left_btn", "qt_right_btn"]:
continue
retorno_ += "%s/%s*" % (name_, obj.objectName())
else:
if name_ in [
"textLabelDB",
"componentDB",
"tab_pages",
"editor",
"FrameFind",
"TextLabelSearch",
"TextLabelIn",
"lineEditSearch",
"in-combo",
"voidTable",
]:
continue
if isinstance(obj, QtWidgets.QGroupBox):
retorno_ += "%s/%s*" % (name_, obj.title())
else:
retorno_ += "%s/*" % (name_)
return retorno_
class AbanQDbDumper(QtCore.QObject):
"""AbanqDbDumper class."""
SEP_CSV = u"\u00b6"
db_: "iconnection.IConnection"
showGui_: bool
dirBase_: str
fileName_: str
w_: QDialog
lblDirBase_: QLabel
pbChangeDir_: QPushButton
tedLog_: QTextEdit
pbInitDump_: QPushButton
state_: types.Array
funLog_: Callable
proc_: process.Process
def __init__(
self,
db: Optional["iconnection.IConnection"] = None,
dirBase: Optional[str] = None,
showGui: bool = True,
fun_log: Optional[Callable] = None,
):
"""Inicialize."""
self.funLog_ = self.addLog if fun_log is None else fun_log # type: ignore
self.db_ = flapplication.aqApp.db() if db is None else db
self.showGui_ = showGui
self.dirBase_ = types.Dir.home if dirBase is None else dirBase
self.fileName_ = self.genFileName()
self.encoding = sys.getdefaultencoding()
self.state_ = types.Array()
def init(self) -> None:
"""Inicialize dump dialog."""
if self.showGui_:
self.buildGui()
self.w_.exec_()
def buildGui(self) -> None:
"""Build a Dialog for database dump."""
self.w_ = QDialog()
self.w_.caption = SysType.translate(u"Copias de seguridad")
self.w_.setModal(True)
self.w_.resize(800, 600)
# lay = QVBoxLayout(self.w_, 6, 6)
lay = QVBoxLayout(self.w_)
frm = QtWidgets.QFrame(self.w_)
frm.setFrameShape(QtWidgets.QFrame.Box)
frm.setLineWidth(1)
frm.setFrameShadow(QtWidgets.QFrame.Plain)
# layFrm = QVBoxLayout(frm, 6, 6)
layFrm = QVBoxLayout(frm)
lbl = QLabel(frm)
lbl.setText(
SysType.translate(u"Driver: %s")
% (str(self.db_.driverNameToDriverAlias(self.db_.driverName())))
)
lbl.setAlignment(QtCore.Qt.AlignTop)
layFrm.addWidget(lbl)
lbl = QLabel(frm)
lbl.setText(SysType.translate(u"Base de datos: %s") % (str(self.db_.database())))
lbl.setAlignment(QtCore.Qt.AlignTop)
layFrm.addWidget(lbl)
lbl = QLabel(frm)
lbl.setText(SysType.translate(u"Host: %s") % (str(self.db_.host())))
lbl.setAlignment(QtCore.Qt.AlignTop)
layFrm.addWidget(lbl)
lbl = QLabel(frm)
lbl.setText(SysType.translate(u"Puerto: %s") % (str(self.db_.port())))
lbl.setAlignment(QtCore.Qt.AlignTop)
layFrm.addWidget(lbl)
lbl = QLabel(frm)
lbl.setText(SysType.translate(u"Usuario: %s") % (str(self.db_.user())))
lbl.setAlignment(QtCore.Qt.AlignTop)
layFrm.addWidget(lbl)
layAux = QHBoxLayout()
layFrm.addLayout(layAux)
self.lblDirBase_ = QLabel(frm)
self.lblDirBase_.setText(
SysType.translate(u"Directorio Destino: %s") % (str(self.dirBase_))
)
self.lblDirBase_.setAlignment(QtCore.Qt.AlignVCenter)
layAux.addWidget(self.lblDirBase_)
self.pbChangeDir_ = QPushButton(SysType.translate(u"Cambiar"), frm)
self.pbChangeDir_.setSizePolicy(
QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred
)
application.connections.connect(self.pbChangeDir_, u"clicked()", self, u"changeDirBase()")
layAux.addWidget(self.pbChangeDir_)
lay.addWidget(frm)
self.pbInitDump_ = QPushButton(SysType.translate(u"INICIAR COPIA"), self.w_)
application.connections.connect(self.pbInitDump_, u"clicked()", self, u"initDump()")
lay.addWidget(self.pbInitDump_)
lbl = QLabel(self.w_)
lbl.setText("Log:")
lay.addWidget(lbl)
self.tedLog_ = QTextEdit(self.w_)
self.tedLog_.setTextFormat(QTextEdit.LogText)
self.tedLog_.setAlignment(
cast(QtCore.Qt.Alignment, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
)
lay.addWidget(self.tedLog_)
def initDump(self) -> None:
"""Inicialize dump."""
gui = self.showGui_ and self.w_ is not None
if gui:
self.w_.enable = False
self.dumpDatabase()
if gui:
self.w_.enable = True
if self.state_.ok:
if gui:
SysType.infoMsgBox(self.state_.msg)
self.w_.close()
else:
if gui:
SysType.errorMsgBox(self.state_.msg)
def genFileName(self) -> str:
"""Return a file name."""
now = types.Date()
timeStamp = str(now)
regExp = ["-", ":"]
# regExp.global_ = True
for rE in regExp:
timeStamp = timeStamp.replace(rE, u"")
fileName = "%s/dump_%s_%s" % (self.dirBase_, self.db_.database(), timeStamp)
fileName = types.Dir.cleanDirPath(fileName)
fileName = types.Dir.convertSeparators(fileName)
return fileName
def changeDirBase(self, dir_: Optional[str] = None) -> None:
"""Change base dir."""
dirBasePath = dir_
if not dirBasePath:
dirBasePath = FileDialog.getExistingDirectory(self.dirBase_)
if not dirBasePath:
return
self.dirBase_ = dirBasePath
if self.showGui_ and self.lblDirBase_ is not None:
self.lblDirBase_.setText(
SysType.translate(u"Directorio Destino: %s") % (str(self.dirBase_))
)
self.fileName_ = self.genFileName()
def addLog(self, msg: str) -> None:
"""Add a text to log."""
if self.showGui_ and self.tedLog_ is not None:
self.tedLog_.append(msg)
else:
logger.warning(msg)
def setState(self, ok: int, msg: str) -> None:
"""Set state."""
self.state_.ok = ok
self.state_.msg = msg
def state(self) -> types.Array:
"""Return state."""
return self.state_
    def launchProc(self, command: List[str]) -> bool:
        """Launch a command and return True if it exited normally."""
self.proc_ = process.Process()
self.proc_.setProgram(command[0])
self.proc_.setArguments(command[1:])
        # FIXME: Improve reading line by line
cast(QtCore.pyqtSignal, self.proc_.readyReadStandardOutput).connect(self.readFromStdout)
cast(QtCore.pyqtSignal, self.proc_.readyReadStandardError).connect(self.readFromStderr)
self.proc_.start()
while self.proc_.running:
SysType.processEvents()
return self.proc_.exitcode() == self.proc_.normalExit
def readFromStdout(self) -> None:
"""Read data from stdOutput."""
t = (
self.proc_.readLine() # type: ignore[attr-defined] # noqa : F821
.data()
.decode(self.encoding)
)
if t not in (None, ""):
self.funLog_(t)
def readFromStderr(self) -> None:
"""Read data from stdError."""
t = (
self.proc_.readLine() # type: ignore[attr-defined] # noqa : F821
.data()
.decode(self.encoding)
)
if t not in (None, ""):
self.funLog_(t)
def dumpDatabase(self) -> bool:
"""Dump database to target specified by sql driver class."""
driver = self.db_.driverName()
typeBd = 0
if driver.find("PSQL") > -1:
typeBd = 1
        elif driver.find("MYSQL") > -1:
            typeBd = 2
if typeBd == 0:
self.setState(
False,
SysType.translate(u"Este tipo de base de datos no soporta el volcado a disco."),
)
self.funLog_(self.state_.msg)
self.dumpAllTablesToCsv()
return False
file = types.File(self.fileName_) # noqa
try:
if not os.path.exists(self.fileName_):
dir_ = types.Dir(self.fileName_) # noqa
except Exception:
e = traceback.format_exc()
self.setState(False, utils_base.ustr(u"", e))
self.funLog_(self.state_.msg)
return False
ok = True
if typeBd == 1:
ok = self.dumpPostgreSQL()
if typeBd == 2:
ok = self.dumpMySQL()
if not ok:
self.dumpAllTablesToCsv()
if not ok:
self.setState(
False, SysType.translate(u"No se ha podido realizar la copia de seguridad.")
)
self.funLog_(self.state_.msg)
else:
self.setState(
True,
SysType.translate(u"Copia de seguridad realizada con éxito en:\n%s.sql")
% (str(self.fileName_)),
)
self.funLog_(self.state_.msg)
return ok
def dumpPostgreSQL(self) -> bool:
"""Dump database to PostgreSql file."""
pgDump: str = u"pg_dump"
command: List[str]
fileName = "%s.sql" % self.fileName_
db = self.db_
if SysType.osName() == u"WIN32":
pgDump += u".exe"
System.setenv(u"PGPASSWORD", db.returnword())
command = [
pgDump,
u"-f",
fileName,
u"-h",
db.host() or "",
u"-p",
str(db.port() or 0),
u"-U",
db.user() or "",
str(db.database()),
]
else:
System.setenv(u"PGPASSWORD", db.returnword())
command = [
pgDump,
u"-v",
u"-f",
fileName,
u"-h",
db.host() or "",
u"-p",
str(db.port() or 0),
u"-U",
db.user() or "",
str(db.database()),
]
if not self.launchProc(command):
self.setState(
False,
SysType.translate(u"No se ha podido volcar la base de datos a disco.\n")
+ SysType.translate(u"Es posible que no tenga instalada la herramienta ")
+ pgDump,
)
self.funLog_(self.state_.msg)
return False
self.setState(True, u"")
return True
def dumpMySQL(self) -> bool:
"""Dump database to MySql file."""
myDump: str = u"mysqldump"
command: List[str]
fileName = utils_base.ustr(self.fileName_, u".sql")
db = self.db_
if SysType.osName() == u"WIN32":
myDump += u".exe"
command = [
myDump,
u"-v",
utils_base.ustr(u"--result-file=", fileName),
utils_base.ustr(u"--host=", db.host()),
utils_base.ustr(u"--port=", db.port()),
utils_base.ustr(u"--password=", db.returnword()),
utils_base.ustr(u"--user=", db.user()),
str(db.database()),
]
else:
command = [
myDump,
u"-v",
utils_base.ustr(u"--result-file=", fileName),
utils_base.ustr(u"--host=", db.host()),
utils_base.ustr(u"--port=", db.port()),
utils_base.ustr(u"--password=", db.returnword()),
utils_base.ustr(u"--user=", db.user()),
str(db.database()),
]
if not self.launchProc(command):
self.setState(
False,
SysType.translate(u"No se ha podido volcar la base de datos a disco.\n")
+ SysType.translate(u"Es posible que no tenga instalada la herramienta ")
+ myDump,
)
self.funLog_(self.state_.msg)
return False
self.setState(True, u"")
return True
def dumpTableToCsv(self, table: str, dirBase: str) -> bool:
"""Dump a table to a CSV."""
fileName = utils_base.ustr(dirBase, table, u".csv")
file = types.File(fileName)
if not file.open(types.File.WriteOnly):
return False
ts = QtCore.QTextStream(file.ioDevice())
ts.setCodec(AQS.TextCodec_codecForName(u"utf8"))
qry = pnsqlquery.PNSqlQuery()
qry.setSelect(utils_base.ustr(table, u".*"))
qry.setFrom(table)
if not qry.exec_():
return False
rec = u""
fieldNames = qry.fieldList()
i = 0
while_pass = True
while i < len(fieldNames):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
if i > 0:
rec += self.SEP_CSV
rec += fieldNames[i]
i += 1
while_pass = True
try:
i < len(fieldNames)
except Exception:
break
ts.device().write(utils_base.ustr(rec, u"\n").encode())
# ts.opIn(utils_base.ustr(rec, u"\n"))
flutil.FLUtil.createProgressDialog(
SysType.translate(u"Haciendo copia en CSV de ") + table, qry.size()
)
p = 0
while qry.next():
rec = u""
i = 0
while_pass = True
while i < len(fieldNames):
if not while_pass:
i += 1
while_pass = True
continue
while_pass = False
if i > 0:
rec += self.SEP_CSV
rec += str(qry.value(i))
i += 1
while_pass = True
try:
i < len(fieldNames)
except Exception:
break
# ts.opIn(utils_base.ustr(rec, u"\n"))
ts.device().write(utils_base.ustr(rec, u"\n").encode())
p += 1
flutil.FLUtil.setProgress(p)
file.close()
flutil.FLUtil.destroyProgressDialog()
return True
def dumpAllTablesToCsv(self) -> bool:
"""Dump all tables to a csv files."""
fileName = self.fileName_
tables = self.db_.tables(aqsql.AQSql.TableType.Tables)
dir_ = types.Dir(fileName)
dir_.mkdir()
dirBase = types.Dir.convertSeparators(utils_base.ustr(fileName, u"/"))
# i = 0
# while_pass = True
for table_ in tables:
self.dumpTableToCsv(table_, dirBase)
return True
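# --- Illustrative sketch (not part of this class) -----------------------------
# The pg_dump invocation built by dumpPostgreSQL() above, redone with only the
# standard library.  Host, port, user, password and database name are
# placeholder assumptions, not values taken from this project.
#
#   import os
#   import subprocess
#
#   env = dict(os.environ, PGPASSWORD="secret")  # hypothetical credentials
#   subprocess.run(
#       ["pg_dump", "-f", "/tmp/dump.sql",
#        "-h", "localhost", "-p", "5432",
#        "-U", "postgres", "mydb"],
#       env=env,
#       check=True,
#   )
#
# The dialog above additionally streams the tool's output into its log widget;
# a standalone script could approximate that with capture_output=True.
# -------------------------------------------------------------------------------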
| 35.659928 | 102 | 0.537523 |
hexsha: 794819aba2dc129ff9c7fb65919bc100f82aae4c | size: 1,879 | ext: py | lang: Python
max_stars_repo: app.py | ohst19/ohst19.github.io | 48af75ca7dbc4002b3222ed1499250da296a6d38 | ["MIT"] | stars: 1 | 2021-07-08T09:07:34.000Z | 2021-07-08T09:07:34.000Z
max_issues_repo: app.py | ohst19/ohst19.github.io | 48af75ca7dbc4002b3222ed1499250da296a6d38 | ["MIT"] | issues: null | null | null
max_forks_repo: app.py | ohst19/ohst19.github.io | 48af75ca7dbc4002b3222ed1499250da296a6d38 | ["MIT"] | forks: 2 | 2020-10-25T16:31:48.000Z | 2021-02-27T14:17:26.000Z
# MIT License
# Copyright (c) 2020 Shrid Pant
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from flask import Flask
from flask_session import Session
from werkzeug.exceptions import HTTPException, InternalServerError
from src.auth import auth, login_required
from src.helpers import UserInfo, error
from src.profile import profile
from src.search import search
from src.home import home
app = Flask(__name__)
app.config.from_object("config")
app.register_blueprint(auth, url_prefix="/")
app.register_blueprint(profile, url_prefix="/")
app.register_blueprint(home, url_prefix="/")
app.register_blueprint(search, url_prefix="/")
Session(app)
@app.errorhandler(Exception)
def errorhandler(e):
print(str(e))
if not isinstance(e, HTTPException):
e = InternalServerError()
return error(e.name, e.code)
if __name__ == "__main__":
app.run()
| 37.58 | 80 | 0.771687 |
hexsha: 79481caf788ea3b341fd5b36db204b54eb960805 | size: 39,754 | ext: py | lang: Python
max_stars_repo: tensorflow_probability/python/distributions/hidden_markov_model.py | fehiepsi/probability | 9deb067891a974ec9ee1636167b0b0fce9f44a40 | ["Apache-2.0"] | stars: 1 | 2020-05-16T13:00:01.000Z | 2020-05-16T13:00:01.000Z
max_issues_repo: tensorflow_probability/python/distributions/hidden_markov_model.py | fehiepsi/probability | 9deb067891a974ec9ee1636167b0b0fce9f44a40 | ["Apache-2.0"] | issues: null | null | null
max_forks_repo: tensorflow_probability/python/distributions/hidden_markov_model.py | fehiepsi/probability | 9deb067891a974ec9ee1636167b0b0fce9f44a40 | ["Apache-2.0"] | forks: null | null | null
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The HiddenMarkovModel distribution class."""
from __future__ import absolute_import
from __future__ import division
import tensorflow as tf
from tensorflow_probability.python.distributions import categorical
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import seed_stream
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import distribution_util as util
__all__ = [
"HiddenMarkovModel",
]
class HiddenMarkovModel(distribution.Distribution):
"""Hidden Markov model distribution.
The `HiddenMarkovModel` distribution implements a (batch of) hidden
Markov models where the initial states, transition probabilities
and observed states are all given by user-provided distributions.
This model assumes that the transition matrices are fixed over time.
In this model, there is a sequence of integer-valued hidden states:
`z[0], z[1], ..., z[num_steps - 1]` and a sequence of observed states:
`x[0], ..., x[num_steps - 1]`.
The distribution of `z[0]` is given by `initial_distribution`.
The conditional probability of `z[i + 1]` given `z[i]` is described by
the batch of distributions in `transition_distribution`.
For a batch of hidden Markov models, the coordinates before the rightmost one
of the `transition_distribution` batch correspond to indices into the hidden
Markov model batch. The rightmost coordinate of the batch is used to select
which distribution `z[i + 1]` is drawn from. The distributions corresponding
to the probability of `z[i + 1]` conditional on `z[i] == k` is given by the
elements of the batch whose rightmost coordinate is `k`.
  Similarly, the conditional distribution of `x[i]` given `z[i]` is given by
the batch of `observation_distribution`.
When the rightmost coordinate of `observation_distribution` is `k` it
gives the conditional probabilities of `x[i]` given `z[i] == k`.
The probability distribution associated with the `HiddenMarkovModel`
distribution is the marginal distribution of `x[0],...,x[num_steps - 1]`.
#### Examples
```python
tfd = tfp.distributions
# A simple weather model.
# Represent a cold day with 0 and a hot day with 1.
# Suppose the first day of a sequence has a 0.8 chance of being cold.
# We can model this using the categorical distribution:
initial_distribution = tfd.Categorical(probs=[0.8, 0.2])
# Suppose a cold day has a 30% chance of being followed by a hot day
# and a hot day has a 20% chance of being followed by a cold day.
# We can model this as:
transition_distribution = tfd.Categorical(probs=[[0.7, 0.3],
[0.2, 0.8]])
# Suppose additionally that on each day the temperature is
# normally distributed with mean and standard deviation 0 and 5 on
# a cold day and mean and standard deviation 15 and 10 on a hot day.
# We can model this with:
observation_distribution = tfd.Normal(loc=[0., 15.], scale=[5., 10.])
# We can combine these distributions into a single week long
# hidden Markov model with:
model = tfd.HiddenMarkovModel(
initial_distribution=initial_distribution,
transition_distribution=transition_distribution,
observation_distribution=observation_distribution,
num_steps=7)
# The expected temperatures for each day are given by:
model.mean() # shape [7], elements approach 9.0
# The log pdf of a week of temperature 0 is:
  model.log_prob(tf.zeros(shape=[7]))
```
#### References
[1] https://en.wikipedia.org/wiki/Hidden_Markov_model
"""
def __init__(self,
initial_distribution,
transition_distribution,
observation_distribution,
num_steps,
validate_args=False,
allow_nan_stats=True,
name="HiddenMarkovModel"):
"""Initialize hidden Markov model.
Args:
initial_distribution: A `Categorical`-like instance.
Determines probability of first hidden state in Markov chain.
The number of categories must match the number of categories of
`transition_distribution` as well as both the rightmost batch
dimension of `transition_distribution` and the rightmost batch
dimension of `observation_distribution`.
transition_distribution: A `Categorical`-like instance.
The rightmost batch dimension indexes the probability distribution
of each hidden state conditioned on the previous hidden state.
observation_distribution: A `tfp.distributions.Distribution`-like
instance. The rightmost batch dimension indexes the distribution
of each observation conditioned on the corresponding hidden state.
num_steps: The number of steps taken in Markov chain. A python `int`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
Default value: `False`.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
Default value: `True`.
name: Python `str` name prefixed to Ops created by this class.
Default value: "HiddenMarkovModel".
Raises:
ValueError: if `num_steps` is not at least 1.
ValueError: if `initial_distribution` does not have scalar `event_shape`.
ValueError: if `transition_distribution` does not have scalar
`event_shape.`
ValueError: if `transition_distribution` and `observation_distribution`
are fully defined but don't have matching rightmost dimension.
"""
parameters = dict(locals())
# pylint: disable=protected-access
with tf.compat.v2.name_scope(name) as name:
self._runtime_assertions = [] # pylint: enable=protected-access
if num_steps < 1:
raise ValueError("num_steps ({}) must be at least 1.".format(num_steps))
self._initial_distribution = initial_distribution
self._observation_distribution = observation_distribution
self._transition_distribution = transition_distribution
if (initial_distribution.event_shape is not None
and initial_distribution.event_shape.ndims != 0):
raise ValueError(
"`initial_distribution` must have scalar `event_dim`s")
elif validate_args:
self._runtime_assertions += [
assert_util.assert_equal(
tf.shape(input=initial_distribution.event_shape_tensor())[0],
0,
message="`initial_distribution` must have scalar"
"`event_dim`s")
]
if (transition_distribution.event_shape is not None
and transition_distribution.event_shape.ndims != 0):
raise ValueError(
"`transition_distribution` must have scalar `event_dim`s")
elif validate_args:
self._runtime_assertions += [
assert_util.assert_equal(
tf.shape(input=transition_distribution.event_shape_tensor())[0],
0,
message="`transition_distribution` must have scalar"
"`event_dim`s")
]
if (transition_distribution.batch_shape is not None
and transition_distribution.batch_shape.ndims == 0):
raise ValueError(
"`transition_distribution` can't have scalar batches")
elif validate_args:
self._runtime_assertions += [
assert_util.assert_greater(
tf.size(input=transition_distribution.batch_shape_tensor()),
0,
message="`transition_distribution` can't have scalar "
"batches")
]
if (observation_distribution.batch_shape is not None
and observation_distribution.batch_shape.ndims == 0):
raise ValueError(
"`observation_distribution` can't have scalar batches")
elif validate_args:
self._runtime_assertions += [
assert_util.assert_greater(
tf.size(input=observation_distribution.batch_shape_tensor()),
0,
message="`observation_distribution` can't have scalar "
"batches")
]
# Infer number of hidden states and check consistency
# between transitions and observations
with tf.control_dependencies(self._runtime_assertions):
self._num_states = ((transition_distribution.batch_shape and
transition_distribution.batch_shape[-1]) or
transition_distribution.batch_shape_tensor()[-1])
observation_states = ((observation_distribution.batch_shape and
observation_distribution.batch_shape[-1]) or
observation_distribution.batch_shape_tensor()[-1])
if (tf.is_tensor(self._num_states) or tf.is_tensor(observation_states)):
if validate_args:
self._runtime_assertions += [
assert_util.assert_equal(
self._num_states,
observation_states,
message="`transition_distribution` and "
"`observation_distribution` must agree on "
"last dimension of batch size")
]
elif self._num_states != observation_states:
raise ValueError("`transition_distribution` and "
"`observation_distribution` must agree on "
"last dimension of batch size")
self._log_init = _extract_log_probs(self._num_states,
initial_distribution)
self._log_trans = _extract_log_probs(self._num_states,
transition_distribution)
self._num_steps = num_steps
self._num_states = tf.shape(input=self._log_init)[-1]
self._underlying_event_rank = tf.size(
input=self._observation_distribution.event_shape_tensor())
self.static_event_shape = tf.TensorShape(
[num_steps]).concatenate(self._observation_distribution.event_shape)
with tf.control_dependencies(self._runtime_assertions):
self.static_batch_shape = tf.broadcast_static_shape(
self._initial_distribution.batch_shape,
tf.broadcast_static_shape(
self._transition_distribution.batch_shape[:-1],
self._observation_distribution.batch_shape[:-1]))
# pylint: disable=protected-access
super(HiddenMarkovModel, self).__init__(
dtype=self._observation_distribution.dtype,
reparameterization_type=tf.compat.v1.distributions
.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=(self._initial_distribution._graph_parents +
self._transition_distribution._graph_parents +
self._observation_distribution._graph_parents),
name=name)
# pylint: enable=protected-access
self._parameters = parameters
def _batch_shape_tensor(self):
with tf.control_dependencies(self._runtime_assertions):
return tf.broadcast_dynamic_shape(
self._initial_distribution.batch_shape_tensor(),
tf.broadcast_dynamic_shape(
self._transition_distribution.batch_shape_tensor()[:-1],
self._observation_distribution.batch_shape_tensor()[:-1]))
def _batch_shape(self):
return self.static_batch_shape
def _event_shape_tensor(self):
with tf.control_dependencies(self._runtime_assertions):
return tf.concat([[self._num_steps],
self.observation_distribution.event_shape_tensor()],
axis=0)
def _event_shape(self):
return self.static_event_shape
@property
def initial_distribution(self):
return self._initial_distribution
@property
def transition_distribution(self):
return self._transition_distribution
@property
def observation_distribution(self):
return self._observation_distribution
@property
def num_steps(self):
return self._num_steps
@property
def num_states(self):
return self._num_states
def _sample_n(self, n, seed=None):
with tf.control_dependencies(self._runtime_assertions):
seed = seed_stream.SeedStream(seed, salt="HiddenMarkovModel")
num_states = self._num_states
batch_shape = self.batch_shape_tensor()
batch_size = tf.reduce_prod(input_tensor=batch_shape)
# The batch sizes of the underlying initial distributions and
# transition distributions might not match the batch size of
# the HMM distribution.
# As a result we need to ask for more samples from the
# underlying distributions and then reshape the results into
# the correct batch size for the HMM.
init_repeat = (
tf.reduce_prod(input_tensor=self.batch_shape_tensor()) //
tf.reduce_prod(
input_tensor=self._initial_distribution.batch_shape_tensor()))
init_state = self._initial_distribution.sample(n * init_repeat,
seed=seed())
init_state = tf.reshape(init_state, [n, batch_size])
# init_state :: n batch_size
transition_repeat = (
tf.reduce_prod(input_tensor=self.batch_shape_tensor()) //
tf.reduce_prod(input_tensor=self._transition_distribution
.batch_shape_tensor()[:-1]))
def generate_step(state, _):
"""Take a single step in Markov chain."""
gen = self._transition_distribution.sample(n * transition_repeat,
seed=seed())
# gen :: (n * transition_repeat) transition_batch
new_states = tf.reshape(gen,
[n, batch_size, num_states])
# new_states :: n batch_size num_states
old_states_one_hot = tf.one_hot(state, num_states, dtype=tf.int32)
# old_states :: n batch_size num_states
return tf.reduce_sum(
input_tensor=old_states_one_hot * new_states, axis=-1)
if self._num_steps > 1:
dummy_index = tf.zeros(self._num_steps - 1, dtype=tf.float32)
hidden_states = tf.scan(generate_step, dummy_index,
initializer=init_state)
# TODO(b/115618503): add/use prepend_initializer to tf.scan
hidden_states = tf.concat([[init_state],
hidden_states], axis=0)
else:
hidden_states = init_state[tf.newaxis, ...]
# hidden_states :: num_steps n batch_size num_states
hidden_one_hot = tf.one_hot(hidden_states, num_states,
dtype=self._observation_distribution.dtype)
# hidden_one_hot :: num_steps n batch_size num_states
# The observation distribution batch size might not match
# the required batch size so as with the initial and
# transition distributions we generate more samples and
# reshape.
observation_repeat = (
batch_size //
tf.reduce_prod(input_tensor=self._observation_distribution
.batch_shape_tensor()[:-1]))
possible_observations = self._observation_distribution.sample(
[self._num_steps, observation_repeat * n])
inner_shape = self._observation_distribution.event_shape
# possible_observations :: num_steps (observation_repeat * n)
# observation_batch[:-1] num_states inner_shape
possible_observations = tf.reshape(
possible_observations,
tf.concat([[self._num_steps, n],
batch_shape,
[num_states],
inner_shape], axis=0))
# possible_observations :: steps n batch_size num_states inner_shape
hidden_one_hot = tf.reshape(hidden_one_hot,
tf.concat([[self._num_steps, n],
batch_shape,
[num_states],
tf.ones_like(inner_shape)],
axis=0))
# hidden_one_hot :: steps n batch_size num_states "inner_shape"
observations = tf.reduce_sum(
input_tensor=hidden_one_hot * possible_observations,
axis=-1 - tf.size(input=inner_shape))
# observations :: steps n batch_size inner_shape
observations = util.move_dimension(observations, 0,
1 + tf.size(input=batch_shape))
# returned :: n batch_shape steps inner_shape
return observations
def _log_prob(self, value):
with tf.control_dependencies(self._runtime_assertions):
# The argument `value` is a tensor of sequences of observations.
# `observation_batch_shape` is the shape of that tensor with the
# sequence part removed.
# `observation_batch_shape` is then broadcast to the full batch shape
# to give the `batch_shape` that defines the shape of the result.
observation_tensor_shape = tf.shape(input=value)
observation_batch_shape = observation_tensor_shape[
:-1 - self._underlying_event_rank]
# value :: observation_batch_shape num_steps observation_event_shape
batch_shape = tf.broadcast_dynamic_shape(observation_batch_shape,
self.batch_shape_tensor())
log_init = tf.broadcast_to(self._log_init,
tf.concat([batch_shape,
[self._num_states]], axis=0))
# log_init :: batch_shape num_states
log_transition = self._log_trans
# `observation_event_shape` is the shape of each sequence of observations
# emitted by the model.
observation_event_shape = observation_tensor_shape[
-1 - self._underlying_event_rank:]
working_obs = tf.broadcast_to(value,
tf.concat([batch_shape,
observation_event_shape],
axis=0))
# working_obs :: batch_shape observation_event_shape
r = self._underlying_event_rank
# Move index into sequence of observations to front so we can apply
# tf.foldl
working_obs = util.move_dimension(working_obs,
-1 - r, 0)[..., tf.newaxis]
# working_obs :: num_steps batch_shape underlying_event_shape
observation_probs = (
self._observation_distribution.log_prob(working_obs))
def forward_step(log_prev_step, log_prob_observation):
return _log_vector_matrix(log_prev_step,
log_transition) + log_prob_observation
fwd_prob = tf.foldl(forward_step, observation_probs, initializer=log_init)
# fwd_prob :: batch_shape num_states
log_prob = tf.reduce_logsumexp(input_tensor=fwd_prob, axis=-1)
# log_prob :: batch_shape
return log_prob
def _marginal_hidden_probs(self):
"""Compute marginal pdf for each individual observable."""
initial_log_probs = tf.broadcast_to(self._log_init,
tf.concat([self.batch_shape_tensor(),
[self._num_states]],
axis=0))
# initial_log_probs :: batch_shape num_states
if self._num_steps > 1:
transition_log_probs = self._log_trans
def forward_step(log_probs, _):
return _log_vector_matrix(log_probs, transition_log_probs)
dummy_index = tf.zeros(self._num_steps - 1, dtype=tf.float32)
forward_log_probs = tf.scan(forward_step, dummy_index,
initializer=initial_log_probs,
name="forward_log_probs")
forward_log_probs = tf.concat([[initial_log_probs], forward_log_probs],
axis=0)
else:
forward_log_probs = initial_log_probs[tf.newaxis, ...]
# returns :: num_steps batch_shape num_states
return tf.exp(forward_log_probs)
def _mean(self):
with tf.control_dependencies(self._runtime_assertions):
probs = self._marginal_hidden_probs()
# probs :: num_steps batch_shape num_states
means = self._observation_distribution.mean()
# means :: observation_batch_shape[:-1] num_states
# observation_event_shape
means_shape = tf.concat(
[self.batch_shape_tensor(),
[self._num_states],
self._observation_distribution.event_shape_tensor()],
axis=0)
means = tf.broadcast_to(means, means_shape)
# means :: batch_shape num_states observation_event_shape
observation_event_shape = (
self._observation_distribution.event_shape_tensor())
batch_size = tf.reduce_prod(input_tensor=self.batch_shape_tensor())
flat_probs_shape = [self._num_steps, batch_size, self._num_states]
flat_means_shape = [
batch_size, self._num_states,
tf.reduce_prod(input_tensor=observation_event_shape)
]
flat_probs = tf.reshape(probs, flat_probs_shape)
# flat_probs :: num_steps batch_size num_states
flat_means = tf.reshape(means, flat_means_shape)
# flat_means :: batch_size num_states observation_event_size
flat_mean = tf.einsum("ijk,jkl->jil", flat_probs, flat_means)
# flat_mean :: batch_size num_steps observation_event_size
unflat_mean_shape = tf.concat(
[self.batch_shape_tensor(),
[self._num_steps],
observation_event_shape],
axis=0)
# returns :: batch_shape num_steps observation_event_shape
return tf.reshape(flat_mean, unflat_mean_shape)
def _variance(self):
with tf.control_dependencies(self._runtime_assertions):
probs = self._marginal_hidden_probs()
# probs :: num_steps batch_shape num_states
means = self._observation_distribution.mean()
# means :: observation_batch_shape[:-1] num_states
# observation_event_shape
means_shape = tf.concat(
[self.batch_shape_tensor(),
[self._num_states],
self._observation_distribution.event_shape_tensor()],
axis=0)
means = tf.broadcast_to(means, means_shape)
# means :: batch_shape num_states observation_event_shape
observation_event_shape = (
self._observation_distribution.event_shape_tensor())
batch_size = tf.reduce_prod(input_tensor=self.batch_shape_tensor())
flat_probs_shape = [self._num_steps, batch_size, self._num_states]
flat_means_shape = [
batch_size, 1, self._num_states,
tf.reduce_prod(input_tensor=observation_event_shape)
]
flat_probs = tf.reshape(probs, flat_probs_shape)
# flat_probs :: num_steps batch_size num_states
flat_means = tf.reshape(means, flat_means_shape)
# flat_means :: batch_size 1 num_states observation_event_size
flat_mean = tf.einsum("ijk,jmkl->jiml", flat_probs, flat_means)
# flat_mean :: batch_size num_steps 1 observation_event_size
variances = self._observation_distribution.variance()
variances = tf.broadcast_to(variances, means_shape)
# variances :: batch_shape num_states observation_event_shape
flat_variances = tf.reshape(variances, flat_means_shape)
# flat_variances :: batch_size 1 num_states observation_event_size
# For a mixture of n distributions with mixture probabilities
# p[i], and where the individual distributions have means and
# variances given by mean[i] and var[i], the variance of
# the mixture is given by:
#
      # var = sum i=1..n p[i] * ((mean[i] - mean)**2 + var[i])
flat_variance = tf.einsum("ijk,jikl->jil",
flat_probs,
(flat_means - flat_mean)**2 + flat_variances)
# flat_variance :: batch_size num_steps observation_event_size
unflat_mean_shape = tf.concat(
[self.batch_shape_tensor(),
[self._num_steps],
observation_event_shape],
axis=0)
# returns :: batch_shape num_steps observation_event_shape
return tf.reshape(flat_variance, unflat_mean_shape)
def _observation_shape_preconditions(self, observation_tensor_shape):
return tf.control_dependencies([assert_util.assert_equal(
observation_tensor_shape[-1 - self._underlying_event_rank],
self._num_steps,
message="The tensor `observations` must consist of sequences"
"of observations from `HiddenMarkovModel` of length"
"`num_steps`.")])
def posterior_marginals(self, observations, name=None):
"""Compute marginal posterior distribution for each state.
This function computes, for each time step, the marginal
conditional probability that the hidden Markov model was in
each possible state given the observations that were made
at each time step.
So if the hidden states are `z[0],...,z[num_steps - 1]` and
the observations are `x[0], ..., x[num_steps - 1]`, then
this function computes `P(z[i] | x[0], ..., x[num_steps - 1])`
for all `i` from `0` to `num_steps - 1`.
This operation is sometimes called smoothing. It uses a form
of the forward-backward algorithm.
Note: the behavior of this function is undefined if the
`observations` argument represents impossible observations
from the model.
Args:
observations: A tensor representing a batch of observations
made on the hidden Markov model. The rightmost dimension of this tensor
gives the steps in a sequence of observations from a single sample from
the hidden Markov model. The size of this dimension should match the
`num_steps` parameter of the hidden Markov model object. The other
dimensions are the dimensions of the batch and these are broadcast with
the hidden Markov model's parameters.
name: Python `str` name prefixed to Ops created by this class.
Default value: "HiddenMarkovModel".
Returns:
posterior_marginal: A `Categorical` distribution object representing the
marginal probability of the hidden Markov model being in each state at
each step. The rightmost dimension of the `Categorical` distributions
batch will equal the `num_steps` parameter providing one marginal
distribution for each step. The other dimensions are the dimensions
corresponding to the batch of observations.
Raises:
ValueError: if rightmost dimension of `observations` does not
have size `num_steps`.
"""
with tf.compat.v2.name_scope(name or "posterior_marginals"):
with tf.control_dependencies(self._runtime_assertions):
observation_tensor_shape = tf.shape(input=observations)
with self._observation_shape_preconditions(observation_tensor_shape):
observation_batch_shape = observation_tensor_shape[
:-1 - self._underlying_event_rank]
observation_event_shape = observation_tensor_shape[
-1 - self._underlying_event_rank:]
batch_shape = tf.broadcast_dynamic_shape(observation_batch_shape,
self.batch_shape_tensor())
log_init = tf.broadcast_to(self._log_init,
tf.concat([batch_shape,
[self._num_states]],
axis=0))
log_transition = self._log_trans
observations = tf.broadcast_to(observations,
tf.concat([batch_shape,
observation_event_shape],
axis=0))
observation_rank = tf.rank(observations)
underlying_event_rank = self._underlying_event_rank
observations = util.move_dimension(
observations,
observation_rank - underlying_event_rank - 1, 0)
observations = tf.expand_dims(
observations,
observation_rank - underlying_event_rank)
observation_log_probs = self._observation_distribution.log_prob(
observations)
log_adjoint_prob = tf.zeros_like(log_init)
def forward_step(log_previous_step, log_prob_observation):
return _log_vector_matrix(log_previous_step,
log_transition) + log_prob_observation
log_prob = log_init + observation_log_probs[0]
forward_log_probs = tf.scan(forward_step, observation_log_probs[1:],
initializer=log_prob,
name="forward_log_probs")
forward_log_probs = tf.concat([[log_prob], forward_log_probs], axis=0)
def backward_step(log_previous_step, log_prob_observation):
return _log_matrix_vector(log_transition,
log_prob_observation + log_previous_step)
backward_log_adjoint_probs = tf.scan(
backward_step,
observation_log_probs[1:],
initializer=log_adjoint_prob,
reverse=True,
name="backward_log_adjoint_probs")
total_log_prob = tf.reduce_logsumexp(
input_tensor=forward_log_probs[-1], axis=-1)
backward_log_adjoint_probs = tf.concat([backward_log_adjoint_probs,
[log_adjoint_prob]], axis=0)
log_likelihoods = forward_log_probs + backward_log_adjoint_probs
marginal_log_probs = util.move_dimension(
log_likelihoods - total_log_prob[..., tf.newaxis], 0, -2)
return categorical.Categorical(logits=marginal_log_probs)
def posterior_mode(self, observations, name=None):
"""Compute maximum likelihood sequence of hidden states.
When this function is provided with a sequence of observations
`x[0], ..., x[num_steps - 1]`, it returns the sequence of hidden
states `z[0], ..., z[num_steps - 1]`, drawn from the underlying
Markov chain, that is most likely to yield those observations.
It uses the [Viterbi algorithm](
https://en.wikipedia.org/wiki/Viterbi_algorithm).
Note: the behavior of this function is undefined if the
`observations` argument represents impossible observations
from the model.
Note: if there isn't a unique most likely sequence then one
of the equally most likely sequences is chosen.
Args:
observations: A tensor representing a batch of observations made on the
hidden Markov model. The rightmost dimensions of this tensor correspond
to the dimensions of the observation distributions of the underlying
Markov chain. The next dimension from the right indexes the steps in a
sequence of observations from a single sample from the hidden Markov
model. The size of this dimension should match the `num_steps`
parameter of the hidden Markov model object. The other dimensions are
the dimensions of the batch and these are broadcast with the hidden
Markov model's parameters.
name: Python `str` name prefixed to Ops created by this class.
Default value: "HiddenMarkovModel".
Returns:
posterior_mode: A `Tensor` representing the most likely sequence of hidden
states. The rightmost dimension of this tensor will equal the
`num_steps` parameter providing one hidden state for each step. The
other dimensions are those of the batch.
Raises:
ValueError: if the `observations` tensor does not consist of
sequences of `num_steps` observations.
#### Examples
```python
tfd = tfp.distributions
# A simple weather model.
# Represent a cold day with 0 and a hot day with 1.
# Suppose the first day of a sequence has a 0.8 chance of being cold.
initial_distribution = tfd.Categorical(probs=[0.8, 0.2])
# Suppose a cold day has a 30% chance of being followed by a hot day
# and a hot day has a 20% chance of being followed by a cold day.
transition_distribution = tfd.Categorical(probs=[[0.7, 0.3],
[0.2, 0.8]])
# Suppose additionally that on each day the temperature is
# normally distributed with mean and standard deviation 0 and 5 on
# a cold day and mean and standard deviation 15 and 10 on a hot day.
observation_distribution = tfd.Normal(loc=[0., 15.], scale=[5., 10.])
# This gives the hidden Markov model:
model = tfd.HiddenMarkovModel(
initial_distribution=initial_distribution,
transition_distribution=transition_distribution,
observation_distribution=observation_distribution,
num_steps=7)
# Suppose we observe gradually rising temperatures over a week:
temps = [-2., 0., 2., 4., 6., 8., 10.]
# We can now compute the most probable sequence of hidden states:
model.posterior_mode(temps)
# The result is [0 0 0 0 0 1 1] telling us that the transition
# from "cold" to "hot" most likely happened between the
# 5th and 6th days.
```
"""
with tf.compat.v2.name_scope(name or "posterior_mode"):
with tf.control_dependencies(self._runtime_assertions):
observation_tensor_shape = tf.shape(input=observations)
with self._observation_shape_preconditions(observation_tensor_shape):
observation_batch_shape = observation_tensor_shape[
:-1 - self._underlying_event_rank]
observation_event_shape = observation_tensor_shape[
-1 - self._underlying_event_rank:]
batch_shape = tf.broadcast_dynamic_shape(observation_batch_shape,
self.batch_shape_tensor())
log_init = tf.broadcast_to(self._log_init,
tf.concat([batch_shape,
[self._num_states]],
axis=0))
observations = tf.broadcast_to(observations,
tf.concat([batch_shape,
observation_event_shape],
axis=0))
observation_rank = tf.rank(observations)
underlying_event_rank = self._underlying_event_rank
observations = util.move_dimension(
observations,
observation_rank - underlying_event_rank - 1, 0)
# We need to compute the probability of each observation for
# each possible state.
# This requires inserting an extra index just before the
# observation event indices that will be broadcast with the
# last batch index in `observation_distribution`.
observations = tf.expand_dims(
observations,
observation_rank - underlying_event_rank)
observation_log_probs = self._observation_distribution.log_prob(
observations)
log_prob = log_init + observation_log_probs[0]
if self._num_steps == 1:
most_likely_end = tf.argmax(input=log_prob, axis=-1)
return most_likely_end[..., tf.newaxis]
def forward_step(previous_step_pair, log_prob_observation):
log_prob_previous = previous_step_pair[0]
log_prob = (log_prob_previous[..., tf.newaxis] +
self._log_trans +
log_prob_observation[..., tf.newaxis, :])
most_likely_given_successor = tf.argmax(input=log_prob, axis=-2)
max_log_p_given_successor = tf.reduce_max(input_tensor=log_prob,
axis=-2)
return (max_log_p_given_successor, most_likely_given_successor)
forward_log_probs, all_most_likely_given_successor = tf.scan(
forward_step,
observation_log_probs[1:],
initializer=(log_prob,
tf.zeros(tf.shape(input=log_init), dtype=tf.int64)),
name="forward_log_probs")
most_likely_end = tf.argmax(input=forward_log_probs[-1], axis=-1)
# We require the operation that gives C from A and B where
# C[i...j] = A[i...j, B[i...j]]
# and A = most_likely_given_successor
# B = most_likely_successor.
# tf.gather requires indices of known shape so instead we use
# reduction with tf.one_hot(B) to pick out elements from B
def backward_step(most_likely_successor, most_likely_given_successor):
return tf.reduce_sum(
input_tensor=(most_likely_given_successor *
tf.one_hot(most_likely_successor,
self._num_states,
dtype=tf.int64)),
axis=-1)
backward_scan = tf.scan(
backward_step,
all_most_likely_given_successor,
most_likely_end,
reverse=True)
most_likely_sequences = tf.concat([backward_scan, [most_likely_end]],
axis=0)
return util.move_dimension(most_likely_sequences, 0, -1)
def _log_vector_matrix(vs, ms):
"""Multiply tensor of vectors by matrices assuming values stored are logs."""
return tf.reduce_logsumexp(input_tensor=vs[..., tf.newaxis] + ms, axis=-2)
def _log_matrix_vector(ms, vs):
"""Multiply tensor of matrices by vectors assuming values stored are logs."""
return tf.reduce_logsumexp(input_tensor=ms + vs[..., tf.newaxis, :], axis=-1)
def _vector_matrix(vs, ms):
"""Multiply tensor of vectors by matrices."""
return tf.reduce_sum(input_tensor=vs[..., tf.newaxis] * ms, axis=-2)
def _extract_log_probs(num_states, dist):
"""Tabulate log probabilities from a batch of distributions."""
states = tf.reshape(tf.range(num_states),
tf.concat([[num_states],
tf.ones_like(dist.batch_shape_tensor())],
axis=0))
return util.move_dimension(dist.log_prob(states), 0, -1)
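# --- Illustrative sketch (not part of the library) ----------------------------
# What _log_vector_matrix computes, checked against an ordinary matrix product
# with NumPy; the numbers are arbitrary assumptions.
#
#   import numpy as np
#   v = np.array([0.3, 0.7])                  # probability vector
#   m = np.array([[0.9, 0.1], [0.4, 0.6]])    # row-stochastic transition matrix
#   log_result = np.log(np.exp(np.log(v)[:, None] + np.log(m)).sum(axis=0))
#   assert np.allclose(np.exp(log_result), v @ m)
#
# i.e. a vector-matrix product carried out entirely in log space via logsumexp,
# which is how the forward pass above avoids underflow on long sequences.
# -------------------------------------------------------------------------------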
| 42.517647 | 80 | 0.645897 |
hexsha: 79481db37d6c8c5c2f2d451293ddf2b7c849798d | size: 4,716 | ext: py | lang: Python
max_stars_repo: src/yotsugi_cli.py | Nyx0uf/Yotsugi | 45ad4a87695bdb5d9d7b98ebc44a4c3161bb7952 | ["MIT"] | stars: 1 | 2020-11-21T16:40:20.000Z | 2020-11-21T16:40:20.000Z
max_issues_repo: src/yotsugi_cli.py | Nyx0uf/Yotsugi | 45ad4a87695bdb5d9d7b98ebc44a4c3161bb7952 | ["MIT"] | issues: null | null | null
max_forks_repo: src/yotsugi_cli.py | Nyx0uf/Yotsugi | 45ad4a87695bdb5d9d7b98ebc44a4c3161bb7952 | ["MIT"] | forks: null | null | null
#!/usr/bin/env python3
# coding: utf-8
"""
Client
"""
import argparse
import requests
import sys
from configparser import ConfigParser
from pathlib import Path
from typing import List, Dict
from requests.auth import HTTPBasicAuth
from model import note
def load_cfg() -> ConfigParser:
"""Load the client config file"""
cfg_path = Path.home().joinpath(".yotsugi").joinpath("yotsugi.conf")
cfg = ConfigParser()
cfg.read(cfg_path)
return cfg["SERVER"]
def has_auth_infos(cfg: ConfigParser) -> bool:
"""Check if http basic auth infos are provided"""
user = cfg["basic_auth_user"]
password = cfg["basic_auth_password"]
return user and len(user) > 0 and password and len(password) > 0
def list_notes(cfg: ConfigParser):
"""List all notes"""
auth = HTTPBasicAuth(cfg["basic_auth_user"], cfg["basic_auth_password"]) if has_auth_infos(cfg) is True else None
response = requests.get(f"{cfg['url']}:{cfg['port']}/api/notes", auth=auth)
if response.status_code == 200:
notes: List[Dict] = response.json()
print(f"{len(notes)} Note{'' if len(notes) == 1 else 's'} :")
for d in notes:
n = note.Note(d["id"], d["title"], d["content"], d["creation_date"], d["update_date"])
print(f"\t- {n.title} (id {n.id})")
def show_note(cfg: ConfigParser, note_id: int):
"""Show the note `note_id`"""
auth = HTTPBasicAuth(cfg["basic_auth_user"], cfg["basic_auth_password"]) if has_auth_infos(cfg) is True else None
response = requests.get(f"{cfg['url']}:{cfg['port']}/api/notes/{note_id}", auth=auth)
if response.status_code == 200:
tmp = response.json()
n = note.Note(tmp["id"], tmp["title"], tmp["content"], tmp["creation_date"], tmp["update_date"])
print(f"{n.title} :\n---")
print(f"{n.content}")
def create_note(cfg: ConfigParser, title: str, body: str):
"""Create a new note with a title and body"""
auth = HTTPBasicAuth(cfg["basic_auth_user"], cfg["basic_auth_password"]) if has_auth_infos(cfg) is True else None
response = requests.post(f"{cfg['url']}:{cfg['port']}/api/notes/add", data={"title": title, "content": body}, auth=auth)
if response.status_code == 200:
print(f"[+] Note created")
else:
print(f"[!] Error creating note : {response}")
def delete_note(cfg: ConfigParser, note_id: int):
"""Delete the note `note_id`"""
auth = HTTPBasicAuth(cfg["basic_auth_user"], cfg["basic_auth_password"]) if has_auth_infos(cfg) is True else None
response = requests.delete(f"{cfg['url']}:{cfg['port']}/api/notes/delete/{note_id}", auth=auth)
if response.status_code == 200:
print(f"[+] Note {note_id} deleted")
else:
print(f"[!] Error deleting note {note_id} : {response}")
def update_note(cfg: ConfigParser, note_id: int, title: str, body: str):
"""Update the note `note_id`"""
auth = HTTPBasicAuth(cfg["basic_auth_user"], cfg["basic_auth_password"]) if has_auth_infos(cfg) is True else None
response = requests.put(f"{cfg['url']}:{cfg['port']}/api/notes/update/{note_id}", data={"title": title, "content": body}, auth=auth)
if response.status_code == 200:
print(f"[+] Note {note_id} updated")
else:
print(f"[!] Error updating note {note_id} : {response}")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-l", "--list", dest="list", action='store_true', help="List all notes")
parser.add_argument("-s", "--show", dest="show", action='store_true', help="Show the given note")
parser.add_argument("-c", "--create", dest="create", action='store_true', help="Create a new note")
parser.add_argument("-d", "--delete", dest="delete", action='store_true', help="Delete a note")
parser.add_argument("-u", "--update", dest="update", action='store_true', help="Update a note")
parser.add_argument("-i", "--id", dest="id", type=int, default=0, help="Note id")
parser.add_argument("-t", "--title", dest="title", type=str, default=None, help="Note title")
parser.add_argument("-b", "--body", dest="body", type=str, default=None, help="Note body")
args = parser.parse_args()
cfg = load_cfg()
if args.list is True:
list_notes(cfg)
sys.exit(0)
if args.show is True and args.id > 0:
show_note(cfg, args.id)
if args.delete is True and args.id > 0:
delete_note(cfg, args.id)
list_notes(cfg)
if args.create is True and args.title and len(args.title) > 0:
create_note(cfg, args.title, args.body)
list_notes(cfg)
if args.update is True and args.id > 0:
update_note(cfg, args.id, args.title, args.body)
list_notes(cfg)
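# --- Illustrative sketch (not part of this file) -------------------------------
# load_cfg() reads ~/.yotsugi/yotsugi.conf; a minimal [SERVER] section it could
# contain (the values shown are assumptions, not taken from the project):
#
#   [SERVER]
#   url = http://localhost
#   port = 8080
#   basic_auth_user = admin
#   basic_auth_password = secret
#
# Both basic_auth_* keys must be present; leaving them empty disables HTTP basic
# auth, since the has_auth_infos() check then fails and auth=None is used.
# --------------------------------------------------------------------------------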
| 42.872727 | 136 | 0.646098 |
hexsha: 79481e83748d7667306466f94746861ce8f9803f | size: 4,888 | ext: py | lang: Python
max_stars_repo: examples/optimal_burst/optimal_burst_by_range.py | spascou/ps2-analysis | 00f99b009d15d4c401a3338ddd0408ac7eedcc0b | ["MIT"] | stars: 2 | 2020-06-25T17:19:05.000Z | 2020-10-13T06:08:39.000Z
max_issues_repo: examples/optimal_burst/optimal_burst_by_range.py | spascou/ps2-analysis | 00f99b009d15d4c401a3338ddd0408ac7eedcc0b | ["MIT"] | issues: null | null | null
max_forks_repo: examples/optimal_burst/optimal_burst_by_range.py | spascou/ps2-analysis | 00f99b009d15d4c401a3338ddd0408ac7eedcc0b | ["MIT"] | forks: null | null | null
import logging
import os
from itertools import groupby
from typing import List, Optional
import altair
from ps2_census.enums import PlayerState
from ps2_analysis.enums import DamageLocation
from ps2_analysis.fire_groups.cone_of_fire import ConeOfFire
from ps2_analysis.fire_groups.data_files import (
update_data_files as update_fire_groups_data_files,
)
from ps2_analysis.fire_groups.fire_mode import FireMode
from ps2_analysis.utils import CodeTimer
from ps2_analysis.weapons.infantry.data_files import (
update_data_files as update_infantry_weapons_data_files,
)
from ps2_analysis.weapons.infantry.generate import generate_all_infantry_weapons
from ps2_analysis.weapons.infantry.infantry_weapon import InfantryWeapon
logging.basicConfig(level=logging.INFO)
SERVICE_ID: Optional[str] = os.environ.get("CENSUS_SERVICE_ID")
DATAFILES_DIRECTORY: str = "../datafiles"
if not SERVICE_ID:
raise ValueError("CENSUS_SERVICE_ID envvar not found")
update_fire_groups_data_files(
directory=DATAFILES_DIRECTORY, service_id=SERVICE_ID,
)
update_infantry_weapons_data_files(
directory=DATAFILES_DIRECTORY, service_id=SERVICE_ID,
)
infantry_weapons: List[InfantryWeapon] = list(
generate_all_infantry_weapons(data_files_directory=DATAFILES_DIRECTORY)
)
print(f"Generated {len(infantry_weapons)} infantry weapons")
wp: InfantryWeapon = next(x for x in infantry_weapons if x.item_id == 43)
fm: FireMode = wp.fire_groups[0].fire_modes[1]
cof: ConeOfFire = fm.player_state_cone_of_fire[PlayerState.STANDING]
rttks: List[dict] = []
distance: int
for distance in range(0, 100, 2):
with CodeTimer(f"determination at {distance}m"):
burst_length: int
for burst_length in range(0, int(round(fm.max_consecutive_shots / 4)) + 1, 1):
control_time: int = cof.recover_time(
cof.min_cof_angle() + cof.bloom * burst_length
)
ttk: int
timed_out_ratio: float
ttk, timed_out_ratio = fm.real_time_to_kill(
distance=distance,
runs=500,
control_time=control_time,
auto_burst_length=burst_length,
aim_location=DamageLocation.HEAD,
recoil_compensation=True,
# recoil_compensation_accuracy=0.1,
)
rttks.append(
{
"distance": distance,
"control_time": control_time + fm.fire_timing.refire_time,
"burst_length": burst_length,
"ttk": ttk,
"timed_out_ratio": timed_out_ratio,
}
)
dataset = altair.Data(values=rttks)
dst_ttk_chart = (
altair.Chart(dataset)
.mark_line()
.encode(
x="distance:Q",
y="ttk:Q",
color=altair.Color("burst_length:O", scale=altair.Scale(scheme="dark2")),
tooltip=[
"distance:Q",
"control_time:Q",
"burst_length:Q",
"ttk:Q",
"timed_out_ratio:Q",
],
)
.properties(title=f"{wp.name} optimal ttk at distance by burst length", width=900)
.interactive()
)
dst_tor_chart = (
altair.Chart(dataset)
.mark_line()
.encode(
x="distance:Q",
y="timed_out_ratio:Q",
color=altair.Color("burst_length:O", scale=altair.Scale(scheme="dark2")),
tooltip=[
"distance:Q",
"control_time:Q",
"burst_length:Q",
"ttk:Q",
"timed_out_ratio:Q",
],
)
.properties(
title=f"{wp.name} timed out ratio at distance by burst length", width=900
)
.interactive()
)
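# The loop below keeps one "optimal" entry per distance: among all burst lengths
# whose simulated TTK lies within 5% of the best TTK at that distance, full auto
# (burst_length == 0) wins ties, otherwise the fastest candidate is kept.
# Hypothetical example: if the best TTK at some distance is 600, the cutoff is
# round(1.05 * 600) = 630; an auto TTK of 650 falls outside the band, so the
# fastest burst is chosen, while an auto TTK of 625 falls inside it and full
# auto is preferred.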
filtered_rttks: List[dict] = []
for _, distance_rttks_it in groupby(
sorted(rttks, key=lambda x: x["distance"]), lambda x: x["distance"]
):
distance_rttks: List[dict] = list(distance_rttks_it)
min_ttk: int = min(
(x["ttk"] for x in filter(lambda x: x["ttk"] > 0, distance_rttks))
)
candidates: List[dict] = list(
filter(lambda x: 0 <= x["ttk"] <= round(1.05 * min_ttk), distance_rttks)
)
auto_candidates: List[dict] = list(
filter(lambda x: x["burst_length"] == 0, candidates)
)
if auto_candidates:
filtered_rttks.append(min(auto_candidates, key=lambda x: x["ttk"]))
else:
filtered_rttks.append(min(candidates, key=lambda x: x["ttk"],))
filtered_dataset = altair.Data(values=filtered_rttks)
burst_length_chart = (
altair.Chart(filtered_dataset)
.mark_line()
.encode(
x="distance:Q",
y="burst_length:Q",
tooltip=["ttk:Q", "distance:Q", "burst_length:Q", "control_time:Q"],
)
.properties(title=f"{wp.name} optimal burst length at distance", width=900)
.interactive()
)
(dst_ttk_chart & dst_tor_chart & burst_length_chart).save("optimal_burst_by_range.html")
| 28.254335 | 88 | 0.648936 |
hexsha: 79481f478f4e07714ee2cd9849b42f2dfe3fb2c1 | size: 522 | ext: py | lang: Python
max_stars_repo: homework-12345/homework3/network.py | easilylazy/pattern-recognition | 2b95689bb3f34e4821a0211b19b76164aa6e615f | ["MIT"] | stars: 2 | 2021-03-07T14:40:23.000Z | 2021-03-08T08:51:01.000Z
max_issues_repo: homework-12345/homework3/network.py | easilylazy/pattern-recognition | 2b95689bb3f34e4821a0211b19b76164aa6e615f | ["MIT"] | issues: null | null | null
max_forks_repo: homework-12345/homework3/network.py | easilylazy/pattern-recognition | 2b95689bb3f34e4821a0211b19b76164aa6e615f | ["MIT"] | forks: null | null | null
""" Network Class """
class Network():
def __init__(self):
self.layerList = []
self.numLayer = 0
self.is_training = True
def add(self, layer):
self.numLayer += 1
self.layerList.append(layer)
def forward(self, x):
# forward layer by layer
for i in range(self.numLayer):
x = self.layerList[i].forward(x, is_training=self.is_training)
return x
def backward(self, delta):
# backward layer by layer
for i in reversed(range(self.numLayer)): # reversed
delta = self.layerList[i].backward(delta)
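# --- Illustrative usage sketch --------------------------------------------------
# The layer classes and loss object below are assumptions (not defined in this
# file); Network only relies on each layer exposing forward(x, is_training=...)
# and backward(delta).
#
#   net = Network()
#   net.add(FullyConnectedLayer(784, 128))   # hypothetical layer class
#   net.add(ReLULayer())                     # hypothetical layer class
#   net.add(FullyConnectedLayer(128, 10))
#
#   scores = net.forward(x_batch)            # runs the layers front to back
#   delta = loss.backward(scores, y_batch)   # hypothetical loss gradient
#   net.backward(delta)                      # propagates the gradient back to front
# ---------------------------------------------------------------------------------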
| 22.695652 | 65 | 0.689655 |
hexsha: 79482043152df3fe6c094446c1a654e2df15c89d | size: 2,210 | ext: py | lang: Python
max_stars_repo: setup.py | mauriciogtec/ml-logger | b66be0f18097456618eecfb15e425ea9bb51bfbb | ["MIT"] | stars: null | null | null
max_issues_repo: setup.py | mauriciogtec/ml-logger | b66be0f18097456618eecfb15e425ea9bb51bfbb | ["MIT"] | issues: null | null | null
max_forks_repo: setup.py | mauriciogtec/ml-logger | b66be0f18097456618eecfb15e425ea9bb51bfbb | ["MIT"] | forks: null | null | null
import codecs
import os.path
import setuptools
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, rel_path), "r") as fp:
return fp.read()
def get_version(rel_path):
# code taken from https://packaging.python.org/guides/single-sourcing-package-version/
for line in read(rel_path).splitlines():
if line.startswith("__version__"):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
raise RuntimeError("Unable to find version string.")
def parse_dependency(filepath):
return [
dependency
for dependency in open(filepath).read().splitlines()
if "==" in dependency
]
base_requirements = parse_dependency("requirements/filesystem.txt")
all_requirements = base_requirements + parse_dependency("requirements/all.txt")
dev_requirements = all_requirements + parse_dependency("requirements/dev.txt")
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="ml_logger",
version=get_version("ml_logger/__init__.py"),
author="Shagun Sodhani",
author_email="sshagunsodhani@gmail.com",
description="Logging Utility for ML Experiments",
    long_description=long_description,
long_description_content_type="text/markdown",
    # Install the basic setup (without wandb, tensorboardX and mlflow) with
    # pip install .
install_requires=base_requirements,
url="https://github.com/shagunsodhani/ml-logger",
packages=setuptools.find_packages(
exclude=["*.tests", "*.tests.*", "tests.*", "tests", "docs", "docsrc"]
),
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.6",
extras_require={
# Install development dependencies with
# pip install -e .[dev]
"dev": dev_requirements,
# Install the complete setup (wandb, mlflow and tensorboardX)
# pip install .[all]
"all": all_requirements,
},
)
| 32.5 | 90 | 0.661991 |
hexsha: 7948205235e3ba42220d72e5638695fd66bc4115 | size: 37,074 | ext: py | lang: Python
max_stars_repo: old_projects/eola/chapter4.py | mertyildiran/manim | db7f8320bd84e9ebbf75adfcd6dfc480881e5849 | ["MIT"] | stars: 2 | 2020-12-26T05:21:23.000Z | 2022-02-22T23:00:28.000Z
max_issues_repo: old_projects/eola/chapter4.py | xiangxun/manim | 31478c523c9febb85d673219c6451ce545b5e319 | ["MIT"] | issues: 4 | 2021-03-19T09:45:18.000Z | 2022-01-13T02:00:35.000Z
max_forks_repo: old_projects/eola/chapter4.py | xiangxun/manim | 31478c523c9febb85d673219c6451ce545b5e319 | ["MIT"] | forks: 2 | 2021-07-21T17:49:09.000Z | 2022-01-24T11:56:17.000Z
from big_ol_pile_of_manim_imports import *
from old_projects.eola.chapter3 import MatrixVectorMultiplicationAbstract
class OpeningQuote(Scene):
def construct(self):
words = TextMobject([
"It is my experience that proofs involving",
"matrices",
"can be shortened by 50\\% if one",
"throws the matrices out."
])
words.set_width(FRAME_WIDTH - 2)
words.to_edge(UP)
words.split()[1].set_color(GREEN)
words.split()[3].set_color(BLUE)
author = TextMobject("-Emil Artin")
author.set_color(YELLOW)
author.next_to(words, DOWN, buff = 0.5)
self.play(FadeIn(words))
self.wait(2)
self.play(Write(author, run_time = 3))
self.wait()
class MatrixToBlank(Scene):
def construct(self):
matrix = Matrix([[3, 1], [0, 2]])
arrow = Arrow(LEFT, RIGHT)
matrix.to_edge(LEFT)
arrow.next_to(matrix, RIGHT)
matrix.add(arrow)
self.play(Write(matrix))
self.wait()
class ExampleTransformation(LinearTransformationScene):
def construct(self):
self.setup()
self.apply_transposed_matrix([[3, 0], [1, 2]])
self.wait(2)
class RecapTime(TeacherStudentsScene):
def construct(self):
self.setup()
self.teacher_says("Quick recap time!")
self.random_blink()
self.wait()
student = self.get_students()[0]
everyone = self.get_mobjects()
everyone.remove(student)
everyone = VMobject(*everyone)
self.play(
ApplyMethod(everyone.fade, 0.7),
ApplyMethod(student.change_mode, "confused")
)
self.play(Blink(student))
self.wait()
self.play(ApplyFunction(
lambda m : m.change_mode("pondering").look(LEFT),
student
))
self.play(Blink(student))
self.wait()
class DeterminedByTwoBasisVectors(LinearTransformationScene):
CONFIG = {
"show_basis_vectors" : False
}
def construct(self):
self.setup()
i_hat = self.add_vector([1, 0], color = X_COLOR)
self.add_transformable_label(
i_hat, "\\hat{\\imath}", "\\hat{\\imath}",
color = X_COLOR
)
j_hat = self.add_vector([0, 1], color = Y_COLOR)
self.add_transformable_label(
j_hat, "\\hat{\\jmath}", "\\hat{\\jmath}",
color = Y_COLOR
)
t_matrix = np.array([[2, 2], [-2, 1]])
matrix = t_matrix.transpose()
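        # Factor the transformation into two steps: matrix1 moves only i-hat (its
        # second column stays [0, 1]), and matrix2 = matrix @ inv(matrix1), so
        # applying matrix1 and then matrix2 reproduces the full transformation.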
matrix1 = np.array(matrix)
matrix1[:,1] = [0, 1]
matrix2 = np.dot(matrix, np.linalg.inv(matrix1))
self.wait()
self.apply_transposed_matrix(matrix1.transpose())
self.apply_transposed_matrix(matrix2.transpose())
self.wait()
class FollowLinearCombination(LinearTransformationScene):
def construct(self):
vect_coords = [-1, 2]
t_matrix = np.array([[2, 2], [-2, 1]])
#Draw vectors
self.setup()
i_label = self.add_transformable_label(
self.i_hat, "\\hat{\\imath}", animate = False,
direction = "right", color = X_COLOR
)
j_label = self.add_transformable_label(
self.j_hat, "\\hat{\\jmath}", animate = False,
direction = "right", color = Y_COLOR
)
vect = self.add_vector(vect_coords)
vect_array = Matrix(["x", "y"], add_background_rectangles_to_entries = True)
v_equals = TexMobject(["\\vec{\\textbf{v}}", "="])
v_equals.split()[0].set_color(YELLOW)
v_equals.next_to(vect_array, LEFT)
vect_array.add(v_equals)
vect_array.to_edge(UP, buff = 0.2)
background_rect = BackgroundRectangle(vect_array)
vect_array.get_entries().set_color(YELLOW)
self.play(ShowCreation(background_rect), Write(vect_array))
self.add_foreground_mobject(background_rect, vect_array)
#Show scaled vectors
x, y = vect_array.get_entries().split()
scaled_i_label = VMobject(x.copy(), i_label)
scaled_j_label = VMobject(y.copy(), j_label)
scaled_i = self.i_hat.copy().scale(vect_coords[0])
scaled_j = self.j_hat.copy().scale(vect_coords[1])
for mob in scaled_i, scaled_j:
mob.fade(0.3)
scaled_i_label_target = scaled_i_label.copy()
scaled_i_label_target.arrange_submobjects(buff = 0.1)
scaled_i_label_target.next_to(scaled_i, DOWN)
scaled_j_label_target = scaled_j_label.copy()
scaled_j_label_target.arrange_submobjects(buff = 0.1)
scaled_j_label_target.next_to(scaled_j, LEFT)
self.show_scaled_vectors(vect_array, vect_coords, i_label, j_label)
self.apply_transposed_matrix(t_matrix)
self.show_scaled_vectors(vect_array, vect_coords, i_label, j_label)
self.record_basis_coordinates(vect_array, vect)
def show_scaled_vectors(self, vect_array, vect_coords, i_label, j_label):
x, y = vect_array.get_entries().split()
scaled_i_label = VMobject(x.copy(), i_label.copy())
scaled_j_label = VMobject(y.copy(), j_label.copy())
scaled_i = self.i_hat.copy().scale(vect_coords[0])
scaled_j = self.j_hat.copy().scale(vect_coords[1])
for mob in scaled_i, scaled_j:
mob.fade(0.3)
scaled_i_label_target = scaled_i_label.copy()
scaled_i_label_target.arrange_submobjects(buff = 0.1)
scaled_i_label_target.next_to(scaled_i.get_center(), DOWN)
scaled_j_label_target = scaled_j_label.copy()
scaled_j_label_target.arrange_submobjects(buff = 0.1)
scaled_j_label_target.next_to(scaled_j.get_center(), LEFT)
self.play(
Transform(self.i_hat.copy(), scaled_i),
Transform(scaled_i_label, scaled_i_label_target)
)
scaled_i = self.get_mobjects_from_last_animation()[0]
self.play(
Transform(self.j_hat.copy(), scaled_j),
Transform(scaled_j_label, scaled_j_label_target)
)
scaled_j = self.get_mobjects_from_last_animation()[0]
self.play(*[
ApplyMethod(mob.shift, scaled_i.get_end())
for mob in (scaled_j, scaled_j_label)
])
self.wait()
self.play(*list(map(FadeOut, [
scaled_i, scaled_j, scaled_i_label, scaled_j_label,
])))
def record_basis_coordinates(self, vect_array, vect):
i_label = vector_coordinate_label(self.i_hat)
i_label.set_color(X_COLOR)
j_label = vector_coordinate_label(self.j_hat)
j_label.set_color(Y_COLOR)
for mob in i_label, j_label:
mob.scale_in_place(0.8)
background = BackgroundRectangle(mob)
self.play(ShowCreation(background), Write(mob))
self.wait()
x, y = vect_array.get_entries().split()
pre_formula = VMobject(
x, i_label, TexMobject("+"),
y, j_label
)
post_formula = pre_formula.copy()
pre_formula.split()[2].fade(1)
post_formula.arrange_submobjects(buff = 0.1)
post_formula.next_to(vect, DOWN)
background = BackgroundRectangle(post_formula)
everything = self.get_mobjects()
everything.remove(vect)
self.play(*[
ApplyMethod(m.fade) for m in everything
] + [
ShowCreation(background, run_time = 2, rate_func = squish_rate_func(smooth, 0.5, 1)),
Transform(pre_formula.copy(), post_formula, run_time = 2),
ApplyMethod(vect.set_stroke, width = 7)
])
self.wait()
class MatrixVectorMultiplicationCopy(MatrixVectorMultiplicationAbstract):
pass ## Here just for stage_animations.py purposes
class RecapOver(TeacherStudentsScene):
def construct(self):
self.setup()
self.teacher_says("Recap over!")
class TwoSuccessiveTransformations(LinearTransformationScene):
CONFIG = {
"foreground_plane_kwargs" : {
"x_radius" : FRAME_WIDTH,
"y_radius" : FRAME_WIDTH,
"secondary_line_ratio" : 0
},
}
def construct(self):
self.setup()
self.apply_transposed_matrix([[2, 1],[1, 2]])
self.apply_transposed_matrix([[-1, -0.5],[0, -0.5]])
self.wait()
class RotationThenShear(LinearTransformationScene):
CONFIG = {
"foreground_plane_kwargs" : {
"x_radius" : FRAME_X_RADIUS,
"y_radius" : FRAME_WIDTH,
"secondary_line_ratio" : 0
},
}
def construct(self):
self.setup()
rot_words = TextMobject("$90^\\circ$ rotation counterclockwise")
shear_words = TextMobject("followed by a shear")
rot_words.set_color(YELLOW)
shear_words.set_color(PINK)
VMobject(rot_words, shear_words).arrange_submobjects(DOWN).to_edge(UP)
for words in rot_words, shear_words:
words.add_background_rectangle()
self.play(Write(rot_words, run_time = 1))
self.add_foreground_mobject(rot_words)
self.apply_transposed_matrix([[0, 1], [-1, 0]])
self.play(Write(shear_words, run_time = 1))
self.add_foreground_mobject(shear_words)
self.apply_transposed_matrix([[1, 0], [1, 1]])
self.wait()
class IntroduceIdeaOfComposition(RotationThenShear):
def construct(self):
self.setup()
self.show_composition()
matrix = self.track_basis_vectors()
self.show_overall_effect(matrix)
def show_composition(self):
words = TextMobject([
"``Composition''",
"of a",
"rotation",
"and a",
"shear"
])
words.split()[0].set_submobject_colors_by_gradient(YELLOW, PINK, use_color_range_to = False)
words.split()[2].set_color(YELLOW)
words.split()[4].set_color(PINK)
words.add_background_rectangle()
words.to_edge(UP)
self.apply_transposed_matrix([[0, 1], [-1, 0]], run_time = 2)
self.apply_transposed_matrix([[1, 0], [1, 1]], run_time = 2)
self.play(
ApplyMethod(self.plane.fade),
Write(words),
Animation(self.i_hat),
Animation(self.j_hat),
)
self.wait()
def track_basis_vectors(self):
last_words = self.get_mobjects_from_last_animation()[1]
words = TextMobject([
"Record where",
"$\\hat{\\imath}$",
"and",
"$\\hat{\\jmath}$",
"land:"
])
rw, i_hat, a, j_hat, l = words.split()
i_hat.set_color(X_COLOR)
j_hat.set_color(Y_COLOR)
words.add_background_rectangle()
words.next_to(last_words, DOWN)
i_coords = vector_coordinate_label(self.i_hat)
j_coords = vector_coordinate_label(self.j_hat)
i_coords.set_color(X_COLOR)
j_coords.set_color(Y_COLOR)
i_background = BackgroundRectangle(i_coords)
j_background = BackgroundRectangle(j_coords)
matrix = Matrix(np.append(
i_coords.copy().get_mob_matrix(),
j_coords.copy().get_mob_matrix(),
axis = 1
))
matrix.next_to(words, RIGHT, aligned_edge = UP)
col1, col2 = [
VMobject(*matrix.get_mob_matrix()[:,i])
for i in (0, 1)
]
matrix_background = BackgroundRectangle(matrix)
self.play(Write(words))
self.wait()
self.play(ShowCreation(i_background), Write(i_coords), run_time = 2)
self.wait()
self.play(
Transform(i_background.copy(), matrix_background),
Transform(i_coords.copy().get_brackets(), matrix.get_brackets()),
ApplyMethod(i_coords.copy().get_entries().move_to, col1)
)
self.wait()
self.play(ShowCreation(j_background), Write(j_coords), run_time = 2)
self.wait()
self.play(
ApplyMethod(j_coords.copy().get_entries().move_to, col2)
)
self.wait()
matrix = VMobject(matrix_background, matrix)
return matrix
def show_overall_effect(self, matrix):
everything = self.get_mobjects()
everything = list_difference_update(
everything, matrix.get_family()
)
self.play(*list(map(FadeOut, everything)) + [Animation(matrix)])
new_matrix = matrix.copy()
new_matrix.center().to_edge(UP)
self.play(Transform(matrix, new_matrix))
self.wait()
self.remove(matrix)
self.setup()
everything = self.get_mobjects()
self.play(*list(map(FadeIn, everything)) + [Animation(matrix)])
func = self.get_matrix_transformation([[1, 1], [-1, 0]])
bases = VMobject(self.i_hat, self.j_hat)
new_bases = VMobject(*[
Vector(func(v.get_end()), color = v.get_color())
for v in bases.split()
])
self.play(
ApplyPointwiseFunction(func, self.plane),
Transform(bases, new_bases),
Animation(matrix),
run_time = 3
)
self.wait()
class PumpVectorThroughRotationThenShear(RotationThenShear):
def construct(self):
self.setup()
self.add_vector([2, 3])
self.apply_transposed_matrix([[0, 1], [-1, 0]], run_time = 2)
self.apply_transposed_matrix([[1, 0], [1, 1]], run_time = 2)
self.wait()
class ExplainWhyItsMatrixMultiplication(Scene):
def construct(self):
vect = Matrix(["x", "y"])
vect.get_entries().set_color(YELLOW)
rot_matrix = Matrix([[0, -1], [1, 0]])
rot_matrix.set_color(TEAL)
shear_matrix = Matrix([[1, 1], [0, 1]])
shear_matrix.set_color(PINK)
l_paren, r_paren = list(map(TexMobject, ["\\Big(", "\\Big)"]))
for p in l_paren, r_paren:
p.set_height(1.4*vect.get_height())
long_way = VMobject(
shear_matrix, l_paren, rot_matrix, vect, r_paren
)
long_way.arrange_submobjects(buff = 0.1)
long_way.to_edge(LEFT).shift(UP)
equals = TexMobject("=").next_to(long_way, RIGHT)
comp_matrix = Matrix([[1, -1], [1, 0]])
comp_matrix.set_column_colors(X_COLOR, Y_COLOR)
vect_copy = vect.copy()
short_way = VMobject(comp_matrix, vect_copy)
short_way.arrange_submobjects(buff = 0.1)
short_way.next_to(equals, RIGHT)
pairs = [
(rot_matrix, "Rotation"),
(shear_matrix, "Shear"),
(comp_matrix, "Composition"),
]
for matrix, word in pairs:
brace = Brace(matrix)
text = TextMobject(word).next_to(brace, DOWN)
brace.set_color(matrix.get_color())
text.set_color(matrix.get_color())
matrix.add(brace, text)
comp_matrix.split()[-1].set_submobject_colors_by_gradient(TEAL, PINK)
self.add(vect)
groups = [
[rot_matrix],
[l_paren, r_paren, shear_matrix],
[equals, comp_matrix, vect_copy],
]
for group in groups:
self.play(*list(map(Write, group)))
self.wait()
self.play(*list(map(FadeOut, [l_paren, r_paren, vect, vect_copy])))
comp_matrix.add(equals)
matrices = VMobject(shear_matrix, rot_matrix, comp_matrix)
self.play(ApplyMethod(
matrices.arrange_submobjects, buff = 0.1,
aligned_edge = UP
))
self.wait()
arrow = Arrow(rot_matrix.get_right(), shear_matrix.get_left())
arrow.shift((rot_matrix.get_top()[1]+0.2)*UP)
words = TextMobject("Read right to left")
words.submobjects.reverse()
words.next_to(arrow, UP)
functions = TexMobject("f(g(x))")
functions.next_to(words, UP)
self.play(ShowCreation(arrow))
self.play(Write(words))
self.wait()
self.play(Write(functions))
self.wait()
class MoreComplicatedExampleVisually(LinearTransformationScene):
CONFIG = {
"t_matrix1" : [[1, 1], [-2, 0]],
"t_matrix2" : [[0, 1], [2, 0]],
}
def construct(self):
self.setup()
t_matrix1 = np.array(self.t_matrix1)
t_matrix2 = np.array(self.t_matrix2)
t_m1_inv = np.linalg.inv(t_matrix1.transpose()).transpose()
t_m2_inv = np.linalg.inv(t_matrix2.transpose()).transpose()
m1_mob, m2_mob, comp_matrix = self.get_matrices()
self.play(Write(m1_mob))
self.add_foreground_mobject(m1_mob)
self.wait()
self.apply_transposed_matrix(t_matrix1)
self.wait()
self.play(Write(m1_mob.label))
self.add_foreground_mobject(m1_mob.label)
self.wait()
self.apply_transposed_matrix(t_m1_inv, run_time = 0)
self.wait()
self.play(Write(m2_mob))
self.add_foreground_mobject(m2_mob)
self.wait()
self.apply_transposed_matrix(t_matrix2)
self.wait()
self.play(Write(m2_mob.label))
self.add_foreground_mobject(m2_mob.label)
self.wait()
self.apply_transposed_matrix(t_m2_inv, run_time = 0)
self.wait()
for matrix in t_matrix1, t_matrix2:
self.apply_transposed_matrix(matrix, run_time = 1)
self.play(Write(comp_matrix))
self.add_foreground_mobject(comp_matrix)
self.wait()
self.play(*list(map(FadeOut, [
self.background_plane,
self.plane,
self.i_hat,
self.j_hat,
])) + [
Animation(m) for m in self.foreground_mobjects
])
self.remove(self.i_hat, self.j_hat)
self.wait()
def get_matrices(self):
m1_mob = Matrix(np.array(self.t_matrix1).transpose())
m2_mob = Matrix(np.array(self.t_matrix2).transpose())
comp_matrix = Matrix([["?", "?"], ["?", "?"]])
m1_mob.set_color(YELLOW)
m2_mob.set_color(PINK)
comp_matrix.get_entries().set_submobject_colors_by_gradient(YELLOW, PINK)
equals = TexMobject("=")
equals.next_to(comp_matrix, LEFT)
comp_matrix.add(equals)
m1_mob = VMobject(BackgroundRectangle(m1_mob), m1_mob)
m2_mob = VMobject(BackgroundRectangle(m2_mob), m2_mob)
comp_matrix = VMobject(BackgroundRectangle(comp_matrix), comp_matrix)
VMobject(
m2_mob, m1_mob, comp_matrix
).arrange_submobjects(buff = 0.1).to_corner(UP+LEFT).shift(DOWN)
for i, mob in enumerate([m1_mob, m2_mob]):
brace = Brace(mob, UP)
text = TexMobject("M_%d"%(i+1))
text.next_to(brace, UP)
brace.add_background_rectangle()
text.add_background_rectangle()
brace.add(text)
mob.label = brace
return m1_mob, m2_mob, comp_matrix
class MoreComplicatedExampleNumerically(MoreComplicatedExampleVisually):
def get_result(self):
return np.dot(self.t_matrix1, self.t_matrix2).transpose()
def construct(self):
m1_mob, m2_mob, comp_matrix = self.get_matrices()
self.add(m1_mob, m2_mob, m1_mob.label, m2_mob.label, comp_matrix)
result = self.get_result()
col1, col2 = [
VMobject(*m1_mob.split()[1].get_mob_matrix()[:,i])
for i in (0, 1)
]
col1.target_color = X_COLOR
col2.target_color = Y_COLOR
for col in col1, col2:
circle = Circle()
circle.stretch_to_fit_height(m1_mob.get_height())
circle.stretch_to_fit_width(m1_mob.get_width()/2.5)
circle.set_color(col.target_color)
circle.move_to(col)
col.circle = circle
triplets = [
(col1, "i", X_COLOR),
(col2, "j", Y_COLOR),
]
for i, (col, char, color) in enumerate(triplets):
self.add(col)
start_state = self.get_mobjects()
question = TextMobject(
"Where does $\\hat{\\%smath}$ go?"%char
)
question.split()[-4].set_color(color)
question.split()[-5].set_color(color)
question.scale(1.2)
question.shift(DOWN)
first = TextMobject("First here")
first.set_color(color)
first.shift(DOWN+LEFT)
first_arrow = Arrow(
first, col.circle.get_bottom(), color = color
)
second = TextMobject("Then to whatever this is")
second.set_color(color)
second.to_edge(RIGHT).shift(DOWN)
m2_copy = m2_mob.copy()
m2_target = m2_mob.copy()
m2_target.next_to(m2_mob, DOWN, buff = 1)
col_vect = Matrix(col.copy().split())
col_vect.set_color(color)
col_vect.next_to(m2_target, RIGHT, buff = 0.1)
second_arrow = Arrow(second, col_vect, color = color)
new_m2_copy = m2_mob.copy().split()[1]
intermediate = VMobject(
TexMobject("="),
col_vect.copy().get_entries().split()[0],
Matrix(new_m2_copy.get_mob_matrix()[:,0]),
TexMobject("+"),
col_vect.copy().get_entries().split()[1],
Matrix(new_m2_copy.get_mob_matrix()[:,1]),
TexMobject("=")
)
intermediate.arrange_submobjects(buff = 0.1)
intermediate.next_to(col_vect, RIGHT)
product = Matrix(result[:,i])
product.next_to(intermediate, RIGHT)
comp_col = VMobject(*comp_matrix.split()[1].get_mob_matrix()[:,i])
self.play(Write(question, run_time = 1 ))
self.wait()
self.play(
Transform(question, first),
ShowCreation(first_arrow),
ShowCreation(col.circle),
ApplyMethod(col.set_color, col.target_color)
)
self.wait()
self.play(
Transform(m2_copy, m2_target, run_time = 2),
ApplyMethod(col.copy().move_to, col_vect, run_time = 2),
Write(col_vect.get_brackets()),
Transform(first_arrow, second_arrow),
Transform(question, second),
)
self.wait()
self.play(*list(map(FadeOut, [question, first_arrow])))
self.play(Write(intermediate))
self.wait()
self.play(Write(product))
self.wait()
product_entries = product.get_entries()
self.play(
ApplyMethod(comp_col.set_color, BLACK),
ApplyMethod(product_entries.move_to, comp_col)
)
self.wait()
start_state.append(product_entries)
self.play(*[
FadeOut(mob)
for mob in self.get_mobjects()
if mob not in start_state
] + [
Animation(product_entries)
])
self.wait()
class GeneralMultiplication(MoreComplicatedExampleNumerically):
def get_result(self):
entries = list(map(TexMobject, [
"ae+bg", "af+bh", "ce+dg", "cf+dh"
]))
for mob in entries:
mob.split()[0].set_color(PINK)
mob.split()[3].set_color(PINK)
for mob in entries[0], entries[2]:
mob.split()[1].set_color(X_COLOR)
mob.split()[4].set_color(X_COLOR)
for mob in entries[1], entries[3]:
mob.split()[1].set_color(Y_COLOR)
mob.split()[4].set_color(Y_COLOR)
return np.array(entries).reshape((2, 2))
def get_matrices(self):
m1, m2, comp = MoreComplicatedExampleNumerically.get_matrices(self)
self.add(m1, m2, m1.label, m2.label, comp)
m1_entries = m1.split()[1].get_entries()
m2_entries = m2.split()[1].get_entries()
m2_entries_target = VMobject(*[
TexMobject(char).move_to(entry).set_color(entry.get_color())
for entry, char in zip(m2_entries.split(), "abcd")
])
m1_entries_target = VMobject(*[
TexMobject(char).move_to(entry).set_color(entry.get_color())
for entry, char in zip(m1_entries.split(), "efgh")
])
words = TextMobject("This method works generally")
self.play(Write(words, run_time = 2))
self.play(Transform(
m1_entries, m1_entries_target,
submobject_mode = "lagged_start"
))
self.play(Transform(
m2_entries, m2_entries_target,
submobject_mode = "lagged_start"
))
self.wait()
new_comp = Matrix(self.get_result())
new_comp.next_to(comp.split()[1].submobjects[-1], RIGHT)
new_comp.get_entries().set_color(BLACK)
self.play(
Transform(comp.split()[1].get_brackets(), new_comp.get_brackets()),
*[
ApplyMethod(q_mark.move_to, entry)
for q_mark, entry in zip(
comp.split()[1].get_entries().split(),
new_comp.get_entries().split()
)
]
)
self.wait()
self.play(FadeOut(words))
return m1, m2, comp
class MoreComplicatedExampleWithJustIHat(MoreComplicatedExampleVisually):
CONFIG = {
"show_basis_vectors" : False,
"v_coords" : [1, 0],
"v_color" : X_COLOR,
}
def construct(self):
self.setup()
self.add_vector(self.v_coords, self.v_color)
self.apply_transposed_matrix(self.t_matrix1)
self.wait()
self.apply_transposed_matrix(self.t_matrix2)
self.wait()
class MoreComplicatedExampleWithJustJHat(MoreComplicatedExampleWithJustIHat):
CONFIG = {
"v_coords" : [0, 1],
"v_color" : Y_COLOR,
}
class RoteMatrixMultiplication(NumericalMatrixMultiplication):
CONFIG = {
"left_matrix" : [[-3, 1], [2, 5]],
"right_matrix" : [[5, 3], [7, -3]]
}
class NeverForget(TeacherStudentsScene):
def construct(self):
self.setup()
self.teacher_says("Never forget what \\\\ this represents!")
self.random_blink()
self.student_thinks("", student_index = 0)
def warp(point):
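            # Shift each point slightly down-right, then send it to distance 20
            # from the origin along that direction, warping the scene off screen.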
point += 2*DOWN+RIGHT
return 20*point/get_norm(point)
self.play(ApplyPointwiseFunction(
warp,
VMobject(*self.get_mobjects())
))
class AskAboutCommutativity(Scene):
def construct(self):
l_m1, l_m2, eq, r_m2, r_m1 = TexMobject([
"M_1", "M_2", "=", "M_2", "M_1"
]).scale(1.5).split()
VMobject(l_m1, r_m1).set_color(YELLOW)
VMobject(l_m2, r_m2).set_color(PINK)
q_marks = TextMobject("???")
q_marks.set_color(TEAL)
q_marks.next_to(eq, UP)
neq = TexMobject("\\neq")
neq.move_to(eq)
self.play(*list(map(Write, [l_m1, l_m2, eq])))
self.play(
Transform(l_m1.copy(), r_m1),
Transform(l_m2.copy(), r_m2),
path_arc = -np.pi,
run_time = 2
)
self.play(Write(q_marks))
self.wait()
self.play(Transform(
VMobject(eq, q_marks),
VMobject(neq),
submobject_mode = "lagged_start"
))
self.wait()
class ShowShear(LinearTransformationScene):
CONFIG = {
"title" : "Shear",
"title_color" : PINK,
"t_matrix" : [[1, 0], [1, 1]]
}
def construct(self):
self.setup()
title = TextMobject(self.title)
title.scale(1.5).to_edge(UP)
title.set_color(self.title_color)
title.add_background_rectangle()
self.add_foreground_mobject(title)
self.wait()
self.apply_transposed_matrix(self.t_matrix)
self.wait()
class ShowRotation(ShowShear):
CONFIG = {
"title" : "$90^\\circ$ rotation",
"title_color" : YELLOW,
"t_matrix" : [[0, 1], [-1, 0]]
}
class FirstShearThenRotation(LinearTransformationScene):
CONFIG = {
"title" : "First shear then rotation",
"t_matrix1" : [[1, 0], [1, 1]],
"t_matrix2" : [[0, 1], [-1, 0]],
"foreground_plane_kwargs" : {
"x_radius" : FRAME_WIDTH,
"y_radius" : FRAME_WIDTH,
"secondary_line_ratio" : 0
},
}
def construct(self):
self.setup()
title_parts = self.title.split(" ")
title = TextMobject(title_parts)
for i, part in enumerate(title_parts):
if part == "rotation":
title.split()[i].set_color(YELLOW)
elif part == "shear":
title.split()[i].set_color(PINK)
title.scale(1.5)
self.add_title(title)
self.apply_transposed_matrix(self.t_matrix1)
self.apply_transposed_matrix(self.t_matrix2)
self.i_hat.rotate(-0.01)##Laziness
self.wait()
self.write_vector_coordinates(self.i_hat, color = X_COLOR)
self.wait()
self.write_vector_coordinates(self.j_hat, color = Y_COLOR)
self.wait()
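# Note: the class below rebinds the name RotationThenShear defined earlier in this file,
# re-implementing the same rotation-then-shear scene on top of FirstShearThenRotation.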
class RotationThenShear(FirstShearThenRotation):
CONFIG = {
"title" : "First rotation then shear",
"t_matrix1" : [[0, 1], [-1, 0]],
"t_matrix2" : [[1, 0], [1, 1]],
}
class NoticeTheLackOfComputations(TeacherStudentsScene):
def construct(self):
self.setup()
self.teacher_says("""
Notice the lack
of computations!
""")
self.random_blink()
students = self.get_students()
random.shuffle(students)
unit = np.array([-0.5, 0.5])
self.play(*[
ApplyMethod(
pi.change_mode, "pondering",
rate_func = squish_rate_func(smooth, *np.clip(unit+0.5*i, 0, 1))
)
for i, pi in enumerate(students)
])
self.random_blink()
self.wait()
class AskAssociativityQuestion(Scene):
def construct(self):
morty = Mortimer()
morty.scale(0.8)
morty.to_corner(DOWN+RIGHT)
morty.shift(0.5*LEFT)
title = TextMobject("Associativity:")
title.to_corner(UP+LEFT)
lhs = TexMobject(list("(AB)C"))
lp, a, b, rp, c = lhs.split()
rhs = VMobject(*[m.copy() for m in (a, lp, b, c, rp)])
point = VectorizedPoint()
start = VMobject(*[m.copy() for m in (point, a, b, point, c)])
for mob in lhs, rhs, start:
mob.arrange_submobjects(buff = 0.1)
a, lp, b, c, rp = rhs.split()
rhs = VMobject(lp, a, b, rp, c)##Align order to lhs
eq = TexMobject("=")
q_marks = TextMobject("???")
q_marks.set_submobject_colors_by_gradient(TEAL_B, TEAL_D)
q_marks.next_to(eq, UP)
lhs.next_to(eq, LEFT)
rhs.next_to(eq, RIGHT)
start.move_to(lhs)
self.add(morty, title)
self.wait()
self.play(Blink(morty))
self.play(Write(start))
self.wait()
self.play(Transform(start, lhs))
self.wait()
self.play(
Transform(lhs, rhs, path_arc = -np.pi),
Write(eq)
)
self.play(Write(q_marks))
self.play(Blink(morty))
self.play(morty.change_mode, "pondering")
lp, a, b, rp, c = start.split()
self.show_full_matrices(morty, a, b, c, title)
def show_full_matrices(self, morty, a, b, c, title):
everything = self.get_mobjects()
everything.remove(morty)
everything.remove(title)
everything = VMobject(*everything)
matrices = list(map(matrix_to_mobject, [
np.array(list(m)).reshape((2, 2))
for m in ("abcd", "efgh", "ijkl")
]))
VMobject(*matrices).arrange_submobjects()
self.play(everything.to_edge, UP)
for letter, matrix in zip([a, b, c], matrices):
self.play(Transform(
letter.copy(), matrix,
submobject_mode = "lagged_start"
))
self.remove(*self.get_mobjects_from_last_animation())
self.add(matrix)
self.wait()
self.move_matrix_parentheses(morty, matrices)
def move_matrix_parentheses(self, morty, matrices):
m1, m2, m3 = matrices
parens = TexMobject(["(", ")"])
parens.set_height(1.2*m1.get_height())
lp, rp = parens.split()
state1 = VMobject(
VectorizedPoint(m1.get_left()),
m1, m2,
VectorizedPoint(m2.get_right()),
m3
)
state2 = VMobject(*[
m.copy() for m in (lp, m1, m2, rp, m3)
])
state3 = VMobject(*[
m.copy() for m in (m1, lp, m2, m3, rp)
])
for state in state2, state3:
state.arrange_submobjects(RIGHT, buff = 0.1)
m1, lp, m2, m3, rp = state3.split()
state3 = VMobject(lp, m1, m2, rp, m3)
self.play(morty.change_mode, "angry")
for state in state2, state3:
self.play(Transform(state1, state))
self.wait()
self.play(morty.change_mode, "confused")
self.wait()
class ThreeSuccessiveTransformations(LinearTransformationScene):
CONFIG = {
"t_matrices" : [
[[2, 1], [1, 2]],
[[np.cos(-np.pi/6), np.sin(-np.pi/6)], [-np.sin(-np.pi/6), np.cos(-np.pi/6)]],
[[1, 0], [1, 1]]
],
"symbols_str" : "A(BC)",
"include_background_plane" : False,
}
def construct(self):
self.setup()
symbols = TexMobject(list(self.symbols_str))
symbols.scale(1.5)
symbols.to_edge(UP)
a, b, c = None, None, None
for mob, letter in zip(symbols.split(), self.symbols_str):
if letter == "A":
a = mob
elif letter == "B":
b = mob
elif letter == "C":
c = mob
symbols.add_background_rectangle()
self.add_foreground_mobject(symbols)
brace = Brace(c, DOWN)
words = TextMobject("Apply this transformation")
words.add_background_rectangle()
words.next_to(brace, DOWN)
brace.add(words)
self.play(Write(brace, run_time = 1))
self.add_foreground_mobject(brace)
last = VectorizedPoint()
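        # Walk the symbols right-to-left (C, then B, then A): the rightmost matrix
        # is the transformation applied to the plane first.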
for t_matrix, sym in zip(self.t_matrices, [c, b, a]):
self.play(
brace.next_to, sym, DOWN,
sym.set_color, YELLOW,
last.set_color, WHITE
)
self.apply_transposed_matrix(t_matrix, run_time = 1)
last = sym
self.wait()
class ThreeSuccessiveTransformationsAltParens(ThreeSuccessiveTransformations):
CONFIG = {
"symbols_str" : "(AB)C"
}
class ThreeSuccessiveTransformationsSimple(ThreeSuccessiveTransformations):
CONFIG = {
"symbols_str" : "ABC"
}
class ExplanationTrumpsProof(Scene):
def construct(self):
greater = TexMobject(">")
greater.shift(RIGHT)
explanation = TextMobject("Good explanation")
explanation.set_color(BLUE)
proof = TextMobject("Symbolic proof")
proof.set_color(LIGHT_BROWN)
explanation.next_to(greater, LEFT)
proof.next_to(greater, RIGHT)
explanation.get_center = lambda : explanation.get_right()
proof.get_center = lambda : proof.get_left()
self.play(
Write(explanation),
Write(greater),
Write(proof),
run_time = 1
)
self.play(
explanation.scale_in_place, 1.5,
proof.scale_in_place, 0.7
)
self.wait()
class GoPlay(TeacherStudentsScene):
def construct(self):
self.setup()
self.teacher_says("Go play!", height = 3, width = 5)
self.play(*[
ApplyMethod(student.change_mode, "happy")
for student in self.get_students()
])
self.random_blink()
student = self.get_students()[-1]
bubble = ThoughtBubble(direction = RIGHT, width = 6, height = 5)
bubble.pin_to(student, allow_flipping = False)
bubble.make_green_screen()
self.play(
ShowCreation(bubble),
student.look, UP+LEFT,
)
self.play(student.change_mode, "pondering")
for x in range(3):
self.random_blink()
self.wait(2)
class NextVideo(Scene):
def construct(self):
title = TextMobject("""
Next video: Linear transformations in three dimensions
""")
title.set_width(FRAME_WIDTH - 2)
title.to_edge(UP)
rect = Rectangle(width = 16, height = 9, color = BLUE)
rect.set_height(6)
rect.next_to(title, DOWN)
self.add(title)
self.play(ShowCreation(rect))
self.wait()
| 34.232687 | 100 | 0.574527 |
7948206db2fd8ebfbae9d97d49de873dd53f7638
| 6,288 |
py
|
Python
|
module.py
|
dtylor/WalkAsString
|
88841062b2eaa343da0661c3fe31acc4a7d1a0b8
|
[
"MIT"
] | 5 |
2019-03-10T13:38:57.000Z
|
2019-11-08T00:12:32.000Z
|
module.py
|
dtylor/WalkAsString
|
88841062b2eaa343da0661c3fe31acc4a7d1a0b8
|
[
"MIT"
] | 10 |
2019-02-25T01:16:57.000Z
|
2019-03-04T03:01:16.000Z
|
module.py
|
dtylor/WalkAsString
|
88841062b2eaa343da0661c3fe31acc4a7d1a0b8
|
[
"MIT"
] | 1 |
2020-05-10T09:19:13.000Z
|
2020-05-10T09:19:13.000Z
|
from n2v import node2vec
from sklearn.cluster import KMeans
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
import networkx as nx
import pandas as pd
import numpy as np
from graphwave import graphwave
from graphwave.utils import utils
def _get_components(networkXGraph):
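    # Map each component id to the list of node ids belonging to that component.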
return pd.DataFrame([{"node": k, "component": v} for k, v in nx.get_node_attributes(
G=networkXGraph, name='component').items()]).groupby('component')['node'].apply(list).to_dict()
def divide_chunks(l, n):
    # yield successive n-sized chunks of list l
for i in range(0, len(l), n):
yield l[i:i + n]
def get_structural_signatures(networkXGraph, vocab_size=1, params={'num_kmeans_clusters': 4, "num_pca_components": 6, "num_batch":500}):
"""
Get structural embeddings using GraphWave.
Learns structural signatures of each node using GraphWave, and adds these learned structures to the node attributes on the NetworkX graph.
Example:
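        A minimal usage sketch (illustrative only; assumes `G` is a NetworkX graph
        whose nodes carry a 'component' attribute):
            >>> G, pca, km = get_structural_signatures(G, vocab_size=10)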
Attributes:
networkXGraph (networkx.classes.graph.Graph): A graph containing nodes, edges, and their attributes. Nodes must include an attribute called 'component'.
    Returns: a tuple (graph, pca, km): the same NetworkX graph with a `structure` cluster id added to every node, plus the fitted PCA and KMeans models.
"""
nb_clust = params['num_kmeans_clusters']
n_components = params['num_pca_components']
batch_size = params['num_batch']
components = list(divide_chunks(list(_get_components(networkXGraph).values()),batch_size))
heat_signatures = []
nodes_list = []
for n in components:
#flatten list of lists
nodes = [item for sublist in n for item in sublist]
subgraph = networkXGraph.subgraph(nodes)
chi, heat_print, taus = graphwave.graphwave_alg(
subgraph, np.linspace(0, 10, 20), taus='auto', verbose=True)
if len(subgraph.nodes) < n_components:
print("Omitting graph " + str(subgraph_id) + " with node count: " +
str(len(subgraph.nodes)) + " < " + str(n_components))
else:
heat_signatures += chi.tolist()
nodes_list += nodes
print("finished graphwave_alg batches")
pca = PCA(n_components = n_components)
trans_data_all = pca.fit_transform(StandardScaler().fit_transform(np.array(heat_signatures)))
km = KMeans(n_clusters = nb_clust).fit(trans_data_all)
labels_pred = km.labels_
out = pd.DataFrame(labels_pred.astype(int), index=nodes_list)
out[0] += vocab_size
structure_labels = out[0].to_dict()
nx.set_node_attributes(G=networkXGraph, values=structure_labels, name='structure')
return networkXGraph, pca, km
def walk_as_string(networkXGraph, componentLabels, params={'num_walks': 20, 'walk_length': 30}):
"""
Generate random walks over a graph.
Create a language to define nodes/edges and their characteristics, and generate random walks in this language.
Example:
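        A minimal usage sketch (illustrative only; assumes `G` has 'component' node
        attributes and `labels` maps component ids to y-values):
            >>> walks_df = walk_as_string(G, labels, params={'num_walks': 10, 'walk_length': 20})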
Attributes:
networkXGraph (networkx.classes.graph.Graph): A graph containing nodes, edges, and their attributes. Nodes must include an attribute called 'component'.
componentLabels (dict): A dictionary mapping graph components to their y-values.
    Returns: a DataFrame with one row per walk: the walk text, the graph label, the start node and its component.
"""
# TODO: Allow custom mapping for features e.g. pass a dict for node labels to convert them to chemical names
# graphComponentLabels = nx.get_node_attributes(G=networkXGraph, name='component')
num_walks = params['num_walks']
walk_length = params['walk_length']
nodeFeatures = list(
set([z for x in list(networkXGraph.nodes(data=True)) for z in x[1].keys()]))
edgeFeatures = list(
set([z for x in list(networkXGraph.edges(data=True)) for z in x[2].keys()]))
# Remove 'cheating' features (e.g. component)
if "component" in nodeFeatures:
nodeFeatures.remove("component")
# Make sure edges have weights
if "weight" in edgeFeatures:
edgeFeatures.remove("weight")
else:
nx.set_edge_attributes(
G=networkXGraph, values=1, name='weight')
n2vG = node2vec.Graph(nx_G=networkXGraph, is_directed=False, p=1, q=.7)
n2vG.preprocess_transition_probs()
walks = n2vG.simulate_walks(num_walks, walk_length)
node_words = {}
edge_words = {}
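    # Cache the textual form of each node/edge so walks that revisit them do not
    # rebuild the attribute strings.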
def express_node(node_idx):
if node_idx not in node_words:
node = networkXGraph.nodes[node_idx]
# node_words[node_idx] = " ".join([str(attribute)+"_"+str(node[attribute])
# for attribute in nodeFeatures if attribute in node])
node_words[node_idx] = " ".join([str(node[attribute])
for attribute in nodeFeatures if attribute in node])
return node_words[node_idx]
def express_edge(src_node, dst_node):
if (src_node, dst_node) not in edge_words:
edge = networkXGraph.edges[src_node, dst_node]
# edge_words[src_node, dst_node] = " ".join([str(attribute)+"_"+str(edge[attribute])
# for attribute in edgeFeatures if attribute in edge])
edge_words[src_node, dst_node] = " ".join(
[str(edge[attribute]) for attribute in edgeFeatures if attribute in edge])
return edge_words[src_node, dst_node]
    # .as_matrix() was removed in newer pandas; .to_numpy() is the equivalent call
    sorted_walks = pd.DataFrame(walks).sort_values(0).to_numpy()
print(sorted_walks[0])
print(sorted_walks[1])
print(sorted_walks[2])
walks = [list(a) for a in sorted_walks]
walks_as_words = [express_node(walk[0]) + " " + " ".join([express_edge(walk[step], walk[step+1]) + " " +
express_node(walk[step+1]) for step in range(len(walk) - 1)]) for walk in walks]
result = pd.DataFrame({"walk": walks_as_words, "start_node": np.array(walks)[:,0]})
result['component'] = result['start_node'].map(nx.get_node_attributes(networkXGraph, name='component'))
result['label'] = result['component'].map(componentLabels)
result = result[['walk', 'label', 'start_node', 'component']]
return result
| 39.54717 | 160 | 0.675254 |
794820ed3aa4128aa9e8d1a9a24dc19521fb3ede
| 2,914 |
py
|
Python
|
insights/parsers/postconf.py
|
haithcockce/insights-core
|
b2e197c6bfc25bcbe2926f07c35a80f2cf8232f5
|
[
"Apache-2.0"
] | 121 |
2017-05-30T20:23:25.000Z
|
2022-03-23T12:52:15.000Z
|
insights/parsers/postconf.py
|
haithcockce/insights-core
|
b2e197c6bfc25bcbe2926f07c35a80f2cf8232f5
|
[
"Apache-2.0"
] | 1,977 |
2017-05-26T14:36:03.000Z
|
2022-03-31T10:38:53.000Z
|
insights/parsers/postconf.py
|
haithcockce/insights-core
|
b2e197c6bfc25bcbe2926f07c35a80f2cf8232f5
|
[
"Apache-2.0"
] | 244 |
2017-05-30T20:22:57.000Z
|
2022-03-26T10:09:39.000Z
|
"""
Postconf - command ``postconf``
===============================
PostconfBuiltin - command ``postconf -C builtin``
=================================================
"""
from insights.core import CommandParser
from insights.core.plugins import parser
from insights.specs import Specs
from insights.parsers import SkipException
class _Postconf(CommandParser, dict):
"""
Class for parsing the ``postconf`` command.
Sample input::
smtpd_tls_loglevel = 0
smtpd_tls_mandatory_ciphers = medium
smtpd_tls_mandatory_exclude_ciphers =
smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1
Examples:
>>> type(_postconf)
<class 'insights.parsers.postconf._Postconf'>
>>> _postconf['smtpd_tls_loglevel'] == '0'
True
>>> _postconf['smtpd_tls_mandatory_ciphers'] == 'medium'
True
>>> _postconf['smtpd_tls_mandatory_exclude_ciphers'] == ''
True
>>> _postconf['smtpd_tls_mandatory_protocols'] == '!SSLv2, !SSLv3, !TLSv1'
True
"""
def parse_content(self, content):
if not content:
raise SkipException
data = dict()
for line in content:
if '=' in line:
key, value = [i.strip() for i in line.split('=', 1)]
data[key] = value
if not data:
raise SkipException
self.update(data)
@parser(Specs.postconf_builtin)
class PostconfBuiltin(_Postconf):
"""
Class for parsing the ``postconf -C builtin`` command.
Sample input::
smtpd_tls_loglevel = 0
smtpd_tls_mandatory_ciphers = medium
smtpd_tls_mandatory_exclude_ciphers =
smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1
Examples:
>>> type(postconfb)
<class 'insights.parsers.postconf.PostconfBuiltin'>
>>> postconfb['smtpd_tls_loglevel'] == '0'
True
>>> postconfb['smtpd_tls_mandatory_ciphers'] == 'medium'
True
>>> postconfb['smtpd_tls_mandatory_exclude_ciphers'] == ''
True
>>> postconfb['smtpd_tls_mandatory_protocols'] == '!SSLv2, !SSLv3, !TLSv1'
True
"""
@parser(Specs.postconf)
class Postconf(_Postconf):
"""
Class for parsing the ``postconf`` command.
Sample input::
smtpd_tls_loglevel = 0
smtpd_tls_mandatory_ciphers = medium
smtpd_tls_mandatory_exclude_ciphers =
smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1
Examples:
>>> type(postconf)
<class 'insights.parsers.postconf.Postconf'>
>>> postconf['smtpd_tls_loglevel'] == '0'
True
>>> postconf['smtpd_tls_mandatory_ciphers'] == 'medium'
True
>>> postconf['smtpd_tls_mandatory_exclude_ciphers'] == ''
True
>>> postconf['smtpd_tls_mandatory_protocols'] == '!SSLv2, !SSLv3, !TLSv1'
True
"""
| 28.568627 | 82 | 0.601579 |
79482145c3765c05fd812b000d67344485fe6f12
| 2,733 |
py
|
Python
|
train.py
|
batuhan-baskurt/AICITY2020_DMT_VehicleReID
|
3f18b61f0dbf1fbac1f2adb0dbb07a8b647e1747
|
[
"MIT"
] | null | null | null |
train.py
|
batuhan-baskurt/AICITY2020_DMT_VehicleReID
|
3f18b61f0dbf1fbac1f2adb0dbb07a8b647e1747
|
[
"MIT"
] | null | null | null |
train.py
|
batuhan-baskurt/AICITY2020_DMT_VehicleReID
|
3f18b61f0dbf1fbac1f2adb0dbb07a8b647e1747
|
[
"MIT"
] | null | null | null |
import os
from torch.backends import cudnn
from utils.logger import setup_logger
from datasets import make_dataloader
from model import make_model
from solver import make_optimizer, WarmupMultiStepLR
from loss import make_loss
from processor import do_train
import random
import torch
import numpy as np
import argparse
from config import cfg
if __name__ == '__main__':
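    # Fix all random seeds (torch, CUDA, numpy, python's random) for reproducibility.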
torch.manual_seed(1234)
torch.cuda.manual_seed(1234)
torch.cuda.manual_seed_all(1234)
np.random.seed(1234)
random.seed(1234)
torch.backends.cudnn.deterministic = True
cudnn.benchmark = True
parser = argparse.ArgumentParser(description="ReID Baseline Training")
parser.add_argument(
"--config_file", default="", help="path to config file", type=str
)
parser.add_argument("opts", help="Modify config options using the command-line", default=None,
nargs=argparse.REMAINDER)
args = parser.parse_args()
if args.config_file != "":
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.freeze()
output_dir = cfg.OUTPUT_DIR
if output_dir and not os.path.exists(output_dir):
os.makedirs(output_dir)
logger = setup_logger("reid_baseline", output_dir, if_train=True)
logger.info("Saving model in the path :{}".format(cfg.OUTPUT_DIR))
logger.info(args)
if args.config_file != "":
logger.info("Loaded configuration file {}".format(args.config_file))
with open(args.config_file, 'r') as cf:
config_str = "\n" + cf.read()
logger.info(config_str)
logger.info("Running with config:\n{}".format(cfg))
os.environ['CUDA_VISIBLE_DEVICES'] = cfg.MODEL.DEVICE_ID
train_loader, val_loader, num_query, num_classes = make_dataloader(cfg)
if cfg.MODEL.PRETRAIN_CHOICE == 'finetune':
model = make_model(cfg, num_class=num_classes)
model.load_param_finetune(cfg.MODEL.PRETRAIN_PATH)
print('Loading pretrained model for finetuning......')
else:
model = make_model(cfg, num_class=num_classes)
loss_func, center_criterion = make_loss(cfg, num_classes=num_classes)
optimizer, optimizer_center = make_optimizer(cfg, model, center_criterion)
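    # Learning-rate schedule: warm up (per cfg.SOLVER.WARMUP_METHOD) for cfg.SOLVER.WARMUP_EPOCHS,
    # then decay by cfg.SOLVER.GAMMA at each milestone in cfg.SOLVER.STEPS.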
scheduler = WarmupMultiStepLR(optimizer, cfg.SOLVER.STEPS, cfg.SOLVER.GAMMA,
cfg.SOLVER.WARMUP_FACTOR,
cfg.SOLVER.WARMUP_EPOCHS, cfg.SOLVER.WARMUP_METHOD)
do_train(
cfg,
model,
center_criterion,
train_loader,
val_loader,
optimizer,
optimizer_center,
scheduler, # modify for using self trained model
loss_func,
num_query
)
| 32.927711 | 98 | 0.681668 |
7948218f5ea7d53d79e559d4dacb8274b9d84ae5
| 1,153 |
py
|
Python
|
var/spack/repos/builtin/packages/r-rcppcctz/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2 |
2020-10-15T01:08:42.000Z
|
2021-10-18T01:28:18.000Z
|
var/spack/repos/builtin/packages/r-rcppcctz/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2 |
2019-07-30T10:12:28.000Z
|
2019-12-17T09:02:27.000Z
|
var/spack/repos/builtin/packages/r-rcppcctz/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 5 |
2019-07-30T09:42:14.000Z
|
2021-01-25T05:39:20.000Z
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRcppcctz(RPackage):
"""'Rcpp' Access to the 'CCTZ' timezone library is provided. 'CCTZ' is a
C++ library for translating between absolute and civil times using the
rules of a time zone. The 'CCTZ' source code, released under the
Apache 2.0 License, is included in this package. See
<https://github.com/google/cctz> for more details."""
homepage = "https://github.com/eddelbuettel/rcppcctz"
url = "https://cloud.r-project.org/src/contrib/RcppCCTZ_0.2.3.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/RcppCCTZ"
version('0.2.6', sha256='0e9a76055d29da24cd4c4069c78c1f44998f3461be60c7a6c3e7a35059fb79ae')
version('0.2.4', sha256='98b6867d38abe03957fe803e88b6cc2d122b85a68ef07fa86f7e1009d6c00819')
version('0.2.3', sha256='0fefcc98387b2c1a5907e5230babb46e2cc11b603424f458f515e445a3236031')
depends_on('r-rcpp@0.11.0:', type=('build', 'run'))
| 46.12 | 95 | 0.733738 |
794821df6efe801ba51e52d361d5aa4d3c7d5dac
| 25,267 |
py
|
Python
|
fbcrawl/items.py
|
pramoth/fbcrawl
|
dd0ed825364f6960456a9f3a00d12a2e143f733d
|
[
"Apache-2.0"
] | null | null | null |
fbcrawl/items.py
|
pramoth/fbcrawl
|
dd0ed825364f6960456a9f3a00d12a2e143f733d
|
[
"Apache-2.0"
] | null | null | null |
fbcrawl/items.py
|
pramoth/fbcrawl
|
dd0ed825364f6960456a9f3a00d12a2e143f733d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader.processors import TakeFirst, Join, MapCompose
from datetime import datetime, timedelta
def comments_strip(string,loader_context):
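    # Pull the numeric comment count out of strings like '25 commenti' / '1,250 Comments'.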
lang = loader_context['lang']
if lang == 'it':
if string[0].rfind('Commenta') != -1:
return
else:
return string[0].rstrip(' commenti')
elif lang == 'en':
if(string[0] == 'Share'):
return '0'
new_string = string[0].rstrip(' Comments')
while new_string.rfind(',') != -1:
new_string = new_string[0:new_string.rfind(',')] + new_string[new_string.rfind(',')+1:]
return new_string
else:
return string
def reactions_strip(string,loader_context):
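    # Normalise the reactions counter (e.g. '19.298.873', '19,298,873',
    # 'Mark and other 254,134') to a plain number of reactions.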
lang = loader_context['lang']
if lang == 'it':
newstring = string[0]
#19.298.873
if len(newstring.split()) == 1:
while newstring.rfind('.') != -1:
newstring = newstring[0:newstring.rfind('.')] + newstring[newstring.rfind('.')+1:]
return newstring
#Pamela, Luigi e altri 4
else:
return string
friends = newstring.count(' e ') + newstring.count(',')
newstring = newstring.split()[::-1][0]
while newstring.rfind('.') != -1:
newstring = newstring[0:newstring.rfind('.')] + newstring[newstring.rfind('.')+1:]
return int(newstring) + friends
elif lang == 'en':
newstring = string[0]
#19,298,873
if len(newstring.split()) == 1:
while newstring.rfind(',') != -1:
newstring = newstring[0:newstring.rfind(',')] + newstring[newstring.rfind(',')+1:]
return newstring
#Mark and other 254,134
elif newstring.split()[::-1][1].isdigit():
friends = newstring.count(' and ') + newstring.count(',')
newstring = newstring.split()[::-1][1]
while newstring.rfind(',') != -1:
newstring = newstring[0:newstring.rfind(',')] + newstring[newstring.rfind(',')+1:]
return int(newstring) + friends
#Philip and 1K others
else:
return newstring
else:
return string
def url_strip(url):
fullurl = url[0]
#catchin '&id=' is enough to identify the post
i = fullurl.find('&id=')
if i != -1:
return fullurl[:i+4] + fullurl[i+4:].split('&')[0]
else: #catch photos
i = fullurl.find('/photos/')
if i != -1:
return fullurl[:i+8] + fullurl[i+8:].split('/?')[0]
else: #catch albums
i = fullurl.find('/albums/')
if i != -1:
return fullurl[:i+8] + fullurl[i+8:].split('/?')[0]
else:
return fullurl
def parse_date(date,loader_context):
import json
d = json.loads(date[0]) #nested dict of features
flat_d = dict() #only retain 'leaves' of d tree
def recursive_items(dictionary):
'''
Get most nested key:value pair of nested dict
'''
for key, value in dictionary.items():
if type(value) is dict:
yield from recursive_items(value)
else:
yield (key, value)
for key, value in recursive_items(d):
flat_d[key] = value
#returns timestamp in localtime conversion from linux timestamp UTC
ret = str(datetime.fromtimestamp(flat_d['publish_time'])) if 'publish_time' in flat_d else None
return ret
def parse_date2(init_date,loader_context):
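    # Convert Facebook's relative and absolute date strings (e.g. '4h', 'ieri alle 20:45',
    # 'Jul 11, 2016') into datetime.date objects; falls back to returning the raw tokens.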
lang = loader_context['lang']
# =============================================================================
# Italian - status:final
# =============================================================================
if lang == 'it':
months = {
'gennaio':1,
'febbraio':2,
'marzo':3,
'aprile':4,
'maggio':5,
'giugno':6,
'luglio':7,
'agosto':8,
'settembre':9,
'ottobre':10,
'novembre':11,
'dicembre':12
}
months_abbr = {
'gen':1,
'feb':2,
'mar':3,
'apr':4,
'mag':5,
'giu':6,
'lug':7,
'ago':8,
'set':9,
'ott':10,
'nov':11,
'dic':12
}
giorni = {
'lunedì':0,
'martedì':1,
'mercoledì':2,
'giovedì':3,
'venerdì':4,
'sabato':5,
'domenica':6
}
date = init_date[0].split()
year, month, day = [int(i) for i in str(datetime.now().date()).split(sep='-')] #default is today
l = len(date)
#sanity check
if l == 0:
return 'Error: no data'
#adesso, ieri, 4h, 50min
elif l == 1:
if date[0].isalpha():
if date[0].lower() == 'ieri':
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
#check that yesterday was not in another month
                    month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
                    return datetime(year,month,day).date()
elif date[0].lower() == 'adesso':
return datetime(year,month,day).date() #return today
else: #not recognized, (return date or init_date)
return date
else:
                #4h, 50min: split the token into its number and unit, then re-parse as a two-token date
                num = ''.join(x for x in date[0] if x.isdigit())
                unit = ''.join(x for x in date[0] if not x.isdigit())
                if num and unit:
                    return parse_date2([num + ' ' + unit], loader_context)
                return date
# l = 2
elif l == 2:
#22 min (oggi)
if date[1] == 'min':
if int(str(datetime.now().time()).split(sep=':')[1]) - int(date[0]) >= 0:
return datetime(year,month,day).date()
#22 min (ieri)
else:
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#4 h (oggi)
elif date[1] == 'h':
if int(str(datetime.now().time()).split(sep=':')[0]) - int(date[0]) >= 0:
return datetime(year,month,day).date()
#4 h (ieri)
else:
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#2 gen
elif len(date[1]) == 3 and date[1].isalpha():
day = int(date[0])
month = months_abbr[date[1].lower()]
return datetime(year,month,day).date()
#2 gennaio
elif len(date[1]) > 3 and date[1].isalpha():
day = int(date[0])
month = months[date[1]]
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l = 3
elif l == 3:
#21 giu 2017
if len(date[1]) == 3 and date[2].isdigit():
day = int(date[0])
month = months_abbr[date[1]]
year = int(date[2])
return datetime(year,month,day).date()
#21 giugno 2017
elif len(date[1]) > 3 and date[2].isdigit():
day = int(date[0])
month = months[date[1]]
year = int(date[2])
return datetime(year,month,day).date()
#9 ore fa
elif date[0].isdigit() and date[1][:2] == 'or':
if int(str(datetime.now().time()).split(sep=':')[0]) - int(date[0]) >= 0:
return datetime(year,month,day).date()
#9 ore fa (ieri)
else:
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#7 minuti fa
elif date[0].isdigit() and date[1][:3] == 'min':
return datetime(year,month,day).date()
#ieri alle 20:45
elif date[0].lower() == 'ieri' and date[1] == 'alle':
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#oggi alle 11:11
elif date[0].lower() == 'oggi' and date[1] == 'alle':
return datetime(year,month,day).date()
#lunedì alle 12:34
elif date[0].isalpha() and date[1] == 'alle':
today = datetime.now().weekday() #today as a weekday
weekday = giorni[date[0].lower()] #day to be match as number weekday
#weekday is chronologically always lower than day
delta = today - weekday
if delta >= 0:
day = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[1])
return datetime(year,month,day).date()
#lunedì = 0 sabato = 6, mar 1 ven 5
else:
delta += 8
day = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[1])
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l = 4
elif l == 4:
#Ieri alle ore 23:32
if date[0].lower() == 'ieri' and date[1] == 'alle':
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#domenica alle ore 19:29
elif date[0].isalpha() and date[1] == 'alle':
today = datetime.now().weekday() #today as a weekday
weekday = giorni[date[0].lower()] #day to be match as number weekday
#weekday is chronologically always lower than day
delta = today - weekday
if delta >= 0:
day = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[1])
return datetime(year,month,day).date()
#lunedì = 0 sabato = 6, mar 1 ven 5
else:
delta += 8
day = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[1])
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l = 5
elif l == 5:
if date[2] == 'alle':
#29 feb alle ore 21:49
if len(date[1]) == 3:
day = int(date[0])
month = months_abbr[date[1].lower()]
return datetime(year,month,day).date()
#29 febbraio alle ore 21:49
else:
day = int(date[0])
month = months[date[1].lower()]
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l = 6
elif l == 6:
if date[3] == 'alle':
#29 feb 2016 alle ore 21:49
if len(date[1]) == 3:
day = int(date[0])
month = months_abbr[date[1].lower()]
year = int(date[2])
return datetime(year,month,day).date()
#29 febbraio 2016 alle ore 21:49
else:
day = int(date[0])
month = months[date[1].lower()]
year = int(date[2])
return datetime(year,month,day).date()
#parsing failed
else:
return date
# =============================================================================
# English - status:beta
# =============================================================================
elif lang == 'en':
months = {
'january':1,
'february':2,
'march':3,
'april':4,
'may':5,
'june':6,
'july':7,
'august':8,
'september':9,
'october':10,
'november':11,
'december':12
}
months_abbr = {
'jan':1,
'feb':2,
'mar':3,
'apr':4,
'may':5,
'jun':6,
'jul':7,
'aug':8,
'sep':9,
'oct':10,
'nov':11,
'dec':12
}
days = {
'monday':0,
'tuesday':1,
'wednesday':2,
'thursday':3,
'friday':4,
'saturday':5,
'sunday':6
}
date = init_date[0].split()
year, month, day = [int(i) for i in str(datetime.now().date()).split(sep='-')] #default is today
l = len(date)
#sanity check
if l == 0:
return 'Error: no data'
#Yesterday, Now, 4hr, 50mins
elif l == 1:
if date[0].isalpha():
if date[0].lower() == 'yesterday':
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
#check that yesterday was not in another month
                    month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
                    return datetime(year,month,day).date()
elif date[0].lower() == 'now':
return datetime(year,month,day).date() #return today
else: #not recognized, (return date or init_date)
return date
else:
                #4hr, 50mins: split the token into its number and unit, then re-parse as a two-token date
                num = ''.join(x for x in date[0] if x.isdigit())
                unit = ''.join(x for x in date[0] if not x.isdigit())
                if num and unit:
                    return parse_date2([num + ' ' + unit], loader_context)
                return date
# l = 2
elif l == 2:
if date[1] == 'now':
return datetime(year,month,day).date()
#22 min (ieri)
if date[1] == 'min' or date[1] == 'mins':
if int(str(datetime.now().time()).split(sep=':')[1]) - int(date[0]) < 0 and int(str(datetime.now().time()).split(sep=':')[0])==0:
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#22 min (oggi)
else:
return datetime(year,month,day).date()
#4 h (ieri)
elif date[1] == 'hr' or date[1] == 'hrs':
if int(str(datetime.now().time()).split(sep=':')[0]) - int(date[0]) < 0:
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#4 h (oggi)
else:
return datetime(year,month,day).date()
#2 jan
elif len(date[1]) == 3 and date[1].isalpha():
day = int(date[0])
month = months_abbr[date[1].lower()]
return datetime(year,month,day).date()
#2 january
elif len(date[1]) > 3 and date[1].isalpha():
day = int(date[0])
month = months[date[1]]
return datetime(year,month,day).date()
#jan 2
elif len(date[0]) == 3 and date[0].isalpha():
day = int(date[1])
month = months_abbr[date[0].lower()]
return datetime(year,month,day).date()
#january 2
elif len(date[0]) > 3 and date[0].isalpha():
day = int(date[1])
month = months[date[0]]
return datetime(year,month,day).date()
#parsing failed
else:
return date
return date
# l = 3
elif l == 3:
#5 hours ago
if date[2] == 'ago':
if date[1] == 'hour' or date[1] == 'hours' or date[1] == 'hr' or date[1] == 'hrs':
# 5 hours ago (yesterday)
if int(str(datetime.now().time()).split(sep=':')[0]) - int(date[0]) < 0:
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
# 5 hours ago (today)
else:
return datetime(year,month,day).date()
#10 minutes ago
elif date[1] == 'minute' or date[1] == 'minutes' or date[1] == 'min' or date[1] == 'mins':
#22 minutes ago (yesterday)
if int(str(datetime.now().time()).split(sep=':')[1]) - int(date[0]) < 0 and int(str(datetime.now().time()).split(sep=':')[0])==0:
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#22 minutes ago (today)
else:
return datetime(year,month,day).date()
else:
return date
else:
#21 Jun 2017
if len(date[1]) == 3 and date[1].isalpha() and date[2].isdigit():
day = int(date[0])
month = months_abbr[date[1].lower()]
year = int(date[2])
return datetime(year,month,day).date()
#21 June 2017
elif len(date[1]) > 3 and date[1].isalpha() and date[2].isdigit():
day = int(date[0])
month = months[date[1].lower()]
year = int(date[2])
return datetime(year,month,day).date()
#Jul 11, 2016
elif len(date[0]) == 3 and len(date[1]) == 3 and date[0].isalpha():
day = int(date[1][:-1])
month = months_abbr[date[0].lower()]
year = int(date[2])
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l = 4
elif l == 4:
#yesterday at 23:32 PM
if date[0].lower() == 'yesterday' and date[1] == 'at':
day = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(1)).split(sep='-')[1])
return datetime(year,month,day).date()
#Thursday at 4:27 PM
elif date[1] == 'at':
today = datetime.now().weekday() #today as a weekday
weekday = days[date[0].lower()] #day to be match as number weekday
#weekday is chronologically always lower than day
delta = today - weekday
if delta >= 0:
day = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[1])
return datetime(year,month,day).date()
#monday = 0 saturday = 6
else:
delta += 8
day = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[2])
month = int(str(datetime.now().date()-timedelta(delta)).split(sep='-')[1])
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l = 5
elif l == 5:
if date[2] == 'at':
#Jan 29 at 10:00 PM
if len(date[0]) == 3:
day = int(date[1])
month = months_abbr[date[0].lower()]
return datetime(year,month,day).date()
#29 febbraio alle ore 21:49
else:
day = int(date[1])
month = months[date[0].lower()]
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l = 6
elif l == 6:
if date[3] == 'at':
date[1]
#Aug 25, 2016 at 7:00 PM
if len(date[0]) == 3:
day = int(date[1][:-1])
month = months_abbr[date[0].lower()]
year = int(date[2])
return datetime(year,month,day).date()
#August 25, 2016 at 7:00 PM
else:
day = int(date[1][:-1])
month = months[date[0].lower()]
year = int(date[2])
return datetime(year,month,day).date()
#parsing failed
else:
return date
# l > 6
#parsing failed - l too big
else:
return date
#parsing failed - language not supported
else:
return init_date
def id_strip(post_id):
import json
d = json.loads(post_id[::-1][0]) #nested dict of features
return str(d['top_level_post_id'])
class FbcrawlItem(scrapy.Item):
source = scrapy.Field()
date = scrapy.Field()
text = scrapy.Field(
output_processor=Join(separator=u'')
) # full text of the post
comments = scrapy.Field(
output_processor=comments_strip
)
reactions = scrapy.Field(
output_processor=reactions_strip
) # num of reactions
likes = scrapy.Field(
output_processor=reactions_strip
)
ahah = scrapy.Field(
output_processor=reactions_strip
)
love = scrapy.Field(
output_processor=reactions_strip
)
wow = scrapy.Field(
output_processor=reactions_strip
)
sigh = scrapy.Field(
output_processor=reactions_strip
)
grrr = scrapy.Field(
output_processor=reactions_strip
)
share = scrapy.Field() # num of shares
url = scrapy.Field(
output_processor=url_strip
)
post_id = scrapy.Field(
output_processor=id_strip
)
shared_from = scrapy.Field()
class CommentsItem(scrapy.Item):
source = scrapy.Field()
reply_to=scrapy.Field()
date = scrapy.Field( # when was the post published
output_processor=parse_date2
)
text = scrapy.Field(
output_processor=Join(separator=u'')
) # full text of the post
reactions = scrapy.Field(
output_processor=reactions_strip
) # num of reactions
likes = scrapy.Field(
output_processor=reactions_strip
)
source_url = scrapy.Field()
url = scrapy.Field()
ahah = scrapy.Field()
love = scrapy.Field()
wow = scrapy.Field()
sigh = scrapy.Field()
grrr = scrapy.Field()
share = scrapy.Field() # num of shares
class ProfileItem(scrapy.Item):
name = scrapy.Field()
gender = scrapy.Field()
birthday = scrapy.Field()
current_city = scrapy.Field()
hometown = scrapy.Field()
work = scrapy.Field()
education = scrapy.Field()
interested_in = scrapy.Field()
page = scrapy.Field()
| 39.418097 | 149 | 0.450034 |
794821ea0d096b0d913925265027f21b65835d93
| 1,390 |
py
|
Python
|
V2.0.0/steneografia/popups.py
|
ziajek444/Stenography
|
4faf946065a675b995e83a15d7c2e8c4958a2192
|
[
"MIT"
] | null | null | null |
V2.0.0/steneografia/popups.py
|
ziajek444/Stenography
|
4faf946065a675b995e83a15d7c2e8c4958a2192
|
[
"MIT"
] | null | null | null |
V2.0.0/steneografia/popups.py
|
ziajek444/Stenography
|
4faf946065a675b995e83a15d7c2e8c4958a2192
|
[
"MIT"
] | null | null | null |
from kivy.uix.button import Button
from kivy.uix.popup import Popup
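# Pre-built modal popups reused across the app: each one holds a single 'ok'
# button whose press event is bound below to dismiss its popup.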
# content_ = Button(text='ok')
content_image_saved = Button(text='ok')
content_image_saved_m = Button(text='ok')
content_hided_data = Button(text='ok')
content_wrong_format = Button(text='ok')
content_drafted_data = Button(text='ok')
# popup_ = Popup(title='xxx', content=content_, size_hint=(0.5, 0.2), auto_dismiss=False)
popup_image_saved = Popup(title='Image saved', content=content_image_saved, size_hint=(0.5, 0.2), auto_dismiss=False)
popup_image_saved_m = Popup(title='Image & mask saved', content=content_image_saved_m, size_hint=(0.5, 0.2), auto_dismiss=False)
popup_hided_data = Popup(title='Hidden data', content=content_hided_data, size_hint=(0.5, 0.2), auto_dismiss=False)
popup_wrong_format = Popup(title='Wrong format', content=content_wrong_format, size_hint=(0.5, 0.2), auto_dismiss=False)
popup_drafted_data = Popup(title='Drafted data', content=content_drafted_data, size_hint=(0.5, 0.2), auto_dismiss=False)
# content_.bind(on_press=popup_.dismiss)
content_image_saved.bind(on_press=popup_image_saved.dismiss)
content_image_saved_m.bind(on_press=popup_image_saved_m.dismiss)
content_hided_data.bind(on_press=popup_hided_data.dismiss)
content_wrong_format.bind(on_press=popup_wrong_format.dismiss)
content_drafted_data.bind(on_press=popup_drafted_data.dismiss)
| 43.4375 | 129 | 0.782734 |
7948221593e915362dcccd9939f1b2bf9546e351
| 2,763 |
py
|
Python
|
darwin/exporter/formats/dataloop.py
|
ck37/darwin-py
|
a231e3844759c44a954b47126bbbf9d6afe658fc
|
[
"MIT"
] | null | null | null |
darwin/exporter/formats/dataloop.py
|
ck37/darwin-py
|
a231e3844759c44a954b47126bbbf9d6afe658fc
|
[
"MIT"
] | null | null | null |
darwin/exporter/formats/dataloop.py
|
ck37/darwin-py
|
a231e3844759c44a954b47126bbbf9d6afe658fc
|
[
"MIT"
] | null | null | null |
import json
from pathlib import Path
from typing import Any, Dict, Iterable, Iterator
import numpy as np
import darwin.datatypes as dt
class NumpyEncoder(json.JSONEncoder):
def default(self, obj: Any) -> Any:
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NumpyEncoder, self).default(obj)
def export(annotation_files: Iterator[dt.AnnotationFile], output_dir: Path) -> None:
for id, annotation_file in enumerate(annotation_files):
export_file(annotation_file, id, output_dir)
def export_file(annotation_file: dt.AnnotationFile, id: int, output_dir: Path) -> None:
output: Dict[str, Any] = build_json(annotation_file, id)
output_file_path: Path = (output_dir / annotation_file.filename).with_suffix(".json")
with open(output_file_path, "w") as f:
json.dump(output, f, cls=NumpyEncoder, indent=1)
def build_json(annotation_file: dt.AnnotationFile, id: int) -> Dict[str, Any]:
return {
"_id": id,
"filename": annotation_file.filename,
"itemMetadata": [],
"annotations": build_annotations(annotation_file, id),
}
def build_annotations(annotation_file: dt.AnnotationFile, id: int) -> Iterable[Dict[str, Any]]:
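    # Translate each Darwin annotation into its Dataloop counterpart:
    # "bounding_box" becomes a two-corner "box" entry and "polygon" becomes a
    # "segment" entry built from the polygon path points.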
output = []
for annotation_id, annotation in enumerate(annotation_file.annotations):
print(annotation)
if annotation.annotation_class.annotation_type == "bounding_box":
entry = {
"id": annotation_id,
"datasetId": "darwin",
"type": "box",
"label": annotation.annotation_class.name,
"attributes": [],
"coordinates": [
{"x": annotation.data["x"], "y": annotation.data["y"], "z": 0},
{
"x": annotation.data["x"] + annotation.data["w"],
"y": annotation.data["y"] + annotation.data["h"],
"z": 0,
},
],
"metadata": {},
}
output.append(entry)
elif annotation.annotation_class.annotation_type == "polygon":
entry = {
"id": annotation_id,
"datasetId": "darwin",
"type": "segment",
"label": annotation.annotation_class.name,
"attributes": [],
"coordinates": [{"x": point["x"], "y": point["y"], "z": 0} for point in annotation.data["path"]],
"metadata": {},
}
output.append(entry)
return output
| 35.423077 | 113 | 0.559899 |
794822445d9e5cd57d4594cf1db8fca48cbdae12
| 1,712 |
py
|
Python
|
3DGenZ/genz3d/fkaconv/lightconvpoint/knn/sampling_fps_op.py
|
valeoai/3DGenZ
|
3368585e10f127f7a0d71af98994a6cff5235dab
|
[
"Apache-2.0"
] | 8 |
2021-12-10T03:21:21.000Z
|
2022-03-11T06:23:30.000Z
|
3DGenZ/genz3d/fkaconv/lightconvpoint/knn/sampling_fps_op.py
|
valeoai/3DGenZ
|
3368585e10f127f7a0d71af98994a6cff5235dab
|
[
"Apache-2.0"
] | 1 |
2022-03-02T09:33:36.000Z
|
2022-03-06T16:29:44.000Z
|
3DGenZ/genz3d/fkaconv/lightconvpoint/knn/sampling_fps_op.py
|
valeoai/3DGenZ
|
3368585e10f127f7a0d71af98994a6cff5235dab
|
[
"Apache-2.0"
] | 2 |
2022-01-12T17:57:13.000Z
|
2022-02-22T05:22:24.000Z
|
import torch
import math
from torch_geometric.nn.pool import fps
from lightconvpoint.knn import knn
import importlib
knn_c_func_spec = importlib.util.find_spec('lightconvpoint.knn_c_func')
if knn_c_func_spec is not None:
knn_c_func = importlib.util.module_from_spec(knn_c_func_spec)
knn_c_func_spec.loader.exec_module(knn_c_func)
def sampling_fps(points: torch.Tensor, nqueries: int):
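    # Farthest point sampling of `nqueries` points per batch element: use the
    # compiled knn_c_func extension when present, otherwise fall back to
    # torch_geometric's fps on the flattened (batch * points) tensor.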
if knn_c_func_spec is not None:
return knn_c_func.sampling_fps(points, nqueries)
bs, dim, nx = points.shape
ratio = nqueries / nx
batch_x = torch.arange(0, bs, dtype=torch.long, device=points.device).unsqueeze(1).expand(bs,nx)
x = points.transpose(1,2).reshape(-1, dim)
batch_x = batch_x.view(-1)
indices_queries = fps(x, batch_x, ratio)
indices_queries = indices_queries.view(bs, -1)
assert(indices_queries.shape[1] == nqueries)
return indices_queries
def sampling_knn_fps(points: torch.Tensor, nqueries: int, K: int):
if knn_c_func_spec is not None:
return knn_c_func.sampling_knn_fps(points, nqueries, K)
bs, dim, nx = points.shape
ratio = nqueries / nx
batch_x = torch.arange(0, bs, dtype=torch.long, device=points.device).unsqueeze(1).expand(bs,nx)
x = points.transpose(1,2).reshape(-1, dim)
batch_x = batch_x.view(-1)
indices_queries = fps(x, batch_x, ratio)
points_queries = x[indices_queries]
indices_queries = indices_queries.view(bs, -1)
points_queries = points_queries.view(bs,-1,3)
points_queries = points_queries.transpose(1,2)
assert(indices_queries.shape[1] == nqueries)
indices_knn = knn(points, points_queries, K)
return indices_queries, indices_knn, points_queries
| 27.612903 | 100 | 0.723715 |
794822d7b29e0383e49346c81da3197a894e717b
| 5,938 |
py
|
Python
|
plugins/metrics/Wikipedia/Plugin.py
|
figshare/Total-Impact
|
4b4be729a12c1f19bdb69e77614ce577a5553386
|
[
"MIT"
] | 5 |
2015-11-26T20:42:32.000Z
|
2018-08-08T18:37:07.000Z
|
plugins/metrics/Wikipedia/Plugin.py
|
figshare/Total-Impact
|
4b4be729a12c1f19bdb69e77614ce577a5553386
|
[
"MIT"
] | null | null | null |
plugins/metrics/Wikipedia/Plugin.py
|
figshare/Total-Impact
|
4b4be729a12c1f19bdb69e77614ce577a5553386
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import simplejson
import json
import urllib
import urllib2
import BeautifulSoup
from BeautifulSoup import BeautifulStoneSoup
import time
import re
import nose
from nose.tools import assert_equals
import sys
import os
# This hack is to add current path when running script from command line
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import BasePlugin
from BasePlugin.BasePlugin import BasePluginClass
from BasePlugin.BasePlugin import TestBasePluginClass
# Permissions: RWX for owner, WX for others. Set this here so that .pyc are created with these permissions
os.umask(022)
# Conforms to API specified here: https://github.com/mhahnel/Total-Impact/wiki/Plugin-requirements
# To do automated tests with nosy
# nosy CrossrefPlugin.py -A \'not skip\'
def skip(f):
f.skip = True
return f
class PluginClass(BasePluginClass):
# each plugin needs to customize this stuff
SOURCE_NAME = "Wikipedia"
SOURCE_DESCRIPTION = "Wikipedia is the free encyclopedia that anyone can edit."
SOURCE_URL = "http://www.wikipedia.org/"
SOURCE_ICON = "http://wikipedia.org/favicon.ico"
SOURCE_METRICS = dict(mentions="The number of articles that mention this artifact")
DEBUG = False
WIKIPEDIA_API_URL = 'http://en.wikipedia.org/w/api.php?action=query&list=search&srprop=timestamp&format=xml&srsearch="%s"'
def get_page(self, url):
if not url:
return(None)
if (self.DEBUG):
print url
try:
page = self.get_cache_timeout_response(url)
if (self.DEBUG):
print page
except:
page = None
return(page)
def extract_stats(self, page, doi):
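        # Each search hit with a title attribute in the API response counts as
        # one Wikipedia article mentioning the DOI.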
if not page:
return(None)
(response_header, content) = page
soup = BeautifulStoneSoup(content)
try:
articles = soup.search.findAll(title=True)
metrics_dict = dict(mentions=len(articles))
except AttributeError:
metrics_dict = None
return(metrics_dict)
def get_metric_values(self, doi):
url = self.WIKIPEDIA_API_URL % doi
page = self.get_page(url)
if page:
response = self.extract_stats(page, doi)
else:
response = None
return(response)
def artifact_type_recognized(self, id):
response = self.is_doi(id)
return(response)
def build_artifact_response(self, artifact_id):
metrics_response = self.get_metric_values(artifact_id)
if metrics_response:
show_details_url = 'http://en.wikipedia.org/wiki/Special:Search?search="' + artifact_id + '"&go=Go'
metrics_response.update({"type":"unknown", "show_details_url":show_details_url})
return(metrics_response)
def get_artifacts_metrics(self, query):
response_dict = dict()
error = None
time_started = time.time()
for artifact_id in query:
(artifact_id, lookup_id) = self.get_relevant_id(artifact_id, query[artifact_id], ["doi"])
if (artifact_id):
artifact_response = self.build_artifact_response(lookup_id)
if artifact_response:
response_dict[artifact_id] = artifact_response
if (time.time() - time_started > self.MAX_ELAPSED_TIME):
error = "TIMEOUT"
break
return(response_dict, error)
class TestPluginClass(TestBasePluginClass):
def setup(self):
self.plugin = PluginClass()
self.test_parse_input = self.testinput.TEST_INPUT_DOI
## this changes for every plugin
def test_build_artifact_response(self):
response = self.plugin.build_artifact_response('10.1371/journal.pcbi.1000361')
assert_equals(response, {'doi': '10.1371/journal.pcbi.1000361', 'title': 'Adventures in Semantic Publishing: Exemplar Semantic Enhancements of a Research Article', 'url': 'http://www.ploscompbiol.org/article/info%3Adoi%2F10.1371%2Fjournal.pcbi.1000361', 'journal': 'PLoS Comput Biol', 'authors': 'Shotton, Portwin, Klyne, Miles', 'year': '2009', 'pmid': '19381256', 'type': 'article'})
## this changes for every plugin
def test_get_artifacts_metrics(self):
response = self.plugin.get_artifacts_metrics(self.test_parse_input)
assert_equals(response, ({u'10.1371/journal.pcbi.1000361': {'doi': u'10.1371/journal.pcbi.1000361', 'title': 'Adventures in Semantic Publishing: Exemplar Semantic Enhancements of a Research Article', 'url': 'http://www.ploscompbiol.org/article/info%3Adoi%2F10.1371%2Fjournal.pcbi.1000361', 'journal': 'PLoS Comput Biol', 'authors': 'Shotton, Portwin, Klyne, Miles', 'year': '2009', 'pmid': '19381256', 'type': 'article'}}, 'NA'))
#each plugin should make sure its range of inputs are covered
def test_run_plugin_doi(self):
response = self.plugin.run_plugin(simplejson.dumps(self.testinput.TEST_INPUT_DOI))
assert_equals(len(response), 1077)
def test_run_plugin_pmid(self):
response = self.plugin.run_plugin(simplejson.dumps(self.testinput.TEST_INPUT_PMID))
assert_equals(len(response), 961)
def test_run_plugin_url(self):
response = self.plugin.run_plugin(simplejson.dumps(self.testinput.TEST_INPUT_URL))
assert_equals(len(response), 685)
def test_run_plugin_invalid_id(self):
response = self.plugin.run_plugin(simplejson.dumps(self.testinput.TEST_INPUT_DUD))
assert_equals(len(response), 685)
def test_run_plugin_multiple(self):
response = self.plugin.run_plugin(simplejson.dumps(self.testinput.TEST_INPUT_ALL))
assert_equals(len(response), 1710)
| 40.394558 | 437 | 0.66066 |
794823241dd86fc53ef1c920715777c3f4cc023e
| 5,578 |
py
|
Python
|
fabfile.py
|
digirea/HIVE
|
8896b0cc858c1ad0683888b925f71c0f0d71bf9d
|
[
"MIT"
] | null | null | null |
fabfile.py
|
digirea/HIVE
|
8896b0cc858c1ad0683888b925f71c0f0d71bf9d
|
[
"MIT"
] | null | null | null |
fabfile.py
|
digirea/HIVE
|
8896b0cc858c1ad0683888b925f71c0f0d71bf9d
|
[
"MIT"
] | null | null | null |
#
# Edit the `config` line to fit your environment.
#
# To install fabric and cuisine,
#
# # update setuptools
# $ sudo pip install -U setuptools
# $ sudo pip install setuptools
#
# $ sudo pip install fabric
# $ sudo pip install cuisine
#
# You may need to specify ARCHFLAGS on a MacOSX environment.
# (https://langui.sh/2014/03/10/wunused-command-line-argument-hard-error-in-future-is-a-harsh-mistress/)
#
# $ sudo ARCHFLAGS=-Wno-error=unused-command-line-argument-hard-error-in-future pip install fabric
#
#
import os, sys
import json
from fabric.api import (env, sudo, put, get, cd, local)
from fabric.utils import puts
from fabric.colors import green, magenta
from fabric.decorators import task
from cuisine import (run, dir_ensure, dir_exists, file_exists)
# ----- config --------------------------------------------
config_file = "hostconfig.json"
env.use_ssh_config = True
# ---------------------------------------------------------
config = json.loads(open(config_file).read())
env.hosts = config['hosts']
file_hashes = {
'cmake-2.8.12.2.tar.gz' : '8c6574e9afabcb9fc66f463bb1f2f051958d86c85c37fccf067eb1a44a120e5e'
}
@task
def prepare():
puts(green('Prepare tools'))
remote_build_dir = config['config'][env.host_string]['remote_build_dir']
build_cmake = config['config'][env.host_string]['build_cmake']
dir_ensure(remote_build_dir)
puts(magenta('remote_build_dir: ' + remote_build_dir))
puts(magenta('build_cmake: %d' % build_cmake))
if build_cmake == True:
if not os.path.exists('./deploy/cmake-2.8.12.2.tar.gz'):
local('curl http://www.cmake.org/files/v2.8/cmake-2.8.12.2.tar.gz -o deploy/cmake-2.8.12.2.tar.gz')
put('deploy/cmake-2.8.12.2.tar.gz', remote_build_dir + '/cmake-2.8.12.2.tar.gz')
with cd(remote_build_dir):
dest_dir = os.path.join(remote_build_dir, 'tools')
run('rm -rf cmake-2.8.12.2')
run('tar -zxvf cmake-2.8.12.2.tar.gz')
run('cd cmake-2.8.12.2; ./configure --prefix=' + dest_dir + ' && make && make install')
@task
def build():
remote_build_dir = config['config'][env.host_string]['remote_build_dir']
if not dir_exists(remote_build_dir):
dir_ensure(remote_build_dir, recursive=True)
dir_ensure(remote_build_dir + '/build')
build_hive()
# Dependency: (None)
@task
def build_hive():
puts(green('Configuring HIVE'))
remote_build_dir = config['config'][env.host_string]['remote_build_dir']
host_type = config['config'][env.host_string]['type']
build_cmake = config['config'][env.host_string]['build_cmake']
c_compiler = config['config'][env.host_string]['c_compiler']
cxx_compiler = config['config'][env.host_string]['cxx_compiler']
local('./scripts/git-archive-all.sh --prefix HIVE-master/ deploy/HIVE-master.tar') # --format tar.gz doesn't work well.
local('gzip -f deploy/HIVE-master.tar')
put('deploy/HIVE-master.tar.gz', remote_build_dir + '/HIVE-master.tar.gz')
with cd(remote_build_dir):
run('rm -rf HIVE-master')
run('tar -zxvf HIVE-master.tar.gz')
# put deplibs
put('third_party/fpzip-1.0.1.tar.gz', remote_build_dir + '/HIVE-master/third_party/fpzip-1.0.1.tar.gz')
put('third_party/cgnslib_3.2.1.tar.gz', remote_build_dir + '/HIVE-master/third_party/cgnslib_3.2.1.tar.gz')
put('third_party/zoltan_distrib_v3.81.tar.gz', remote_build_dir + '/HIVE-master/third_party/zoltan_distrib_v3.81.tar.gz')
# build loader libs.
loader_build_script = ""
if host_type == 'k_cross':
loader_build_script = './scripts/build_loader_libs_k_cross.sh'
elif host_type == 'linux64':
loader_build_script = './scripts/build_loader_libs_linux-x64.sh'
elif host_type == 'darwin64':
loader_build_script = './scripts/build_loader_libs_macosx.sh'
else:
print(host_type)
raise # todo
if build_cmake == True:
cmake_bin_path = os.path.join(config['config'][env.host_string]['remote_build_dir'], "tools/bin/cmake")
loader_build_script = "CMAKE_BIN=" + cmake_bin_path + ' ' + loader_build_script
if c_compiler is not None:
loader_build_script = "CC=" + c_compiler + ' ' + loader_build_script
if cxx_compiler is not None:
loader_build_script = "CXX=" + cxx_compiler + ' ' + loader_build_script
#run('module load PrgEnv-intel')
#run('module load impi411')
with cd(remote_build_dir + '/HIVE-master'):
run(loader_build_script)
#run('make -C build')
#setup_script = ""
#if host_type == 'k_cross':
# setup_script = './scripts/cmake_k_cross.sh'
#elif host_type == 'linux64':
# setup_script = './scripts/cmake_linux_x64.sh'
#elif host_type == 'darwin64':
# setup_script = './scripts/cmake_macosx.sh'
#else:
# print(host_type)
# raise # todo
#if build_cmake == True:
# cmake_bin_path = os.path.join(config['config'][env.host_string]['remote_build_dir'], "tools/bin/cmake")
# setup_script = "CMAKE_BIN=" + cmake_bin_path + ' ' + setup_script
#with cd(remote_build_dir + '/HIVE-master'):
# run(setup_script)
# run('make -C build')
# Dependency: (None)
@task
def deploy_vsp():
puts(green('Putting HIVE to vsp'))
env.hosts = ['vsp']
local('./scripts/git-archive-all.sh --prefix HIVE-master/ deploy/HIVE-master.tar') # --format tar.gz doesn't work well.
local('gzip -f deploy/HIVE-master.tar')
put('deploy/HIVE-master.tar.gz', '/media/dali/data1/share/HIVE/release/HIVE-master.tar.gz')
| 35.081761 | 125 | 0.659376 |
794823c9d01e14edf1641fe97b90f577d30dfe8c
| 185 |
py
|
Python
|
src/db/redis.py
|
arthasyou/py_frame
|
35303cc1970e4a83a60c3e3d1a341a107d6b94aa
|
[
"MIT"
] | null | null | null |
src/db/redis.py
|
arthasyou/py_frame
|
35303cc1970e4a83a60c3e3d1a341a107d6b94aa
|
[
"MIT"
] | null | null | null |
src/db/redis.py
|
arthasyou/py_frame
|
35303cc1970e4a83a60c3e3d1a341a107d6b94aa
|
[
"MIT"
] | null | null | null |
import redis
from config.database import redis as cfg
__all__ = ['r']
r = redis.Redis(
host=cfg['host'],
port=cfg['port'],
password=cfg['password'],
db=cfg['database']
)
| 16.818182 | 40 | 0.637838 |
794824317af503e165503b8bf360a6d13cc892ec
| 441 |
py
|
Python
|
examples/stations/id.py
|
clampr/meteostat-python
|
d4d828ad81d60482ec50276deba7413e2ff042f5
|
[
"MIT"
] | null | null | null |
examples/stations/id.py
|
clampr/meteostat-python
|
d4d828ad81d60482ec50276deba7413e2ff042f5
|
[
"MIT"
] | null | null | null |
examples/stations/id.py
|
clampr/meteostat-python
|
d4d828ad81d60482ec50276deba7413e2ff042f5
|
[
"MIT"
] | null | null | null |
"""
Example: Get weather stations by identifier
Meteorological data provided by Meteostat (https://dev.meteostat.net)
under the terms of the Creative Commons Attribution-NonCommercial
4.0 International Public License.
The code is licensed under the MIT license.
"""
from meteostat import Stations
# Get weather station with ICAO ID EDDF
stations = Stations()
station = stations.id('icao', 'EDDF').fetch()
# Print station
print(station)
| 23.210526 | 69 | 0.77551 |
794824765f2e494dd32c203a0360acd587eed748
| 12,189 |
py
|
Python
|
EnlightenGAN/data/unaligned_dataset.py
|
chenwydj/dynamic_light_unfolding
|
607371b9176e10ebfcc1f350511dae4ead260aa2
|
[
"MIT"
] | null | null | null |
EnlightenGAN/data/unaligned_dataset.py
|
chenwydj/dynamic_light_unfolding
|
607371b9176e10ebfcc1f350511dae4ead260aa2
|
[
"MIT"
] | null | null | null |
EnlightenGAN/data/unaligned_dataset.py
|
chenwydj/dynamic_light_unfolding
|
607371b9176e10ebfcc1f350511dae4ead260aa2
|
[
"MIT"
] | null | null | null |
import torch
from torch import nn
import os.path
import torchvision.transforms as transforms
from EnlightenGAN.data.base_dataset import BaseDataset, get_transform
from EnlightenGAN.data.image_folder import make_dataset
import random
from PIL import Image
import PIL
from pdb import set_trace as st
import numpy as np
from skimage import color, feature
from skimage.filters import gaussian
def pad_tensor(input):
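    # Reflection-pad height and width up to the next multiple of 16 (the network
    # stride) and return the padded tensor together with the padding added per side.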
height_org, width_org = input.shape[2], input.shape[3]
divide = 16
if width_org % divide != 0 or height_org % divide != 0:
width_res = width_org % divide
height_res = height_org % divide
if width_res != 0:
width_div = divide - width_res
pad_left = int(width_div / 2)
pad_right = int(width_div - pad_left)
else:
pad_left = 0
pad_right = 0
if height_res != 0:
height_div = divide - height_res
pad_top = int(height_div / 2)
pad_bottom = int(height_div - pad_top)
else:
pad_top = 0
pad_bottom = 0
padding = nn.ReflectionPad2d((pad_left, pad_right, pad_top, pad_bottom))
input = padding(input).data
else:
pad_left = 0
pad_right = 0
pad_top = 0
pad_bottom = 0
height, width = input.shape[2], input.shape[3]
assert width % divide == 0, 'width cant divided by stride'
assert height % divide == 0, 'height cant divided by stride'
return input, pad_left, pad_right, pad_top, pad_bottom
def pad_tensor_back(input, pad_left, pad_right, pad_top, pad_bottom):
height, width = input.shape[2], input.shape[3]
return input[:,:, pad_top: height - pad_bottom, pad_left: width - pad_right]
class UnalignedDataset(BaseDataset):
def _reinit_A_paths(self):
self.A_paths = self.pos_names# + np.random.choice(self.neg_names_all, int(948/(10/1)), replace=False).tolist()
random.shuffle(self.A_paths)
self.B_paths = list(self.A_paths)
self.A_size = len(self.A_paths)
self.B_size = len(self.B_paths)
def initialize(self, opt):
self.opt = opt
self.root = opt.dataroot
##############################
# self.dir_A = os.path.join(opt.dataroot)#, opt.phase + 'A')
# self.dir_B = os.path.join(opt.dataroot)#, opt.phase + 'B')
if not 'images' in self.opt.name:
self.dir_A = os.path.join("/ssd1/chenwy/bdd100k/seg_luminance/0_100/", opt.phase)
self.dir_B = os.path.join("/ssd1/chenwy/bdd100k/seg_luminance/100_255/", opt.phase)
# self.dir_A = os.path.join("/ssd1/chenwy/bdd100k/seg_luminance/0_75/", opt.phase)
# self.dir_B = os.path.join("/ssd1/chenwy/bdd100k/seg_luminance/100_105/", opt.phase)
else:
self.dir_A = os.path.join("/ssd1/chenwy/bdd100k/images_luminance/100k/0_100/", opt.phase)
self.dir_B = os.path.join("/ssd1/chenwy/bdd100k/images_luminance/100k/100_255/", opt.phase)
# self.dir_A = os.path.join("/ssd1/chenwy/bdd100k/images_luminance/100k/0_75/", opt.phase)
# self.dir_B = os.path.join("/ssd1/chenwy/bdd100k/images_luminance/100k/100_105/", opt.phase)
##############################
self.A_paths = make_dataset(self.dir_A)
self.B_paths = make_dataset(self.dir_B)
self.A_paths = sorted(self.A_paths)
self.B_paths = sorted(self.B_paths)
self.A_size = len(self.A_paths)
self.B_size = len(self.B_paths)
self.transform = get_transform(opt)
##### load image2reward to resample dataset ############################
# image2reward = np.load("/home/chenwy/DynamicLightEnlighten/image2reward.npy").item()
# self.pos = []; self.pos_names = []; self.neg_names_all = []
# for k, v in image2reward.items():
# if v > 0:
# self.pos.append(v)
# self.pos_names.append(k)
# elif v < 0:
# self.neg_names_all.append(k)
# self.pos_names = [k for v,k in sorted(zip(self.pos, self.pos_names), reverse=True)]
# self._reinit_A_paths()
#################################
self.low_range = range(55, 70)
self.high_range = range(110, 125)
self.N_TRY = 20
def __getitem__(self, index_A):
A_path = self.A_paths[index_A % self.A_size]
index_B = random.randint(0, self.B_size - 1)
B_path = self.B_paths[index_B % self.B_size]
A_image = Image.open(A_path).convert('RGB')
B_image = Image.open(B_path).convert('RGB')
# A_size = A_img.size
# B_size = B_img.size
# A_size = A_size = (A_size[0]//16*16, A_size[1]//16*16)
# B_size = B_size = (B_size[0]//16*16, B_size[1]//16*16)
# A_img = A_img.resize(A_size, Image.BICUBIC)
# B_img = B_img.resize(B_size, Image.BICUBIC)
# A_gray = A_img.convert('LA')
# A_gray = 255.0-A_gray
w, h = A_image.size
# without luminance selection #####################
# x1 = random.randint(0, w - self.opt.fineSize)
# y1 = random.randint(0, h - self.opt.fineSize)
# A_img = A_image.crop((x1, y1, x1+self.opt.fineSize, y1+self.opt.fineSize))
# B_img = B_image.crop((x1, y1, x1+self.opt.fineSize, y1+self.opt.fineSize))
# A_npy = np.array(A_img)
# B_npy = np.array(B_img)
# r,g,b = A_npy[:, :, 0], A_npy[:, :, 1], A_npy[:, :, 2]
# value_A = (0.299*r+0.587*g+0.114*b) / 255.
# value_A = np.sort(value_A.flatten())
# length = value_A.shape[0]
# value_A = value_A[int(np.round(length * 0.1)) : int(np.round(length * 0.9))].mean()
# if not 'images' in self.opt.name:
# # mask = Image.open(os.path.join("/ssd1/chenwy/bdd100k/seg/labels/", "train", os.path.splitext(A_path.split("/")[-1])[0] + '_train_id.png'))
# mask = Image.open(os.path.join("/ssd1/chenwy/bdd100k/seg/labels/", self.opt.phase, os.path.splitext(A_path.split("/")[-1])[0] + '_train_id.png'))
# mask = np.array(mask.crop((x1, y1, x1+self.opt.fineSize, y1+self.opt.fineSize))).astype('int32') # cropped mask for light_enhance_AB/seg
# mask = self._mask_transform(mask)
# else:
# mask = torch.zeros(1)
###################################################
# patch luminance & mask class diversity selection ###########################
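        # Retry up to N_TRY random crops until the dark (A) crop's trimmed-mean
        # luminance falls in low_range, the bright (B) crop's falls in high_range
        # and, for segmentation runs, the cropped label mask holds at least two
        # classes with none covering more than 70% of the pixels.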
n_try = 0
while n_try < self.N_TRY:
x1 = random.randint(0, w - self.opt.fineSize)
y1 = random.randint(0, h - self.opt.fineSize)
A_img = A_image.crop((x1, y1, x1+self.opt.fineSize, y1+self.opt.fineSize))
B_img = B_image.crop((x1, y1, x1+self.opt.fineSize, y1+self.opt.fineSize))
A_npy = np.array(A_img)
B_npy = np.array(B_img)
r,g,b = A_npy[:, :, 0], A_npy[:, :, 1], A_npy[:, :, 2]
value_A = (0.299*r+0.587*g+0.114*b) / 255.
value_A = np.sort(value_A.flatten())
length = value_A.shape[0]
value_A = value_A[int(np.round(length * 0.1)) : int(np.round(length * 0.9))].mean()
if int(np.round(value_A*255)) not in self.low_range: n_try += 1; continue
r,g,b = B_npy[:, :, 0], B_npy[:, :, 1], B_npy[:, :, 2]
value_B = (0.299*r+0.587*g+0.114*b) / 255.
value_B = np.sort(value_B.flatten())
length = value_B.shape[0]
value_B = value_B[int(np.round(length * 0.1)) : int(np.round(length * 0.9))].mean()
if int(np.round(value_B*255)) not in self.high_range: n_try += 1; continue
if not 'images' in self.opt.name:
# mask = Image.open(os.path.join("/ssd1/chenwy/bdd100k/seg/labels/", "train", os.path.splitext(A_path.split("/")[-1])[0] + '_train_id.png'))
mask = Image.open(os.path.join("/ssd1/chenwy/bdd100k/seg/labels/", self.opt.phase, os.path.splitext(A_path.split("/")[-1])[0] + '_train_id.png'))
mask = np.array(mask.crop((x1, y1, x1+self.opt.fineSize, y1+self.opt.fineSize))).astype('int32') # cropped mask for light_enhance_AB/seg
unique, counts = np.unique(mask, return_counts=True)
if len(unique) < 2 or (counts / counts.sum()).max() > 0.7: n_try += 1; continue
mask = self._mask_transform(mask)
else:
mask = torch.zeros(1)
break
if n_try == self.N_TRY:
# if int(np.round(value_A)) not in self.low_range:
# self.A_paths.pop(index_A % self.A_size)
# self.A_size -= 1
# if int(np.round(value_B)) not in self.high_range:
# self.B_paths.pop(index_B % self.B_size)
# self.B_size -= 1
index_A = random.randint(0, self.__len__())
return self.__getitem__(index_A)
##########################################################################
gray_mask = torch.ones(1, self.opt.fineSize, self.opt.fineSize) * value_A
A_img_border = A_image.crop((x1-self.opt.fineSize//2, y1-self.opt.fineSize//2, x1+2*self.opt.fineSize, y1+2*self.opt.fineSize))
A_Lab = torch.Tensor(color.rgb2lab(A_npy) / 100).permute([2, 0, 1])
A_npy = gaussian(A_npy, sigma=2, multichannel=True)
r,g,b = A_npy[:, :, 0], A_npy[:, :, 1], A_npy[:, :, 2]
A_npy = 0.299*r+0.587*g+0.114*b
edges_A = torch.unsqueeze(torch.from_numpy(feature.canny(A_npy, sigma=2).astype("float32")), 0)
A_img = self.transform(A_img)
A_img_border = self.transform(A_img_border)
B_img = self.transform(B_img)
if self.opt.resize_or_crop == 'no':
r,g,b = A_img[0]+1, A_img[1]+1, A_img[2]+1
A_gray = 1. - (0.299*r+0.587*g+0.114*b)/2.
A_gray = torch.unsqueeze(A_gray, 0)
input_img = A_img
# A_gray = (1./A_gray)/255.
r,g,b = A_img_border[0]+1, A_img_border[1]+1, A_img_border[2]+1
A_gray_border = 1. - (0.299*r+0.587*g+0.114*b)/2.
A_gray_border = torch.unsqueeze(A_gray_border, 0)
else:
w = A_img.size(2)
h = A_img.size(1)
# A_gray = (1./A_gray)/255.
if (not self.opt.no_flip) and random.random() < 0.5:
idx = [i for i in range(A_img.size(2) - 1, -1, -1)]
idx = torch.LongTensor(idx)
A_img = A_img.index_select(2, idx)
B_img = B_img.index_select(2, idx)
if (not self.opt.no_flip) and random.random() < 0.5:
idx = [i for i in range(A_img.size(1) - 1, -1, -1)]
idx = torch.LongTensor(idx)
A_img = A_img.index_select(1, idx)
B_img = B_img.index_select(1, idx)
if self.opt.vary == 1 and (not self.opt.no_flip) and random.random() < 0.5:
times = random.randint(self.opt.low_times,self.opt.high_times)/100.
input_img = (A_img+1)/2./times
input_img = input_img*2-1
else:
input_img = A_img
if self.opt.lighten:
B_img = (B_img + 1)/2.
B_img = (B_img - torch.min(B_img))/(torch.max(B_img) - torch.min(B_img))
B_img = B_img*2. -1
r,g,b = input_img[0]+1, input_img[1]+1, input_img[2]+1
A_gray = 1. - (0.299*r+0.587*g+0.114*b)/2.
A_gray = torch.unsqueeze(A_gray, 0)
return {'A': A_img, 'B': B_img, 'A_gray': A_gray, 'input_img': input_img,
'A_paths': A_path, 'B_paths': B_path, 'mask': mask,
'A_border': A_img_border, 'A_gray_border': A_gray_border,
'A_Lab': A_Lab, 'gray_mask': gray_mask, 'edges_A': edges_A
}
def __len__(self):
return max(self.A_size, self.B_size)
def name(self):
return 'UnalignedDataset'
def _mask_transform(self, mask):
target = np.array(mask).astype('int32')
target[target == 255] = -1
return torch.from_numpy(target).long()
| 46.170455 | 161 | 0.559111 |
794824894042580e73abe3503e9fab353aff6d02
| 2,898 |
py
|
Python
|
Project3/dqn_model.py
|
radhasaraf/ds595-rl
|
3360d01da8a292358d000da2df3f2e65875fbae9
|
[
"MIT"
] | null | null | null |
Project3/dqn_model.py
|
radhasaraf/ds595-rl
|
3360d01da8a292358d000da2df3f2e65875fbae9
|
[
"MIT"
] | null | null | null |
Project3/dqn_model.py
|
radhasaraf/ds595-rl
|
3360d01da8a292358d000da2df3f2e65875fbae9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import torch
from torch import nn, Tensor
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class DQN(nn.Module):
"""
Initialize a deep Q-learning network
Architecture reference: Original paper for DQN
(https://storage.googleapis.com/deepmind-data/assets/papers/DeepMindNature14236Paper.pdf)
"""
def __init__(self, in_channels=4, num_actions=4):
super(DQN, self).__init__()
self.num_actions = num_actions
self.conv_relu_stack = nn.Sequential(
nn.Conv2d(in_channels, 32, kernel_size=8, stride=4),
nn.ReLU(),
nn.Conv2d(32, 64, kernel_size=4, stride=2),
nn.ReLU(),
nn.Conv2d(64, 64, kernel_size=3, stride=1),
nn.ReLU(),
)
# Expected (sample) dummy input = zeros(batch_size, in_channels, 84, 84)
h_out = w_out = self._conv2d_size_out(
self._conv2d_size_out(self._conv2d_size_out(84, 8, 4), 4, 2),
3,
1
)
no_filters_last_conv_layer = 64
self.in_features = int(h_out * w_out * no_filters_last_conv_layer)
self.fc_stack = nn.Sequential(
nn.Linear(self.in_features, 512),
nn.ReLU(),
nn.Linear(512, self.num_actions),
)
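        # Dueling DQN head: separate state-value and advantage streams recombined
        # in forward() as Q = V + (A - mean(A)); fc_stack is the plain single-stream
        # head kept for the commented-out vanilla forward().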
self.value_stream = nn.Sequential(
nn.Linear(self.in_features, 512),
nn.ReLU(),
nn.Linear(512, 1)
)
self.advantage_stream = nn.Sequential(
nn.Linear(self.in_features, 512),
nn.ReLU(),
nn.Linear(512, self.num_actions)
)
# Get the no. of features in the output of the conv-relu-layers-stack which
# is required to be known for the Linear layer 'in_features' arg.
# Following is simplified version. Checkout link below for the detailed one
# https://pytorch.org/docs/stable/generated/torch.nn.Conv2d.html
@staticmethod
def _conv2d_size_out(size, kernel_size, stride):
return (size - (kernel_size - 1) - 1) / stride + 1
# def forward(self, obs: Tensor) -> Tensor:
# """
# Passes an observation(state) through the network and generates action
# probabilities
# """
# obs = obs.to(device)
# intermediate_output = self.conv_relu_stack(obs)
# intermediate_output = intermediate_output.view(obs.size()[0], -1)
# return self.fc_stack(intermediate_output)
def forward(self, obs: Tensor) -> Tensor:
obs = obs.to(device)
intermediate_output = self.conv_relu_stack(obs)
intermediate_output = intermediate_output.view(obs.size(0), -1)
values = self.value_stream(intermediate_output)
advantages = self.advantage_stream(intermediate_output)
qvals = values + (advantages - advantages.mean())
return qvals
| 34.915663 | 93 | 0.616287 |
79482490bbac752ce74aa7a6746b894b32edfe12
| 2,277 |
py
|
Python
|
software/openvisualizer/openvisualizer/JRC/cojpDefines.py
|
pedrohenriquegomes/openwsn-sw
|
24761e01dc1e8dbc351e61bd927c57facc64c155
|
[
"BSD-3-Clause"
] | 26 |
2015-01-31T23:56:36.000Z
|
2021-04-10T20:26:11.000Z
|
software/openvisualizer/openvisualizer/JRC/cojpDefines.py
|
pedrohenriquegomes/openwsn-sw
|
24761e01dc1e8dbc351e61bd927c57facc64c155
|
[
"BSD-3-Clause"
] | 33 |
2015-03-18T15:54:01.000Z
|
2018-03-08T14:28:50.000Z
|
software/openvisualizer/openvisualizer/JRC/cojpDefines.py
|
pedrohenriquegomes/openwsn-sw
|
24761e01dc1e8dbc351e61bd927c57facc64c155
|
[
"BSD-3-Clause"
] | 80 |
2015-01-07T08:54:58.000Z
|
2020-05-06T21:33:41.000Z
|
# constants from draft-ietf-6tisch-minimal-security-06
COJP_PARAMETERS_LABELS_ROLE = 1 # Identifies the role parameter
COJP_PARAMETERS_LABELS_LLKEYSET = 2 # Identifies the array carrying one or more link-layer cryptographic keys
COJP_PARAMETERS_LABELS_LLSHORTADDRESS = 3 # Identifies the assigned link-layer short address
COJP_PARAMETERS_LABELS_JRCADDRESS = 4 # Identifies the IPv6 address of the JRC
COJP_PARAMETERS_LABELS_NETID = 5 # Identifies the network identifier (PAN ID)
COJP_PARAMETERS_LABELS_NETPREFIX = 6 # Identifies the IPv6 prefix of the network
COJP_PARAMETERS_LABELS_ALL = [
COJP_PARAMETERS_LABELS_ROLE,
COJP_PARAMETERS_LABELS_LLKEYSET,
COJP_PARAMETERS_LABELS_LLSHORTADDRESS,
COJP_PARAMETERS_LABELS_JRCADDRESS,
COJP_PARAMETERS_LABELS_NETID,
COJP_PARAMETERS_LABELS_NETPREFIX
]
COJP_ROLE_VALUE_6N = 0 # 6TiSCH Node
COJP_ROLE_VALUE_6LBR = 1 # 6LBR Node
COJP_ROLE_ALL = [
COJP_ROLE_VALUE_6N,
COJP_ROLE_VALUE_6LBR,
]
COJP_KEY_USAGE_6TiSCH_K1K2_ENC_MIC32 = 0
COJP_KEY_USAGE_6TiSCH_K1K2_ENC_MIC64 = 1
COJP_KEY_USAGE_6TiSCH_K1K2_ENC_MIC128 = 2
COJP_KEY_USAGE_6TiSCH_K1K2_MIC32 = 3
COJP_KEY_USAGE_6TiSCH_K1K2_MIC64 = 4
COJP_KEY_USAGE_6TiSCH_K1K2_MIC128 = 5
COJP_KEY_USAGE_6TiSCH_K1_MIC32 = 6
COJP_KEY_USAGE_6TiSCH_K1_MIC64 = 7
COJP_KEY_USAGE_6TiSCH_K1_MIC128 = 8
COJP_KEY_USAGE_6TiSCH_K2_MIC32 = 9
COJP_KEY_USAGE_6TiSCH_K2_MIC64 = 10
COJP_KEY_USAGE_6TiSCH_K2_MIC128 = 11
COJP_KEY_USAGE_6TiSCH_K2_ENC_MIC32 = 12
COJP_KEY_USAGE_6TiSCH_K2_ENC_MIC64 = 13
COJP_KEY_USAGE_6TiSCH_K2_ENC_MIC128 = 14
COJP_KEY_USAGE_ALL = [
COJP_KEY_USAGE_6TiSCH_K1K2_ENC_MIC32,
COJP_KEY_USAGE_6TiSCH_K1K2_ENC_MIC64,
COJP_KEY_USAGE_6TiSCH_K1K2_ENC_MIC128,
COJP_KEY_USAGE_6TiSCH_K1K2_MIC32,
COJP_KEY_USAGE_6TiSCH_K1K2_MIC64,
COJP_KEY_USAGE_6TiSCH_K1K2_MIC128,
COJP_KEY_USAGE_6TiSCH_K1_MIC32,
COJP_KEY_USAGE_6TiSCH_K1_MIC64,
COJP_KEY_USAGE_6TiSCH_K1_MIC128,
COJP_KEY_USAGE_6TiSCH_K2_MIC32,
COJP_KEY_USAGE_6TiSCH_K2_MIC64,
COJP_KEY_USAGE_6TiSCH_K2_MIC128,
COJP_KEY_USAGE_6TiSCH_K2_ENC_MIC32,
COJP_KEY_USAGE_6TiSCH_K2_ENC_MIC64,
COJP_KEY_USAGE_6TiSCH_K2_ENC_MIC128,
]
| 37.95 | 115 | 0.796223 |
794825347534fb0320706508b85dd8428d90b498
| 1,053 |
py
|
Python
|
openacs/inserts/opt/ConnectionRequest/XMPP/xmpp-con-req.py
|
devolo/tr069
|
f1a35b35e36b92ac37a33fea41d1671768b30620
|
[
"MIT"
] | 18 |
2019-12-05T10:04:19.000Z
|
2022-03-09T02:23:12.000Z
|
openacs/inserts/opt/ConnectionRequest/XMPP/xmpp-con-req.py
|
devolo/tr069
|
f1a35b35e36b92ac37a33fea41d1671768b30620
|
[
"MIT"
] | null | null | null |
openacs/inserts/opt/ConnectionRequest/XMPP/xmpp-con-req.py
|
devolo/tr069
|
f1a35b35e36b92ac37a33fea41d1671768b30620
|
[
"MIT"
] | 12 |
2019-12-05T13:21:39.000Z
|
2021-11-22T01:10:43.000Z
|
#!/usr/bin/python
import sys
import xmpp
#ACS user on the prosody server
username="acs"
password="devolo"
#XMPP server is 'xmpp' in our internal network, telco0 to externals
server = ('xmpp.public', 5222)
domain="xmpp.public"
resource=""
agent="cpe@xmpp.public/my_fixed_xmpp_resource"
crs_username="Admin"
crs_password="devolo"
iq_id="2342"
def send_notify(conn):
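    # Build the TR-069 XMPP Connection Request: an IQ 'get' stanza carrying a
    # connectionRequest element in the urn:broadband-forum-org:cwmp:xmppConnReq-1-0
    # namespace with the CPE's connection-request credentials.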
iq = xmpp.Iq(to = agent, frm=username + "@" + domain + resource, typ="get")
iq.setID(iq_id)
cr = iq.addChild(name = "connectionRequest", namespace="urn:broadband-forum-org:cwmp:xmppConnReq-1-0")
cr.addChild(name = "username").setData(crs_username)
cr.addChild(name = "password").setData(crs_password)
conn.send(iq)
def iq_handler(conn, iq):
if iq.getID() == iq_id:
sys.exit(0)
jid = xmpp.JID(username + "@" + domain)
connection = xmpp.Client(domain)
connection.connect(server = server)
result = connection.auth(jid.getNode(), password, resource)
connection.RegisterHandler("iq", iq_handler)
send_notify(connection)
while connection.Process(1):
pass
| 25.071429 | 106 | 0.7151 |
794826cbd0996a8f8763039eaed5929077074d58
| 11,630 |
py
|
Python
|
sdk/python/pulumi_azure_native/storage/v20210401/blob_inventory_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/v20210401/blob_inventory_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/v20210401/blob_inventory_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['BlobInventoryPolicyArgs', 'BlobInventoryPolicy']
@pulumi.input_type
class BlobInventoryPolicyArgs:
def __init__(__self__, *,
account_name: pulumi.Input[str],
policy: pulumi.Input['BlobInventoryPolicySchemaArgs'],
resource_group_name: pulumi.Input[str],
blob_inventory_policy_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a BlobInventoryPolicy resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
:param pulumi.Input['BlobInventoryPolicySchemaArgs'] policy: The storage account blob inventory policy object. It is composed of policy rules.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
:param pulumi.Input[str] blob_inventory_policy_name: The name of the storage account blob inventory policy. It should always be 'default'
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "policy", policy)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if blob_inventory_policy_name is not None:
pulumi.set(__self__, "blob_inventory_policy_name", blob_inventory_policy_name)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> pulumi.Input[str]:
"""
The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
"""
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter
def policy(self) -> pulumi.Input['BlobInventoryPolicySchemaArgs']:
"""
The storage account blob inventory policy object. It is composed of policy rules.
"""
return pulumi.get(self, "policy")
@policy.setter
def policy(self, value: pulumi.Input['BlobInventoryPolicySchemaArgs']):
pulumi.set(self, "policy", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group within the user's subscription. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="blobInventoryPolicyName")
def blob_inventory_policy_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the storage account blob inventory policy. It should always be 'default'
"""
return pulumi.get(self, "blob_inventory_policy_name")
@blob_inventory_policy_name.setter
def blob_inventory_policy_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "blob_inventory_policy_name", value)
class BlobInventoryPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
blob_inventory_policy_name: Optional[pulumi.Input[str]] = None,
policy: Optional[pulumi.Input[pulumi.InputType['BlobInventoryPolicySchemaArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
The storage account blob inventory policy.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
:param pulumi.Input[str] blob_inventory_policy_name: The name of the storage account blob inventory policy. It should always be 'default'
:param pulumi.Input[pulumi.InputType['BlobInventoryPolicySchemaArgs']] policy: The storage account blob inventory policy object. It is composed of policy rules.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BlobInventoryPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
The storage account blob inventory policy.
:param str resource_name: The name of the resource.
:param BlobInventoryPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BlobInventoryPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
blob_inventory_policy_name: Optional[pulumi.Input[str]] = None,
policy: Optional[pulumi.Input[pulumi.InputType['BlobInventoryPolicySchemaArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BlobInventoryPolicyArgs.__new__(BlobInventoryPolicyArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
__props__.__dict__["blob_inventory_policy_name"] = blob_inventory_policy_name
if policy is None and not opts.urn:
raise TypeError("Missing required property 'policy'")
__props__.__dict__["policy"] = policy
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["last_modified_time"] = None
__props__.__dict__["name"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:storage/v20210401:BlobInventoryPolicy"), pulumi.Alias(type_="azure-native:storage:BlobInventoryPolicy"), pulumi.Alias(type_="azure-nextgen:storage:BlobInventoryPolicy"), pulumi.Alias(type_="azure-native:storage/v20190601:BlobInventoryPolicy"), pulumi.Alias(type_="azure-nextgen:storage/v20190601:BlobInventoryPolicy"), pulumi.Alias(type_="azure-native:storage/v20200801preview:BlobInventoryPolicy"), pulumi.Alias(type_="azure-nextgen:storage/v20200801preview:BlobInventoryPolicy"), pulumi.Alias(type_="azure-native:storage/v20210101:BlobInventoryPolicy"), pulumi.Alias(type_="azure-nextgen:storage/v20210101:BlobInventoryPolicy"), pulumi.Alias(type_="azure-native:storage/v20210201:BlobInventoryPolicy"), pulumi.Alias(type_="azure-nextgen:storage/v20210201:BlobInventoryPolicy"), pulumi.Alias(type_="azure-native:storage/v20210601:BlobInventoryPolicy"), pulumi.Alias(type_="azure-nextgen:storage/v20210601:BlobInventoryPolicy")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(BlobInventoryPolicy, __self__).__init__(
'azure-native:storage/v20210401:BlobInventoryPolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'BlobInventoryPolicy':
"""
Get an existing BlobInventoryPolicy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = BlobInventoryPolicyArgs.__new__(BlobInventoryPolicyArgs)
__props__.__dict__["last_modified_time"] = None
__props__.__dict__["name"] = None
__props__.__dict__["policy"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
return BlobInventoryPolicy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="lastModifiedTime")
def last_modified_time(self) -> pulumi.Output[str]:
"""
Returns the last modified date and time of the blob inventory policy.
"""
return pulumi.get(self, "last_modified_time")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def policy(self) -> pulumi.Output['outputs.BlobInventoryPolicySchemaResponse']:
"""
The storage account blob inventory policy object. It is composed of policy rules.
"""
return pulumi.get(self, "policy")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
Metadata pertaining to creation and last modification of the resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
| 50.565217 | 1,016 | 0.685039 |
794827cc54f07165e51acf11a43d4f3674ad00de
| 46 |
py
|
Python
|
src/constants.py
|
egorsimchuk/binance_bot
|
af1caac32f8d4804aea3af83250fd4530d9787df
|
[
"Unlicense"
] | 1 |
2022-03-13T01:07:08.000Z
|
2022-03-13T01:07:08.000Z
|
src/constants.py
|
egorsimchuk/binance_bot
|
af1caac32f8d4804aea3af83250fd4530d9787df
|
[
"Unlicense"
] | 4 |
2022-02-20T10:33:45.000Z
|
2022-03-03T22:59:24.000Z
|
src/constants.py
|
egorsimchuk/binance_bot
|
af1caac32f8d4804aea3af83250fd4530d9787df
|
[
"Unlicense"
] | null | null | null |
remove_from_plots = ['USDT']
time_col = 'date'
| 23 | 28 | 0.717391 |
794829866746b7481686b83814320a3920971ae1
| 6,068 |
py
|
Python
|
evaluation/viapoint_environment.py
|
rock-learning/approxik
|
877d50d4d045457593a2fafefd267339a11de20f
|
[
"BSD-3-Clause"
] | 1 |
2020-03-27T01:53:57.000Z
|
2020-03-27T01:53:57.000Z
|
evaluation/viapoint_environment.py
|
rock-learning/approxik
|
877d50d4d045457593a2fafefd267339a11de20f
|
[
"BSD-3-Clause"
] | null | null | null |
evaluation/viapoint_environment.py
|
rock-learning/approxik
|
877d50d4d045457593a2fafefd267339a11de20f
|
[
"BSD-3-Clause"
] | 1 |
2020-12-18T02:09:21.000Z
|
2020-12-18T02:09:21.000Z
|
# Authors: Alexander Fabisch <afabisch@informatik.uni-bremen.de>
import numpy as np
from scipy.spatial.distance import cdist
from bolero.environment import Environment
from bolero.utils.log import get_logger
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from pytransform.rotations import matrix_from_quaternion
class ViaPointEnvironment(Environment):
"""Optimize a trajectory that passes several viapoints.
Parameters
----------
ik : object
Inverse kinematics solver
x0 : array-like, shape = (7,)
Initial position.
via_points : array-like, shape = (n_via_points, 4)
Via points: (t, x, y, z)
execution_time : float
Execution time in seconds
dt : float
Time between successive steps in seconds.
qlo : array-like, shape (n_joints,)
Lower joint limits
qhi : array-like, shape (n_joints,)
Upper joint limits
penalty_vel : float, optional (default: 0)
Penalty weight for velocities
penalty_acc : float, optional (default: 0)
Penalty weight for accelerations
penalty_via_point : float, optional (default: 0)
Penalty weight for distance to via points
log_to_file: optional, boolean or string (default: False)
Log results to given file, it will be located in the $BL_LOG_PATH
log_to_stdout: optional, boolean (default: False)
Log to standard output
"""
def __init__(self, ik, x0, via_points, execution_time, dt, qlo, qhi,
penalty_vel=0.0, penalty_acc=0.0, penalty_via_point=0.0,
log_to_file=False, log_to_stdout=False):
self.ik = ik
self.x0 = x0
self.via_points = via_points
self.execution_time = execution_time
self.dt = dt
self.qlo = qlo
self.qhi = qhi
self.penalty_vel = penalty_vel
self.penalty_acc = penalty_acc
self.penalty_via_point = penalty_via_point
self.log_to_file = log_to_file
self.log_to_stdout = log_to_stdout
def init(self):
self.x0 = np.asarray(self.x0)
self.logger = get_logger(self, self.log_to_file, self.log_to_stdout)
self.n_steps = 1 + int(self.execution_time / self.dt)
self.n_joints = self.ik.get_n_joints()
self.P = np.empty((self.n_steps, 7))
self.Q = np.empty((self.n_steps, self.n_joints))
self.p = np.empty(7)
self.q = np.empty(self.n_joints)
def reset(self):
self.p[:] = self.x0.copy()
self.q[:] = 0.0
self.ik.cart_to_jnt(self.p, self.q)
self.t = 0
def get_num_inputs(self):
return self.n_joints
def get_num_outputs(self):
return self.n_joints
def get_outputs(self, values):
values[:] = self.q[:]
def set_inputs(self, values):
if np.isfinite(values).all():
self.q[:] = np.clip(values[:], self.qlo, self.qhi)
def step_action(self):
self.ik.jnt_to_cart(self.q, self.p)
self.P[self.t, :] = self.p[:]
self.Q[self.t, :] = self.q[:]
self.t += 1
def is_evaluation_done(self):
return self.t >= self.n_steps
def get_speed(self):
Qd = np.vstack((np.zeros(self.n_joints), np.diff(self.Q, axis=0) / self.dt))
speed = np.sqrt(np.sum(Qd ** 2, axis=1))
self.logger.info("Speed: %g" % speed.sum())
return speed
def get_acceleration(self):
Qd = np.vstack((np.zeros(self.n_joints), np.diff(self.Q, axis=0) / self.dt))
Qdd = np.vstack((np.zeros(self.n_joints), np.diff(Qd, axis=0) / self.dt))
acceleration = np.sqrt(np.sum(Qdd ** 2, axis=1))
self.logger.info("Accelerations: %g" % acceleration.sum())
return acceleration
def get_via_point_dist(self):
"""Get list of collisions with obstacles during the performed movement.
Returns
-------
min_dist : array-like, shape (n_via_points,)
Minimum distances to all via points
"""
dists = np.empty(len(self.via_points))
for i, via_point in enumerate(self.via_points):
t = int(via_point[0] / self.dt)
p = via_point[1:]
dists[i] = np.linalg.norm(p - self.P[t, :3])
self.logger.info("Distances: %r" % dists)
return dists
def get_feedback(self):
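        # Per-step reward is the negative weighted sum of joint speed and
        # acceleration penalties; the distance to the via points is charged once,
        # on the final step.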
rewards = np.zeros(self.t)
if self.penalty_vel > 0.0:
rewards -= self.get_speed() * self.penalty_vel
if self.penalty_acc > 0.0:
rewards -= self.get_acceleration() * self.penalty_acc
if self.penalty_via_point > 0.0:
rewards[-1] -= self.penalty_via_point * self.get_via_point_dist().sum()
return rewards
def is_behavior_learning_done(self):
return False
def get_maximum_feedback(self):
return 0.0
def plot(self):
plt.figure()
ax = plt.subplot(111, projection="3d", aspect="equal")
#ax.scatter(self.x0[0], self.x0[1], self.x0[2], c="r", s=100)
for viapoint in self.via_points:
ax.scatter(viapoint[1], viapoint[2], viapoint[3], c="k", s=100)
# TODO sort by time
ax.plot(self.via_points[:, 1], self.via_points[:, 2],
self.via_points[:, 3], c="k", alpha=0.5)
#plot_trajectory(ax=ax, P=self.P, s=0.05, lw=2, c="k",
# show_direction=False)
ax.plot(self.P[:, 0], self.P[:, 1], self.P[:, 2], c="k")
key_frames = np.linspace(0, self.P.shape[0] - 1, 10).astype(np.int)
s = 0.1
for p in self.P[key_frames]:
R = matrix_from_quaternion(p[3:])
for d in range(3):
ax.plot([p[0], p[0] + s * R[0, d]],
[p[1], p[1] + s * R[1, d]],
[p[2], p[2] + s * R[2, d]],
color="k")
ax.set_xlim((-0.4, 0.4))
ax.set_ylim((-0.9, 0.1))
ax.set_zlim((0.2, 1.0))
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
return ax
| 32.623656 | 84 | 0.585531 |
794829f0d3fa2323b3466c2ea6b5d090adf10914
| 4,159 |
py
|
Python
|
base/dependency/interface/interface_dependency_extraction.py
|
btc-ag/revengtools
|
d58680ef7d6bdc8ef518860d5d13a5acc0d01758
|
[
"Apache-2.0"
] | 2 |
2019-07-15T14:59:59.000Z
|
2022-01-18T14:23:54.000Z
|
base/dependency/interface/interface_dependency_extraction.py
|
btc-ag/revengtools
|
d58680ef7d6bdc8ef518860d5d13a5acc0d01758
|
[
"Apache-2.0"
] | 10 |
2018-05-03T13:25:07.000Z
|
2021-06-25T15:14:55.000Z
|
base/dependency/interface/interface_dependency_extraction.py
|
btc-ag/revengtools
|
d58680ef7d6bdc8ef518860d5d13a5acc0d01758
|
[
"Apache-2.0"
] | 1 |
2018-05-02T13:59:27.000Z
|
2018-05-02T13:59:27.000Z
|
# -*- coding: utf-8 -*-
import collections
import json
import logging
import subprocess
TypeInformation = collections.namedtuple('TypeInformation', ['TypeName','Assembly'])
# TODO rename the module according to the naming convention to interface_dependency_extractor
class ExternalAnalyzerFailedError(Exception):
pass
class WrongFileTypeError(ExternalAnalyzerFailedError):
"""Thrown when the analysed file has the wrong format (e.g. not a .NET assembly)"""
pass
class ExternalAnalyserAdapter(object):
def __init__(self, analyserExe):
self.logger = logging.getLogger(self.__class__.__module__)
self.pathToAnalyserExe = analyserExe
def analyse_dll(self, dllFile):
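        # Invoke the external analyser executable on the assembly and return its
        # raw stdout; a return code of -2 marks an input that is not a .NET assembly.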
        self.logger.debug("Analysing dll '%s'" % dllFile)
prog = subprocess.Popen([self.pathToAnalyserExe, dllFile],stdout=subprocess.PIPE)
xmlResult = prog.communicate()[0]
if prog.returncode != 0:
if prog.returncode == -2:
raise WrongFileTypeError(xmlResult)
else:
raise ExternalAnalyzerFailedError(xmlResult)
return xmlResult
class InterfaceDependencyExtractor(object):
# TODO (low prio) according to the design guidelines, it should be preferred to return generators etc. instead of lists. never return mutable lists
# TODO change logging.* to self.__logger.*
def __init__(self, analyserAdapter):
self.__analyserAdapter = analyserAdapter
def __convert_to_type_info(self, jsonData):
return TypeInformation(jsonData['TypeName'], jsonData['DefinedInAssembly'])
def left_right_compare(self, rightSideDlls, leftSideDlls=None):
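        """Collect interface-level dependencies between two sets of assemblies.

        For every type exported by the left-side assemblies (or for every
        referenced type when leftSideDlls is None), gather the names of the
        exported classes in the right-side assemblies whose public interface
        (visible method signatures and fields) refers to that type. Returns a
        list of (TypeInformation, set of class names) tuples.
        """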
        # TODO rename "left_right_compare", "rightSideDlls" and "leftSideDlls" to more meaningful names
types = {}
if leftSideDlls is None:
#Analyse all interface deps to all targets
types = collections.defaultdict(lambda: set())
else:
#Initialize every class with 0
for dll in leftSideDlls:
result = self.analyse_exported_classes(dll)
for cls in result:
types[cls] = set()
for dll in rightSideDlls:
try:
result = self.analyse_interface_dependencies(dll)
except WrongFileTypeError:
logging.warn("Skipped DLL %s" % dll)
continue
for (cls,deps) in result:
for dep in deps:
if dep in types or leftSideDlls is None:
types[dep].add(cls)
return types.items()
def analyse_exported_classes(self, filename):
""" Returns a list of all types that are exported by a assembly """
logging.info("Analyzing dll '{0}'".format(filename))
output = self.__analyserAdapter.analyse_dll(filename)
jsonData = json.loads(output)
assemblyName = jsonData['Name']
classList = jsonData['Classes']
classList = filter(lambda x: x['IsExported'], classList)
classList = [TypeInformation(x['Name'], assemblyName) for x in classList]
return classList
def analyse_interface_dependencies(self, filename):
""" Returns a list of tuples with the format (name of class, [list of interface dependencies]) """
output = self.__analyserAdapter.analyse_dll(filename)
jsonData = json.loads(output)
classList = jsonData['Classes']
classList = filter(lambda x: x['IsExported'], classList)
result = []
for cls in classList:
interfaceTypes = set()
for m in cls['Methods']:
if m['IsVisible']:
interfaceTypes.add(self.__convert_to_type_info(m['ReturnType']))
interfaceTypes.update([self.__convert_to_type_info(t['Type']) for t in m['Parameters']])
for f in cls['Fields']:
if f['IsVisible']:
interfaceTypes.add(self.__convert_to_type_info(f['Type']))
result.append((cls['Name'],interfaceTypes))
return result
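# Illustrative usage sketch (not part of the original module). The analyser
# executable and DLL paths below are assumptions made purely for demonstration;
# a real run needs the external .NET analyser executable.
if __name__ == '__main__':
    adapter = ExternalAnalyserAdapter(r'C:\tools\InterfaceAnalyser.exe')
    extractor = InterfaceDependencyExtractor(adapter)
    # Each item pairs an exported left-side type with the set of right-side
    # classes whose public interface (methods/fields) refers to it.
    dependencies = extractor.left_right_compare(
        rightSideDlls=[r'C:\libs\Client.dll'],
        leftSideDlls=[r'C:\libs\Core.dll'])
    for type_info, dependent_classes in dependencies:
        print('%s <- %d dependent classes' % (type_info.TypeName,
                                              len(dependent_classes)))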
| 40.77451 | 152 | 0.629478 |
79482ad0fa59ba562528e45d90ddc8a13bc998f1
| 2,099 |
py
|
Python
|
web/web_lianxiang.py
|
jerryluosuper/Lensi
|
3f5c8d6f3c57d58523f4fe5f5f6bcc9657b82c24
|
[
"MIT"
] | 1 |
2022-03-23T00:53:56.000Z
|
2022-03-23T00:53:56.000Z
|
web/web_lianxiang.py
|
jerryluosuper/Lensi
|
3f5c8d6f3c57d58523f4fe5f5f6bcc9657b82c24
|
[
"MIT"
] | null | null | null |
web/web_lianxiang.py
|
jerryluosuper/Lensi
|
3f5c8d6f3c57d58523f4fe5f5f6bcc9657b82c24
|
[
"MIT"
] | null | null | null |
'''
Author: your name
Date: 2022-02-20 21:28:15
LastEditTime: 2022-03-01 20:45:06
LastEditors: Please set LastEditors
Description: open koroFileHeader to view and adjust the configuration: https://github.com/OBKoro1/koro1FileHeader/wiki/%E9%85%8D%E7%BD%AE
FilePath: \Work\Lensi\web_lianxiang.py
'''
import requests  # import the requests package
import json
'''
TODO: incomplete and unused - the POST fails and still needs optimization
'''
url = 'https://lestore.lenovo.com/api/webstorecontents/download/getDownloadUrl'
headers = {'content-encoding': 'gzip',
'content-type':'application/json;charset=UTF-8',
'date': 'Tue, 01 Mar 2022 11:56:37 GMT',
'strict-transport-security': 'max-age=15724800;includeSubDomains',
'vary': 'Accept-Encoding',
'x-powered-by': 'Express'}
From_data={
'authority': 'lestore.lenovo.com',
'method': 'POST',
'path': '/api/webstorecontents/download/getDownloadUrl',
'scheme': 'https',
'accept': 'application/json,text/plain, */*',
'accept-encoding': 'gzip, deflate, br',
'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
'content-length': '99',
'content-type': 'application/json;charset=UTF-8',
'cookie': 'avatar_name=41209ab9-acf3-400c-bcb9-cd0814b3ad5c; avt_v=vid%3D%3E17edec244ac605b%7C%7C%7Cfsts%3D%3E1644414780586%7C%7C%7Cdsfs%3D%3E19919%7C%7C%7Cnps%3D%3E11; avt_s=lsts%3D%3E1646135804140%7C%7C%7Csid%3D%3E7999688612%7C%7C%7Cvs%3D%3E3%7C%7C%7Csource%3D%3Edirect%7C%7C%7Cpref%3D%3Ehttps%3A//lestore.lenovo.com/%7C%7C%7Cref%3D%3Ehttps%3A//lestore.lenovo.com/search%3Fk%3Dgeek',
'dnt': '1',
'origin': 'https://lestore.lenovo.com',
'referer': 'https://lestore.lenovo.com/detail/23258',
'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="98", "Microsoft Edge";v="98"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Windows"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36 Edg/98.0.1108.56'}
response = requests.post(url,data=From_data,headers=headers)
content = json.loads(response.text)
print(response.text)
print(content)
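# Hedged follow-up sketch (not from the original script): the request above
# declares a JSON content-type yet sends From_data form-encoded via `data=`,
# which is one plausible reason the TODO marks the POST as failing. If the
# endpoint expects a JSON body, something along these lines may work better;
# the exact payload fields the API expects are an assumption here.
# response = requests.post(url, json=From_data, headers=headers)
# content = response.json()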
| 43.729167 | 386 | 0.707003 |
79482af939570211a1e7f30ed84dd08b9c6fa2ff
| 416 |
py
|
Python
|
hubspot/crm/objects/feedback_submissions/api/__init__.py
|
fakepop/hubspot-api-python
|
f04103a09f93f5c26c99991b25fa76801074f3d3
|
[
"Apache-2.0"
] | null | null | null |
hubspot/crm/objects/feedback_submissions/api/__init__.py
|
fakepop/hubspot-api-python
|
f04103a09f93f5c26c99991b25fa76801074f3d3
|
[
"Apache-2.0"
] | null | null | null |
hubspot/crm/objects/feedback_submissions/api/__init__.py
|
fakepop/hubspot-api-python
|
f04103a09f93f5c26c99991b25fa76801074f3d3
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from hubspot.crm.objects.feedback_submissions.api.associations_api import (
AssociationsApi,
)
from hubspot.crm.objects.feedback_submissions.api.basic_api import BasicApi
from hubspot.crm.objects.feedback_submissions.api.batch_api import BatchApi
from hubspot.crm.objects.feedback_submissions.api.search_api import SearchApi
| 34.666667 | 77 | 0.848558 |
79482b1d8a4e500eb665725e004cf8e3a0791e4b
| 889 |
py
|
Python
|
conf/settings_development.py
|
tom523/bkapp_test
|
d7569219e316811c4257684598c52a1b13e67d87
|
[
"Apache-2.0"
] | null | null | null |
conf/settings_development.py
|
tom523/bkapp_test
|
d7569219e316811c4257684598c52a1b13e67d87
|
[
"Apache-2.0"
] | null | null | null |
conf/settings_development.py
|
tom523/bkapp_test
|
d7569219e316811c4257684598c52a1b13e67d87
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云(BlueKing) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
Global configuration for the local development environment
"""
import os
from settings import BASE_DIR
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3')
}
}
| 42.333333 | 115 | 0.750281 |
79482b22685431677db04d2292ef0357ebea4625
| 2,134 |
py
|
Python
|
libwinmedia/media.py
|
libwinmedia/libwinmedia-py
|
28de70680ccef5f76381c8c8c77d30600495c6f5
|
[
"MIT"
] | 1 |
2022-01-30T11:05:12.000Z
|
2022-01-30T11:05:12.000Z
|
libwinmedia/media.py
|
mytja/libwinmedia-py
|
28de70680ccef5f76381c8c8c77d30600495c6f5
|
[
"MIT"
] | null | null | null |
libwinmedia/media.py
|
mytja/libwinmedia-py
|
28de70680ccef5f76381c8c8c77d30600495c6f5
|
[
"MIT"
] | null | null | null |
import os
import platform
from .library import lib
from ctypes import POINTER, c_char_p
from .tags import MusicTags, VideoTags
media_id = 0
class Media(object):
"""A class representing a media file."""
def __init__(self, uri: str, parse: bool = True):
"""Create a new Media instance.
The URI can be either a local file (e.g. "file://C:/music/track.mp3")
or an HTTP URL (e.g. "https://www.kozco.com/tech/piano2.wav").
Args:
uri (str): A URI of the media.
parse (bool, optional): Whether to parse the media. Defaults to True. True is required for duration parsing
"""
global media_id
self.id = media_id
if not os.path.isabs(uri) and not ("https" in uri or "http" in uri):
uri = os.path.join(os.path.dirname(os.path.abspath(uri)), uri)
self.uri = uri
if platform.system() != "Linux":
lib.MediaCreate(self.id, uri.encode("utf-8"), parse)
media_id += 1
def dispose(self) -> None:
"""Release system resources and kill the media instance."""
lib.MediaDispose(self.id)
@property
def duration(self) -> int:
return lib.MediaGetDuration(self.id)
def tags_from_music(self) -> dict:
# TODO: add docstring
        lib.TagsFromMusic.argtypes = [c_char_p]
lib.TagsFromMusic.restype = POINTER(c_char_p)
meta = lib.TagsFromMusic(self.uri.encode("utf-8"))
return MusicTags.get(meta)
def tags_from_video(self) -> dict:
        lib.TagsFromVideo.argtypes = [c_char_p]
lib.TagsFromVideo.restype = POINTER(c_char_p)
meta = lib.TagsFromVideo(self.uri.encode("utf-8"))
return VideoTags.get(meta)
def extract_thumbnail(self, output_folder: str, output_file: str) -> None:
"""Extract the thumbnail and save it to a file.
Args:
output_folder (str): A folder for saving the thumbnail.
output_file (str): A name of the thumbnail file.
"""
lib.TagsExtractThumbnail(self.uri.encode(), ("file://" + output_folder).encode(), output_file.encode(), 2, 400)
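# Illustrative usage sketch (not part of the original module). The sample URL
# is taken from the class docstring; tag or thumbnail extraction may require a
# local file and the native libwinmedia backend to be available.
if __name__ == "__main__":
    media = Media("https://www.kozco.com/tech/piano2.wav")
    print(media.duration)           # parsed duration reported by the backend
    print(media.tags_from_music())  # tag dictionary built via MusicTags.get
    media.dispose()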
| 32.333333 | 119 | 0.621837 |
79482bde896dbb5a879ad7eb2b569ca9dd6cf311
| 550 |
py
|
Python
|
forms.py
|
Dev-Elie/Threaded-Nested-Replies-using-Flask-SQLAlchemy
|
141c096899c190825690292e9c8f78412b5422a9
|
[
"MIT"
] | 3 |
2021-10-06T07:27:25.000Z
|
2022-01-31T12:22:35.000Z
|
forms.py
|
Dev-Elie/Threaded-Nested-Replies-using-Flask-SQLAlchemy
|
141c096899c190825690292e9c8f78412b5422a9
|
[
"MIT"
] | null | null | null |
forms.py
|
Dev-Elie/Threaded-Nested-Replies-using-Flask-SQLAlchemy
|
141c096899c190825690292e9c8f78412b5422a9
|
[
"MIT"
] | null | null | null |
from wtforms import (
StringField,
TextAreaField,
)
from flask_wtf import FlaskForm
from wtforms.validators import InputRequired, Length, EqualTo, Email, Regexp
from wtforms import ValidationError
from wtforms import validators
# No forms here
class CommentForm(FlaskForm):
author = StringField(validators = [InputRequired()])
comment = TextAreaField(validators = [InputRequired()])
class ReplyForm(FlaskForm):
author = StringField(validators = [InputRequired()])
reply = TextAreaField(validators = [InputRequired()])
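# Illustrative usage sketch (not part of the original module): how a Flask view
# might consume these forms. The route, template, and save_comment helper are
# assumptions for demonstration only.
# @app.route("/comment", methods=["GET", "POST"])
# def add_comment():
#     form = CommentForm()
#     if form.validate_on_submit():
#         save_comment(author=form.author.data, comment=form.comment.data)
#     return render_template("comments.html", form=form)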
| 27.5 | 76 | 0.754545 |
79482bf5dd460ccb1b6c184d11912d3613eab28e
| 6,821 |
py
|
Python
|
loldib/getratings/models/NA/na_nocturne/na_nocturne_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nocturne/na_nocturne_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nocturne/na_nocturne_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Nocturne_Top_Aatrox(Ratings):
pass
class NA_Nocturne_Top_Ahri(Ratings):
pass
class NA_Nocturne_Top_Akali(Ratings):
pass
class NA_Nocturne_Top_Alistar(Ratings):
pass
class NA_Nocturne_Top_Amumu(Ratings):
pass
class NA_Nocturne_Top_Anivia(Ratings):
pass
class NA_Nocturne_Top_Annie(Ratings):
pass
class NA_Nocturne_Top_Ashe(Ratings):
pass
class NA_Nocturne_Top_AurelionSol(Ratings):
pass
class NA_Nocturne_Top_Azir(Ratings):
pass
class NA_Nocturne_Top_Bard(Ratings):
pass
class NA_Nocturne_Top_Blitzcrank(Ratings):
pass
class NA_Nocturne_Top_Brand(Ratings):
pass
class NA_Nocturne_Top_Braum(Ratings):
pass
class NA_Nocturne_Top_Caitlyn(Ratings):
pass
class NA_Nocturne_Top_Camille(Ratings):
pass
class NA_Nocturne_Top_Cassiopeia(Ratings):
pass
class NA_Nocturne_Top_Chogath(Ratings):
pass
class NA_Nocturne_Top_Corki(Ratings):
pass
class NA_Nocturne_Top_Darius(Ratings):
pass
class NA_Nocturne_Top_Diana(Ratings):
pass
class NA_Nocturne_Top_Draven(Ratings):
pass
class NA_Nocturne_Top_DrMundo(Ratings):
pass
class NA_Nocturne_Top_Ekko(Ratings):
pass
class NA_Nocturne_Top_Elise(Ratings):
pass
class NA_Nocturne_Top_Evelynn(Ratings):
pass
class NA_Nocturne_Top_Ezreal(Ratings):
pass
class NA_Nocturne_Top_Fiddlesticks(Ratings):
pass
class NA_Nocturne_Top_Fiora(Ratings):
pass
class NA_Nocturne_Top_Fizz(Ratings):
pass
class NA_Nocturne_Top_Galio(Ratings):
pass
class NA_Nocturne_Top_Gangplank(Ratings):
pass
class NA_Nocturne_Top_Garen(Ratings):
pass
class NA_Nocturne_Top_Gnar(Ratings):
pass
class NA_Nocturne_Top_Gragas(Ratings):
pass
class NA_Nocturne_Top_Graves(Ratings):
pass
class NA_Nocturne_Top_Hecarim(Ratings):
pass
class NA_Nocturne_Top_Heimerdinger(Ratings):
pass
class NA_Nocturne_Top_Illaoi(Ratings):
pass
class NA_Nocturne_Top_Irelia(Ratings):
pass
class NA_Nocturne_Top_Ivern(Ratings):
pass
class NA_Nocturne_Top_Janna(Ratings):
pass
class NA_Nocturne_Top_JarvanIV(Ratings):
pass
class NA_Nocturne_Top_Jax(Ratings):
pass
class NA_Nocturne_Top_Jayce(Ratings):
pass
class NA_Nocturne_Top_Jhin(Ratings):
pass
class NA_Nocturne_Top_Jinx(Ratings):
pass
class NA_Nocturne_Top_Kalista(Ratings):
pass
class NA_Nocturne_Top_Karma(Ratings):
pass
class NA_Nocturne_Top_Karthus(Ratings):
pass
class NA_Nocturne_Top_Kassadin(Ratings):
pass
class NA_Nocturne_Top_Katarina(Ratings):
pass
class NA_Nocturne_Top_Kayle(Ratings):
pass
class NA_Nocturne_Top_Kayn(Ratings):
pass
class NA_Nocturne_Top_Kennen(Ratings):
pass
class NA_Nocturne_Top_Khazix(Ratings):
pass
class NA_Nocturne_Top_Kindred(Ratings):
pass
class NA_Nocturne_Top_Kled(Ratings):
pass
class NA_Nocturne_Top_KogMaw(Ratings):
pass
class NA_Nocturne_Top_Leblanc(Ratings):
pass
class NA_Nocturne_Top_LeeSin(Ratings):
pass
class NA_Nocturne_Top_Leona(Ratings):
pass
class NA_Nocturne_Top_Lissandra(Ratings):
pass
class NA_Nocturne_Top_Lucian(Ratings):
pass
class NA_Nocturne_Top_Lulu(Ratings):
pass
class NA_Nocturne_Top_Lux(Ratings):
pass
class NA_Nocturne_Top_Malphite(Ratings):
pass
class NA_Nocturne_Top_Malzahar(Ratings):
pass
class NA_Nocturne_Top_Maokai(Ratings):
pass
class NA_Nocturne_Top_MasterYi(Ratings):
pass
class NA_Nocturne_Top_MissFortune(Ratings):
pass
class NA_Nocturne_Top_MonkeyKing(Ratings):
pass
class NA_Nocturne_Top_Mordekaiser(Ratings):
pass
class NA_Nocturne_Top_Morgana(Ratings):
pass
class NA_Nocturne_Top_Nami(Ratings):
pass
class NA_Nocturne_Top_Nasus(Ratings):
pass
class NA_Nocturne_Top_Nautilus(Ratings):
pass
class NA_Nocturne_Top_Nidalee(Ratings):
pass
class NA_Nocturne_Top_Nocturne(Ratings):
pass
class NA_Nocturne_Top_Nunu(Ratings):
pass
class NA_Nocturne_Top_Olaf(Ratings):
pass
class NA_Nocturne_Top_Orianna(Ratings):
pass
class NA_Nocturne_Top_Ornn(Ratings):
pass
class NA_Nocturne_Top_Pantheon(Ratings):
pass
class NA_Nocturne_Top_Poppy(Ratings):
pass
class NA_Nocturne_Top_Quinn(Ratings):
pass
class NA_Nocturne_Top_Rakan(Ratings):
pass
class NA_Nocturne_Top_Rammus(Ratings):
pass
class NA_Nocturne_Top_RekSai(Ratings):
pass
class NA_Nocturne_Top_Renekton(Ratings):
pass
class NA_Nocturne_Top_Rengar(Ratings):
pass
class NA_Nocturne_Top_Riven(Ratings):
pass
class NA_Nocturne_Top_Rumble(Ratings):
pass
class NA_Nocturne_Top_Ryze(Ratings):
pass
class NA_Nocturne_Top_Sejuani(Ratings):
pass
class NA_Nocturne_Top_Shaco(Ratings):
pass
class NA_Nocturne_Top_Shen(Ratings):
pass
class NA_Nocturne_Top_Shyvana(Ratings):
pass
class NA_Nocturne_Top_Singed(Ratings):
pass
class NA_Nocturne_Top_Sion(Ratings):
pass
class NA_Nocturne_Top_Sivir(Ratings):
pass
class NA_Nocturne_Top_Skarner(Ratings):
pass
class NA_Nocturne_Top_Sona(Ratings):
pass
class NA_Nocturne_Top_Soraka(Ratings):
pass
class NA_Nocturne_Top_Swain(Ratings):
pass
class NA_Nocturne_Top_Syndra(Ratings):
pass
class NA_Nocturne_Top_TahmKench(Ratings):
pass
class NA_Nocturne_Top_Taliyah(Ratings):
pass
class NA_Nocturne_Top_Talon(Ratings):
pass
class NA_Nocturne_Top_Taric(Ratings):
pass
class NA_Nocturne_Top_Teemo(Ratings):
pass
class NA_Nocturne_Top_Thresh(Ratings):
pass
class NA_Nocturne_Top_Tristana(Ratings):
pass
class NA_Nocturne_Top_Trundle(Ratings):
pass
class NA_Nocturne_Top_Tryndamere(Ratings):
pass
class NA_Nocturne_Top_TwistedFate(Ratings):
pass
class NA_Nocturne_Top_Twitch(Ratings):
pass
class NA_Nocturne_Top_Udyr(Ratings):
pass
class NA_Nocturne_Top_Urgot(Ratings):
pass
class NA_Nocturne_Top_Varus(Ratings):
pass
class NA_Nocturne_Top_Vayne(Ratings):
pass
class NA_Nocturne_Top_Veigar(Ratings):
pass
class NA_Nocturne_Top_Velkoz(Ratings):
pass
class NA_Nocturne_Top_Vi(Ratings):
pass
class NA_Nocturne_Top_Viktor(Ratings):
pass
class NA_Nocturne_Top_Vladimir(Ratings):
pass
class NA_Nocturne_Top_Volibear(Ratings):
pass
class NA_Nocturne_Top_Warwick(Ratings):
pass
class NA_Nocturne_Top_Xayah(Ratings):
pass
class NA_Nocturne_Top_Xerath(Ratings):
pass
class NA_Nocturne_Top_XinZhao(Ratings):
pass
class NA_Nocturne_Top_Yasuo(Ratings):
pass
class NA_Nocturne_Top_Yorick(Ratings):
pass
class NA_Nocturne_Top_Zac(Ratings):
pass
class NA_Nocturne_Top_Zed(Ratings):
pass
class NA_Nocturne_Top_Ziggs(Ratings):
pass
class NA_Nocturne_Top_Zilean(Ratings):
pass
class NA_Nocturne_Top_Zyra(Ratings):
pass
| 16.357314 | 46 | 0.776133 |
79482c8bfa4476f757229a7a8465c7b8e0972669
| 1,836 |
py
|
Python
|
sokoapp/gallery/parser.py
|
Mercy-Nekesa/sokoapp
|
6c7bc4c1278b7223226124a49fc33c5b8b6b617a
|
[
"MIT"
] | 1 |
2019-04-01T05:52:37.000Z
|
2019-04-01T05:52:37.000Z
|
sokoapp/gallery/parser.py
|
Mercy-Nekesa/sokoapp
|
6c7bc4c1278b7223226124a49fc33c5b8b6b617a
|
[
"MIT"
] | 1 |
2015-03-11T16:18:12.000Z
|
2015-03-11T16:18:12.000Z
|
sokoapp/gallery/parser.py
|
Mercy-Nekesa/sokoapp
|
6c7bc4c1278b7223226124a49fc33c5b8b6b617a
|
[
"MIT"
] | null | null | null |
from django.template import TemplateSyntaxError
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.http import Http404
from django.utils.encoding import smart_unicode
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from models import Album
def inlines(value, return_list=False):
try:
from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup
except ImportError:
from beautifulsoup import BeautifulStoneSoup, BeautifulSoup
content = BeautifulSoup(value, selfClosingTags=['gallery','img','br','input','meta','link','hr'])
inline_list = []
if return_list:
for inline in content.findAll('gallery'):
rendered_inline = render_inline(inline)
inline_list.append(rendered_inline['context'])
return inline_list
else:
for inline in content.findAll('gallery'):
rendered_inline = render_inline(inline)
if rendered_inline:
inline.replaceWith(render_to_string(rendered_inline['template'], rendered_inline['context']))
else:
inline.replaceWith('')
return mark_safe(content)
def render_inline(inline):
"""
Replace inline markup with template markup that matches the
appropriate app and model.
"""
# Look for inline type, 'app.model'
try:
id = inline['id']
except:
if settings.DEBUG:
            raise TemplateSyntaxError("Couldn't find the attribute 'id' in the <gallery> tag.")
else:
return ''
album = Album.objects.get(pk=id)
context = { 'album': album, }
template = ["fancy/gallery/inline_album.html"]
rendered_inline = {'template':template, 'context':context}
return rendered_inline
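# Illustrative usage sketch (not part of the original module). The album id and
# markup are assumptions; an Album with pk=1 must exist for the render to work.
# html = inlines(u'<p>Holiday photos:</p><gallery id="1" />')
# Each <gallery> tag is replaced with the rendered
# "fancy/gallery/inline_album.html" template; calling inlines(..., return_list=True)
# returns the template contexts instead of markup.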
| 32.785714 | 109 | 0.680828 |
79482cf9cacbfdc7c07dd4400e4e615edab946d3
| 11,288 |
py
|
Python
|
Lib/site-packages/notebook/services/contents/fileio.py
|
edupyter/EDUPYTER38
|
396183cea72987506f1ef647c0272a2577c56218
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/notebook/services/contents/fileio.py
|
edupyter/EDUPYTER38
|
396183cea72987506f1ef647c0272a2577c56218
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/notebook/services/contents/fileio.py
|
edupyter/EDUPYTER38
|
396183cea72987506f1ef647c0272a2577c56218
|
[
"bzip2-1.0.6"
] | null | null | null |
"""
Utilities for file-based Contents/Checkpoints managers.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from contextlib import contextmanager
import errno
import os
import shutil
from tornado.web import HTTPError
from notebook.utils import (
to_api_path,
to_os_path,
)
import nbformat
from ipython_genutils.py3compat import str_to_unicode
from traitlets.config import Configurable
from traitlets import Bool
from base64 import encodebytes, decodebytes
def replace_file(src, dst):
""" replace dst with src
switches between os.replace or os.rename based on python 2.7 or python 3
"""
if hasattr(os, 'replace'): # PY3
os.replace(src, dst)
else:
if os.name == 'nt' and os.path.exists(dst):
# Rename over existing file doesn't work on Windows
os.remove(dst)
os.rename(src, dst)
def copy2_safe(src, dst, log=None):
"""copy src to dst
like shutil.copy2, but log errors in copystat instead of raising
"""
shutil.copyfile(src, dst)
try:
shutil.copystat(src, dst)
except OSError:
if log:
log.debug("copystat on %s failed", dst, exc_info=True)
def path_to_intermediate(path):
'''Name of the intermediate file used in atomic writes.
The .~ prefix will make Dropbox ignore the temporary file.'''
dirname, basename = os.path.split(path)
return os.path.join(dirname, '.~'+basename)
def path_to_invalid(path):
'''Name of invalid file after a failed atomic write and subsequent read.'''
dirname, basename = os.path.split(path)
return os.path.join(dirname, basename+'.invalid')
@contextmanager
def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs):
"""Context manager to write to a file only if the entire write is successful.
This works by copying the previous file contents to a temporary file in the
same directory, and renaming that file back to the target if the context
exits with an error. If the context is successful, the new data is synced to
disk and the temporary file is removed.
Parameters
----------
path : str
The target file to write to.
text : bool, optional
Whether to open the file in text mode (i.e. to write unicode). Default is
True.
encoding : str, optional
The encoding to use for files opened in text mode. Default is UTF-8.
**kwargs
Passed to :func:`io.open`.
"""
# realpath doesn't work on Windows: https://bugs.python.org/issue9949
# Luckily, we only need to resolve the file itself being a symlink, not
# any of its directories, so this will suffice:
if os.path.islink(path):
path = os.path.join(os.path.dirname(path), os.readlink(path))
tmp_path = path_to_intermediate(path)
if os.path.isfile(path):
copy2_safe(path, tmp_path, log=log)
if text:
# Make sure that text files have Unix linefeeds by default
kwargs.setdefault('newline', '\n')
fileobj = open(path, 'w', encoding=encoding, **kwargs)
else:
fileobj = open(path, 'wb', **kwargs)
try:
yield fileobj
except:
# Failed! Move the backup file back to the real path to avoid corruption
fileobj.close()
replace_file(tmp_path, path)
raise
# Flush to disk
fileobj.flush()
os.fsync(fileobj.fileno())
fileobj.close()
# Written successfully, now remove the backup copy
if os.path.isfile(tmp_path):
os.remove(tmp_path)
@contextmanager
def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs):
"""Context manager to write file without doing atomic writing
( for weird filesystem eg: nfs).
Parameters
----------
path : str
The target file to write to.
text : bool, optional
Whether to open the file in text mode (i.e. to write unicode). Default is
True.
encoding : str, optional
The encoding to use for files opened in text mode. Default is UTF-8.
**kwargs
Passed to :func:`io.open`.
"""
# realpath doesn't work on Windows: https://bugs.python.org/issue9949
# Luckily, we only need to resolve the file itself being a symlink, not
# any of its directories, so this will suffice:
if os.path.islink(path):
path = os.path.join(os.path.dirname(path), os.readlink(path))
if text:
# Make sure that text files have Unix linefeeds by default
kwargs.setdefault('newline', '\n')
fileobj = open(path, 'w', encoding=encoding, **kwargs)
else:
fileobj = open(path, 'wb', **kwargs)
try:
yield fileobj
except:
fileobj.close()
raise
fileobj.close()
class FileManagerMixin(Configurable):
"""
Mixin for ContentsAPI classes that interact with the filesystem.
Provides facilities for reading, writing, and copying both notebooks and
generic files.
Shared by FileContentsManager and FileCheckpoints.
Note
----
Classes using this mixin must provide the following attributes:
root_dir : unicode
A directory against which API-style paths are to be resolved.
log : logging.Logger
"""
use_atomic_writing = Bool(True, config=True, help=
"""By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones.
This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs).
If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota )""")
@contextmanager
def open(self, os_path, *args, **kwargs):
"""wrapper around io.open that turns permission errors into 403"""
with self.perm_to_403(os_path):
with open(os_path, *args, **kwargs) as f:
yield f
@contextmanager
def atomic_writing(self, os_path, *args, **kwargs):
"""wrapper around atomic_writing that turns permission errors to 403.
Depending on flag 'use_atomic_writing', the wrapper perform an actual atomic writing or
simply writes the file (whatever an old exists or not)"""
with self.perm_to_403(os_path):
if self.use_atomic_writing:
with atomic_writing(os_path, *args, log=self.log, **kwargs) as f:
yield f
else:
with _simple_writing(os_path, *args, log=self.log, **kwargs) as f:
yield f
@contextmanager
def perm_to_403(self, os_path=''):
"""context manager for turning permission errors into 403."""
try:
yield
except OSError as e:
if e.errno in {errno.EPERM, errno.EACCES}:
# make 403 error message without root prefix
# this may not work perfectly on unicode paths on Python 2,
# but nobody should be doing that anyway.
if not os_path:
os_path = str_to_unicode(e.filename or 'unknown file')
path = to_api_path(os_path, root=self.root_dir)
raise HTTPError(403, f'Permission denied: {path}') from e
else:
raise
def _copy(self, src, dest):
"""copy src to dest
like shutil.copy2, but log errors in copystat
"""
copy2_safe(src, dest, log=self.log)
def _get_os_path(self, path):
"""Given an API path, return its file system path.
Parameters
----------
path : string
The relative API path to the named file.
Returns
-------
path : string
Native, absolute OS path to for a file.
Raises
------
404: if path is outside root
"""
root = os.path.abspath(self.root_dir)
os_path = to_os_path(path, root)
if not (os.path.abspath(os_path) + os.path.sep).startswith(root):
raise HTTPError(404, f"{path} is outside root contents directory")
return os_path
def _read_notebook(self, os_path, as_version=4):
"""Read a notebook from an os path."""
with self.open(os_path, 'r', encoding='utf-8') as f:
try:
return nbformat.read(f, as_version=as_version)
except Exception as e:
e_orig = e
# If use_atomic_writing is enabled, we'll guess that it was also
# enabled when this notebook was written and look for a valid
# atomic intermediate.
tmp_path = path_to_intermediate(os_path)
if not self.use_atomic_writing or not os.path.exists(tmp_path):
raise HTTPError(
400,
f"Unreadable Notebook: {os_path} {e_orig!r}",
)
# Move the bad file aside, restore the intermediate, and try again.
invalid_file = path_to_invalid(os_path)
replace_file(os_path, invalid_file)
replace_file(tmp_path, os_path)
return self._read_notebook(os_path, as_version)
def _save_notebook(self, os_path, nb):
"""Save a notebook to an os_path."""
with self.atomic_writing(os_path, encoding='utf-8') as f:
nbformat.write(nb, f, version=nbformat.NO_CONVERT)
def _read_file(self, os_path, format):
"""Read a non-notebook file.
os_path: The path to be read.
format:
If 'text', the contents will be decoded as UTF-8.
If 'base64', the raw bytes contents will be encoded as base64.
If not specified, try to decode as UTF-8, and fall back to base64
"""
if not os.path.isfile(os_path):
raise HTTPError(400, f"Cannot read non-file {os_path}")
with self.open(os_path, 'rb') as f:
bcontent = f.read()
if format is None or format == 'text':
# Try to interpret as unicode if format is unknown or if unicode
# was explicitly requested.
try:
return bcontent.decode('utf8'), 'text'
except UnicodeError as e:
if format == 'text':
raise HTTPError(
400,
f"{os_path} is not UTF-8 encoded",
reason='bad format',
) from e
return encodebytes(bcontent).decode('ascii'), 'base64'
def _save_file(self, os_path, content, format):
"""Save content of a generic file."""
if format not in {'text', 'base64'}:
raise HTTPError(
400,
"Must specify format of file contents as 'text' or 'base64'",
)
try:
if format == 'text':
bcontent = content.encode('utf8')
else:
b64_bytes = content.encode('ascii')
bcontent = decodebytes(b64_bytes)
except Exception as e:
raise HTTPError(
400, f'Encoding error saving {os_path}: {e}'
) from e
with self.atomic_writing(os_path, text=False) as f:
f.write(bcontent)
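# Illustrative usage sketch (not part of the original module). The target path
# is an assumption. On success the temporary ".~"-prefixed copy is removed; if
# the block raises, the previous file contents are restored from that copy.
# with atomic_writing('/tmp/nb/report.txt', text=True) as f:
#     f.write(u'only persisted if the whole write succeeds\n')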
| 33.102639 | 140 | 0.613395 |
79482d0c84f18091ae358d2870efb2ccdfcf1ec8
| 1,267 |
py
|
Python
|
hummingbot/strategy/dev_1_get_order_book/dev_1_get_order_book_config_map.py
|
TritumDigitalAssets/hummingbot
|
13fde61a41a0b13651117c06fc87d02a9cd55a44
|
[
"Apache-2.0"
] | 2 |
2019-09-14T12:55:03.000Z
|
2019-11-11T12:17:42.000Z
|
hummingbot/strategy/dev_1_get_order_book/dev_1_get_order_book_config_map.py
|
TritumDigitalAssets/hummingbot
|
13fde61a41a0b13651117c06fc87d02a9cd55a44
|
[
"Apache-2.0"
] | 1 |
2021-01-22T13:19:11.000Z
|
2021-01-22T13:19:11.000Z
|
hummingbot/strategy/dev_1_get_order_book/dev_1_get_order_book_config_map.py
|
TritumDigitalAssets/hummingbot
|
13fde61a41a0b13651117c06fc87d02a9cd55a44
|
[
"Apache-2.0"
] | 2 |
2020-03-25T00:47:45.000Z
|
2020-04-09T20:16:37.000Z
|
from hummingbot.client.config.config_var import ConfigVar
from hummingbot.client.config.config_validators import (
is_exchange,
is_valid_market_trading_pair,
)
from hummingbot.client.settings import (
required_exchanges,
EXAMPLE_PAIRS,
)
def trading_pair_prompt():
market = dev_1_get_order_book_config_map.get("market").value
example = EXAMPLE_PAIRS.get(market)
return "Enter the token trading pair to fetch its order book on %s%s >>> " \
% (market, f" (e.g. {example})" if example else "")
def str2bool(value: str):
return str(value).lower() in ("yes", "true", "t", "1")
# checks if the trading pair is valid
def is_valid_trading_pair(value: str) -> bool:
market = dev_1_get_order_book_config_map.get("market").value
return is_valid_market_trading_pair(market, value)
dev_1_get_order_book_config_map = {
"market":
ConfigVar(key="market",
prompt="Enter the name of the exchange >>> ",
validator=is_exchange,
on_validated=lambda value: required_exchanges.append(value)),
"market_trading_pair":
ConfigVar(key="market_trading_pair",
prompt=trading_pair_prompt,
validator=is_valid_trading_pair),
}
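# Illustrative usage sketch (not part of the original module): after the client
# has prompted the user, the validated answers can be read back from the map.
# market = dev_1_get_order_book_config_map["market"].value
# trading_pair = dev_1_get_order_book_config_map["market_trading_pair"].value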
| 31.675 | 80 | 0.679558 |
79482d321b71728a27694d9bdfc27142c54a9432
| 191 |
py
|
Python
|
create_python_app/config.py
|
xuchaoqian/create-python-app
|
70745432b972b96a3faf95a378d54d922b77f2be
|
[
"MIT"
] | null | null | null |
create_python_app/config.py
|
xuchaoqian/create-python-app
|
70745432b972b96a3faf95a378d54d922b77f2be
|
[
"MIT"
] | null | null | null |
create_python_app/config.py
|
xuchaoqian/create-python-app
|
70745432b972b96a3faf95a378d54d922b77f2be
|
[
"MIT"
] | null | null | null |
import json
import os
path = os.path.join(os.path.dirname(__file__), '..', 'config', 'sys.json')
with open(path) as file:
options = json.load(file)
def get_options():
return options
| 21.222222 | 74 | 0.680628 |
79482d74e2c538af850f937997d06bb4e473b9d9
| 1,194 |
py
|
Python
|
fisher_py/data/business/tune_data_values.py
|
abdelq/fisher_py
|
befb98732ba7c4e57858d158c68cda09ed829d66
|
[
"MIT"
] | 3 |
2021-11-03T20:55:45.000Z
|
2022-02-01T10:11:47.000Z
|
fisher_py/data/business/tune_data_values.py
|
abdelq/fisher_py
|
befb98732ba7c4e57858d158c68cda09ed829d66
|
[
"MIT"
] | 2 |
2022-01-28T02:04:21.000Z
|
2022-01-29T01:29:14.000Z
|
fisher_py/data/business/tune_data_values.py
|
abdelq/fisher_py
|
befb98732ba7c4e57858d158c68cda09ed829d66
|
[
"MIT"
] | 1 |
2022-01-26T23:30:37.000Z
|
2022-01-26T23:30:37.000Z
|
from fisher_py.net_wrapping import NetWrapperBase, ThermoFisher
from fisher_py.utils import to_net_list
from typing import List
class TuneDataValues(NetWrapperBase):
_wrapped_type = ThermoFisher.CommonCore.Data.Business.TuneDataValues
def __init__(self):
super().__init__()
self._wrapped_object = self._wrapped_type()
@property
def id(self) -> int:
"""
Gets or sets the index number of the tune record
"""
return self._get_wrapped_object_().ID
@id.setter
def id(self, value: int):
"""
Gets or sets the index number of the tune record
"""
assert type(value) is int
self._get_wrapped_object_().ID = value
@property
def values(self) -> List[str]:
"""
Gets or sets the array of tune data values for an instrument
"""
return self._get_wrapped_object_().Values
@values.setter
def values(self, value: List[str]):
"""
Gets or sets the array of tune data values for an instrument
"""
assert type(value) is list
value = to_net_list(value, str)
self._get_wrapped_object_().Values = value
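# Illustrative usage sketch (not part of the original module); it assumes the
# ThermoFisher .NET assemblies wrapped by fisher_py can be loaded at runtime.
# tune = TuneDataValues()
# tune.id = 1
# tune.values = ["ion source", "35.0"]  # list of strings, converted to a .NET list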
| 26.533333 | 72 | 0.632328 |
79482da9422bb9430757d06ef988e94bac22d774
| 372 |
py
|
Python
|
detector/YOLOX/tools/__init__.py
|
collector-m/UniTrack
|
e8e56e164f2dd40ba590a19ed7a4a75d8da7e2eb
|
[
"MIT"
] | 240 |
2021-06-20T13:50:42.000Z
|
2022-03-31T05:08:29.000Z
|
detector/YOLOX/tools/__init__.py
|
collector-m/UniTrack
|
e8e56e164f2dd40ba590a19ed7a4a75d8da7e2eb
|
[
"MIT"
] | 27 |
2021-07-12T01:19:39.000Z
|
2021-12-27T08:05:08.000Z
|
detector/YOLOX/tools/__init__.py
|
collector-m/UniTrack
|
e8e56e164f2dd40ba590a19ed7a4a75d8da7e2eb
|
[
"MIT"
] | 24 |
2021-07-01T09:48:24.000Z
|
2022-03-14T06:39:46.000Z
|
###################################################################
# File Name: __init__.py
# Author: Zhongdao Wang
# mail: wcd17@mails.tsinghua.edu.cn
# Created Time: Sun Jul 25 17:14:12 2021
###################################################################
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
| 33.818182 | 67 | 0.508065 |
79482da99e23ce36ad25c3058513f74a4e0a6483
| 11,054 |
py
|
Python
|
resolwe_bio/kb/tests/test_feature.py
|
MiaZganjar/resolwe-bio
|
1559435796cb40905d81c8fd14872f7ac66e66e8
|
[
"Apache-2.0"
] | null | null | null |
resolwe_bio/kb/tests/test_feature.py
|
MiaZganjar/resolwe-bio
|
1559435796cb40905d81c8fd14872f7ac66e66e8
|
[
"Apache-2.0"
] | null | null | null |
resolwe_bio/kb/tests/test_feature.py
|
MiaZganjar/resolwe-bio
|
1559435796cb40905d81c8fd14872f7ac66e66e8
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from resolwe.test import TestCase
from ..models import Feature
class FeatureTestCase(TestCase, APITestCase):
@staticmethod
def create_feature(index, source, species, feature_type):
return Feature.objects.create(
source=source,
feature_id="FT-{}".format(index),
species=species,
type=feature_type,
sub_type=Feature.SUBTYPE_PROTEIN_CODING,
name="FOO{}".format(index),
full_name="Foobarius machinus",
aliases=["BAR{}".format(index), "BTMK{}".format(index), "SHARED"],
)
@classmethod
def setUpTestData(cls):
cls.features = [
FeatureTestCase.create_feature(
0, "NCBI", "Homo sapiens", Feature.TYPE_GENE
),
FeatureTestCase.create_feature(
1, "NCBI", "Mus musculus", Feature.TYPE_GENE
),
FeatureTestCase.create_feature(2, "NCBI", "Rat rattus", Feature.TYPE_GENE),
FeatureTestCase.create_feature(
3, "NCBI", "Homo sapiens", Feature.TYPE_TRANSCRIPT
),
FeatureTestCase.create_feature(
4, "ENSEMBL", "Homo sapiens", Feature.TYPE_GENE
),
FeatureTestCase.create_feature(
5, "ENSEMBL", "Mus musculus", Feature.TYPE_GENE
),
FeatureTestCase.create_feature(
6, "ENSEMBL", "Rat rattus", Feature.TYPE_GENE
),
FeatureTestCase.create_feature(
7, "XSRC", "Homo sapiens", Feature.TYPE_GENE
),
FeatureTestCase.create_feature(
8, "XSRC", "Mus musculus", Feature.TYPE_GENE
),
FeatureTestCase.create_feature(9, "XSRC", "Rat rattus", Feature.TYPE_GENE),
]
def assertFeatureEqual(self, data, feature):
self.assertEqual(data["source"], feature.source)
self.assertEqual(data["species"], feature.species)
self.assertEqual(data["type"], feature.type)
self.assertEqual(data["sub_type"], feature.sub_type)
self.assertEqual(data["name"], feature.name)
self.assertEqual(data["full_name"], feature.full_name)
self.assertEqual(data["aliases"], feature.aliases)
def test_feature_search(self):
FEATURE_SEARCH_URL = reverse("resolwebio-api:kb_feature_search")
# Test without any query.
response = self.client.get(FEATURE_SEARCH_URL, format="json")
self.assertEqual(len(response.data), len(self.features))
# Test with empty query.
response = self.client.get(FEATURE_SEARCH_URL, {"query": ""}, format="json")
self.assertEqual(len(response.data), 10)
# Test with non-matching query.
response = self.client.get(FEATURE_SEARCH_URL, {"query": "F1"}, format="json")
self.assertEqual(len(response.data), 0)
# Test query by feature name.
response = self.client.get(
FEATURE_SEARCH_URL, {"query": self.features[0].name}, format="json"
)
self.assertEqual(len(response.data), 1)
self.assertFeatureEqual(response.data[0], self.features[0])
# Test query by alias.
response = self.client.get(
FEATURE_SEARCH_URL, {"query": self.features[0].aliases[0]}, format="json"
)
self.assertEqual(len(response.data), 1)
self.assertFeatureEqual(response.data[0], self.features[0])
# Test query by shared alias.
response = self.client.get(
FEATURE_SEARCH_URL, {"query": "SHARED"}, format="json"
)
self.assertEqual(len(response.data), len(self.features))
# Test query by multiple gene IDs.
response = self.client.get(
FEATURE_SEARCH_URL, {"query": "FOO1,FOO2,FT-7"}, format="json"
)
self.assertEqual(len(response.data), 3)
response = self.client.get(
FEATURE_SEARCH_URL, {"query": "FOO1,SHARED,FT-7"}, format="json"
)
self.assertEqual(len(response.data), len(self.features))
# Test query by source.
response = self.client.get(
FEATURE_SEARCH_URL, {"source": "NCBI"}, format="json"
)
self.assertEqual(len(response.data), 4)
# Test query by source.
response = self.client.get(
FEATURE_SEARCH_URL, {"source": "NCBI,ENSEMBL"}, format="json"
)
self.assertEqual(len(response.data), 7)
# Test query by species.
response = self.client.get(
FEATURE_SEARCH_URL, {"species": "Homo sapiens"}, format="json"
)
self.assertEqual(len(response.data), 4)
# Test query by species.
response = self.client.get(
FEATURE_SEARCH_URL, {"species": "Homo sapiens,Rat rattus"}, format="json"
)
self.assertEqual(len(response.data), 7)
# Test query by type.
response = self.client.get(FEATURE_SEARCH_URL, {"type": "gene"}, format="json")
self.assertEqual(len(response.data), 9)
# Mixed queries.
response = self.client.get(
FEATURE_SEARCH_URL,
{"query": "FOO1,FOO2,FT-3", "source": "NCBI", "species": "Homo sapiens"},
format="json",
)
self.assertEqual(len(response.data), 1)
response = self.client.get(
FEATURE_SEARCH_URL,
{"query": "FOO1,FOO2,FT-7", "source": "FOO"},
format="json",
)
self.assertEqual(len(response.data), 0)
# Test query with a lot of features.
response = self.client.get(
FEATURE_SEARCH_URL,
{"query": ",".join([str(x) for x in range(1024)])},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Test search by feature_id.
response = self.client.get(
FEATURE_SEARCH_URL,
{"feature_id": "FOO", "source": "XSRC", "species": "Homo sapiens"},
format="json",
)
self.assertEqual(len(response.data), 0)
response = self.client.get(
FEATURE_SEARCH_URL,
{"feature_id": "FT-9", "source": "XSRC", "species": "Rat rattus"},
format="json",
)
self.assertEqual(len(response.data), 1)
response = self.client.get(
FEATURE_SEARCH_URL,
{"feature_id": "FT-0,FT-3", "source": "NCBI", "species": "Homo sapiens"},
format="json",
)
self.assertEqual(len(response.data), 2)
response = self.client.get(
FEATURE_SEARCH_URL,
{
"feature_id": "FT-0,FT-3",
"source": "NCBI",
"species": "Homo sapiens",
"type": "gene",
},
format="json",
)
self.assertEqual(len(response.data), 1)
def test_serialization(self):
FEATURE_SEARCH_URL = reverse("resolwebio-api:kb_feature_search")
kwargs = {"feature_id": "FT-0"}
feature = Feature.objects.get(**kwargs)
response = self.client.get(FEATURE_SEARCH_URL, kwargs, format="json")
self.assertEqual(len(response.data), 1)
serialized_feature = response.data[0]
self.assertEqual(serialized_feature["aliases"], feature.aliases)
self.assertEqual(serialized_feature["description"], feature.description)
self.assertEqual(serialized_feature["feature_id"], feature.feature_id)
self.assertEqual(serialized_feature["full_name"], feature.full_name)
self.assertEqual(serialized_feature["id"], feature.id)
self.assertEqual(serialized_feature["name"], feature.name)
self.assertEqual(serialized_feature["source"], feature.source)
self.assertEqual(serialized_feature["species"], feature.species)
self.assertEqual(serialized_feature["sub_type"], feature.sub_type)
self.assertEqual(serialized_feature["type"], feature.type)
def test_feature_autocomplete(self):
FEATURE_AUTOCOMPLETE_URL = reverse("resolwebio-api:kb_feature_autocomplete")
# Test empty query.
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL, {"query": ""}, format="json"
)
self.assertEqual(len(response.data), 0)
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL,
{"query": "", "source": "NCBI", "species": "Homo sapiens"},
format="json",
)
self.assertEqual(len(response.data), 0)
response = self.client.get(FEATURE_AUTOCOMPLETE_URL, format="json")
self.assertEqual(len(response.data), 0)
# Test non-matching query.
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL, {"query": "FOU"}, format="json"
)
self.assertEqual(len(response.data), 0)
# Test partial name query.
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL, {"query": "FO"}, format="json"
)
self.assertEqual(len(response.data), len(self.features))
# Test partial name query with source and species.
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL,
{"query": "FO", "source": "NCBI", "species": "Homo sapiens"},
format="json",
)
self.assertEqual(len(response.data), 2)
# Test partial name query with source, species and type.
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL,
{
"query": "FO",
"source": "NCBI",
"species": "Homo sapiens",
"type": "gene",
},
format="json",
)
self.assertEqual(len(response.data), 1)
# Test partial alias query.
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL, {"query": "SHAR"}, format="json"
)
self.assertEqual(len(response.data), len(self.features))
# Test partial alias query with source and species.
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL,
{"query": "SHAR", "source": "XSRC", "species": "Rat rattus"},
format="json",
)
self.assertEqual(len(response.data), 1)
# Test query by full feature name.
feature = self.features[0]
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL,
{"query": feature.name, "source": feature.source},
format="json",
)
self.assertEqual(len(response.data), 1)
self.assertFeatureEqual(response.data[0], feature)
# Test query by alias.
feature = self.features[0]
response = self.client.get(
FEATURE_AUTOCOMPLETE_URL, {"query": feature.aliases[0]}, format="json"
)
self.assertEqual(len(response.data), 1)
self.assertFeatureEqual(response.data[0], feature)
| 37.09396 | 87 | 0.585308 |
79482e27fabe2ba7ecf44d0568dd4d7fbfa2414e
| 17,955 |
py
|
Python
|
lib/tool_shed/utility_containers/__init__.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | null | null | null |
lib/tool_shed/utility_containers/__init__.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | 6 |
2021-11-11T20:57:49.000Z
|
2021-12-10T15:30:33.000Z
|
lib/tool_shed/utility_containers/__init__.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | null | null | null |
import logging
import threading
from galaxy import util
from tool_shed.util import (
common_util,
container_util,
readme_util,
)
from . import utility_container_manager
log = logging.getLogger(__name__)
class FailedTest:
"""Failed tool tests object"""
def __init__(self, id=None, stderr=None, test_id=None, tool_id=None, tool_version=None, traceback=None):
self.id = id
self.stderr = stderr
self.test_id = test_id
self.tool_id = tool_id
self.tool_version = tool_version
self.traceback = traceback
class InvalidRepositoryDependency:
"""Invalid repository dependency definition object"""
def __init__(
self,
id=None,
toolshed=None,
repository_name=None,
repository_owner=None,
changeset_revision=None,
prior_installation_required=False,
only_if_compiling_contained_td=False,
error=None,
):
self.id = id
self.toolshed = toolshed
self.repository_name = repository_name
self.repository_owner = repository_owner
self.changeset_revision = changeset_revision
self.prior_installation_required = prior_installation_required
self.only_if_compiling_contained_td = only_if_compiling_contained_td
self.error = error
class InvalidToolDependency:
"""Invalid tool dependency definition object"""
def __init__(self, id=None, name=None, version=None, type=None, error=None):
self.id = id
self.name = name
self.version = version
self.type = type
self.error = error
class MissingTestComponent:
"""Missing tool test components object"""
def __init__(self, id=None, missing_components=None, tool_guid=None, tool_id=None, tool_version=None):
self.id = id
self.missing_components = missing_components
self.tool_guid = tool_guid
self.tool_id = tool_id
self.tool_version = tool_version
class NotTested:
"""NotTested object"""
def __init__(self, id=None, reason=None):
self.id = id
self.reason = reason
class PassedTest:
"""Passed tool tests object"""
def __init__(self, id=None, test_id=None, tool_id=None, tool_version=None):
self.id = id
self.test_id = test_id
self.tool_id = tool_id
self.tool_version = tool_version
class RepositoryInstallationError:
"""Repository installation error object"""
def __init__(self, id=None, tool_shed=None, name=None, owner=None, changeset_revision=None, error_message=None):
self.id = id
self.tool_shed = tool_shed
self.name = name
self.owner = owner
self.changeset_revision = changeset_revision
self.error_message = error_message
class RepositorySuccessfulInstallation:
"""Repository installation object"""
def __init__(self, id=None, tool_shed=None, name=None, owner=None, changeset_revision=None):
self.id = id
self.tool_shed = tool_shed
self.name = name
self.owner = owner
self.changeset_revision = changeset_revision
class ToolDependencyInstallationError:
"""Tool dependency installation error object"""
def __init__(self, id=None, type=None, name=None, version=None, error_message=None):
self.id = id
self.type = type
self.name = name
self.version = version
self.error_message = error_message
class ToolDependencySuccessfulInstallation:
"""Tool dependency installation object"""
def __init__(self, id=None, type=None, name=None, version=None, installation_directory=None):
self.id = id
self.type = type
self.name = name
self.version = version
self.installation_directory = installation_directory
class ToolShedUtilityContainerManager(utility_container_manager.UtilityContainerManager):
def __init__(self, app):
self.app = app
def build_invalid_repository_dependencies_root_folder(self, folder_id, invalid_repository_dependencies_dict):
"""Return a folder hierarchy containing invalid repository dependencies."""
label = "Invalid repository dependencies"
if invalid_repository_dependencies_dict:
invalid_repository_dependency_id = 0
folder_id += 1
invalid_repository_dependencies_root_folder = utility_container_manager.Folder(
id=folder_id, key="root", label="root", parent=None
)
folder_id += 1
invalid_repository_dependencies_folder = utility_container_manager.Folder(
id=folder_id,
key="invalid_repository_dependencies",
label=label,
parent=invalid_repository_dependencies_root_folder,
)
invalid_repository_dependencies_root_folder.folders.append(invalid_repository_dependencies_folder)
invalid_repository_dependencies = invalid_repository_dependencies_dict["repository_dependencies"]
for invalid_repository_dependency in invalid_repository_dependencies:
folder_id += 1
invalid_repository_dependency_id += 1
(
toolshed,
name,
owner,
changeset_revision,
prior_installation_required,
only_if_compiling_contained_td,
error,
) = common_util.parse_repository_dependency_tuple(invalid_repository_dependency, contains_error=True)
key = container_util.generate_repository_dependencies_key_for_repository(
toolshed,
name,
owner,
changeset_revision,
prior_installation_required,
only_if_compiling_contained_td,
)
label = f"Repository <b>{name}</b> revision <b>{changeset_revision}</b> owned by <b>{owner}</b>"
folder = utility_container_manager.Folder(
id=folder_id, key=key, label=label, parent=invalid_repository_dependencies_folder
)
ird = InvalidRepositoryDependency(
id=invalid_repository_dependency_id,
toolshed=toolshed,
repository_name=name,
repository_owner=owner,
changeset_revision=changeset_revision,
prior_installation_required=util.asbool(prior_installation_required),
only_if_compiling_contained_td=util.asbool(only_if_compiling_contained_td),
error=error,
)
folder.invalid_repository_dependencies.append(ird)
invalid_repository_dependencies_folder.folders.append(folder)
else:
invalid_repository_dependencies_root_folder = None
return folder_id, invalid_repository_dependencies_root_folder
def build_invalid_tool_dependencies_root_folder(self, folder_id, invalid_tool_dependencies_dict):
"""Return a folder hierarchy containing invalid tool dependencies."""
        # Invalid tool dependencies are always packages like:
        # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1", "error": "some string"}}
label = "Invalid tool dependencies"
if invalid_tool_dependencies_dict:
invalid_tool_dependency_id = 0
folder_id += 1
invalid_tool_dependencies_root_folder = utility_container_manager.Folder(
id=folder_id, key="root", label="root", parent=None
)
folder_id += 1
invalid_tool_dependencies_folder = utility_container_manager.Folder(
id=folder_id, key="invalid_tool_dependencies", label=label, parent=invalid_tool_dependencies_root_folder
)
invalid_tool_dependencies_root_folder.folders.append(invalid_tool_dependencies_folder)
for requirements_dict in invalid_tool_dependencies_dict.values():
folder_id += 1
invalid_tool_dependency_id += 1
try:
name = requirements_dict["name"]
type = requirements_dict["type"]
version = requirements_dict["version"]
error = requirements_dict["error"]
except Exception as e:
name = "unknown"
type = "unknown"
version = "unknown"
error = str(e)
key = self.generate_tool_dependencies_key(name, version, type)
label = f"Version <b>{version}</b> of the <b>{name}</b> <b>{type}</b>"
folder = utility_container_manager.Folder(
id=folder_id, key=key, label=label, parent=invalid_tool_dependencies_folder
)
itd = InvalidToolDependency(
id=invalid_tool_dependency_id, name=name, version=version, type=type, error=error
)
folder.invalid_tool_dependencies.append(itd)
invalid_tool_dependencies_folder.folders.append(folder)
else:
invalid_tool_dependencies_root_folder = None
return folder_id, invalid_tool_dependencies_root_folder
def build_repository_containers(
self, repository, changeset_revision, repository_dependencies, repository_metadata, exclude=None
):
"""
Return a dictionary of containers for the received repository's dependencies and
contents for display in the Tool Shed.
"""
if exclude is None:
exclude = []
containers_dict = dict(
datatypes=None,
invalid_tools=None,
readme_files=None,
repository_dependencies=None,
tool_dependencies=None,
valid_tools=None,
workflows=None,
valid_data_managers=None,
)
if repository_metadata:
metadata = repository_metadata.metadata
lock = threading.Lock()
lock.acquire(True)
try:
folder_id = 0
# Datatypes container.
if metadata:
if "datatypes" not in exclude and "datatypes" in metadata:
datatypes = metadata["datatypes"]
folder_id, datatypes_root_folder = self.build_datatypes_folder(folder_id, datatypes)
containers_dict["datatypes"] = datatypes_root_folder
# Invalid repository dependencies container.
if metadata:
if (
"invalid_repository_dependencies" not in exclude
and "invalid_repository_dependencies" in metadata
):
invalid_repository_dependencies = metadata["invalid_repository_dependencies"]
(
folder_id,
invalid_repository_dependencies_root_folder,
) = self.build_invalid_repository_dependencies_root_folder(
folder_id, invalid_repository_dependencies
)
containers_dict["invalid_repository_dependencies"] = invalid_repository_dependencies_root_folder
# Invalid tool dependencies container.
if metadata:
if "invalid_tool_dependencies" not in exclude and "invalid_tool_dependencies" in metadata:
invalid_tool_dependencies = metadata["invalid_tool_dependencies"]
(
folder_id,
invalid_tool_dependencies_root_folder,
) = self.build_invalid_tool_dependencies_root_folder(folder_id, invalid_tool_dependencies)
containers_dict["invalid_tool_dependencies"] = invalid_tool_dependencies_root_folder
# Invalid tools container.
if metadata:
if "invalid_tools" not in exclude and "invalid_tools" in metadata:
invalid_tool_configs = metadata["invalid_tools"]
folder_id, invalid_tools_root_folder = self.build_invalid_tools_folder(
folder_id,
invalid_tool_configs,
changeset_revision,
repository=repository,
label="Invalid tools",
)
containers_dict["invalid_tools"] = invalid_tools_root_folder
# Readme files container.
if metadata:
if "readme_files" not in exclude and "readme_files" in metadata:
readme_files_dict = readme_util.build_readme_files_dict(
self.app, repository, changeset_revision, metadata
)
folder_id, readme_files_root_folder = self.build_readme_files_folder(
folder_id, readme_files_dict
)
containers_dict["readme_files"] = readme_files_root_folder
if "repository_dependencies" not in exclude:
# Repository dependencies container.
folder_id, repository_dependencies_root_folder = self.build_repository_dependencies_folder(
folder_id=folder_id,
repository_dependencies=repository_dependencies,
label="Repository dependencies",
installed=False,
)
if repository_dependencies_root_folder:
containers_dict["repository_dependencies"] = repository_dependencies_root_folder
# Tool dependencies container.
if metadata:
if "tool_dependencies" not in exclude and "tool_dependencies" in metadata:
tool_dependencies = metadata["tool_dependencies"]
if "orphan_tool_dependencies" in metadata:
# The use of the orphan_tool_dependencies category in metadata has been deprecated,
# but we still need to check in case the metadata is out of date.
orphan_tool_dependencies = metadata["orphan_tool_dependencies"]
tool_dependencies.update(orphan_tool_dependencies)
# Tool dependencies can be categorized as orphans only if the repository contains tools.
if "tools" not in exclude:
tools = metadata.get("tools", [])
tools.extend(metadata.get("invalid_tools", []))
folder_id, tool_dependencies_root_folder = self.build_tool_dependencies_folder(
folder_id, tool_dependencies, missing=False, new_install=False
)
containers_dict["tool_dependencies"] = tool_dependencies_root_folder
# Valid tools container.
if metadata:
if "tools" not in exclude and "tools" in metadata:
valid_tools = metadata["tools"]
folder_id, valid_tools_root_folder = self.build_tools_folder(
folder_id, valid_tools, repository, changeset_revision, label="Valid tools"
)
containers_dict["valid_tools"] = valid_tools_root_folder
# Workflows container.
if metadata:
if "workflows" not in exclude and "workflows" in metadata:
workflows = metadata["workflows"]
folder_id, workflows_root_folder = self.build_workflows_folder(
folder_id=folder_id,
workflows=workflows,
repository_metadata_id=repository_metadata.id,
repository_id=None,
label="Workflows",
)
containers_dict["workflows"] = workflows_root_folder
# Valid Data Managers container
if metadata:
if "data_manager" not in exclude and "data_manager" in metadata:
data_managers = metadata["data_manager"].get("data_managers", None)
folder_id, data_managers_root_folder = self.build_data_managers_folder(
folder_id, data_managers, label="Data Managers"
)
containers_dict["valid_data_managers"] = data_managers_root_folder
error_messages = metadata["data_manager"].get("error_messages", None)
data_managers = metadata["data_manager"].get("invalid_data_managers", None)
folder_id, data_managers_root_folder = self.build_invalid_data_managers_folder(
folder_id, data_managers, error_messages, label="Invalid Data Managers"
)
containers_dict["invalid_data_managers"] = data_managers_root_folder
except Exception:
log.exception("Exception in build_repository_containers")
finally:
lock.release()
return containers_dict
def generate_tool_dependencies_key(self, name, version, type):
return f"{str(name)}{container_util.STRSEP}{str(version)}{container_util.STRSEP}{str(type)}"
| 46.515544 | 125 | 0.596825 |
79482e80a92ef3d4b4eb8511e7801ab2ac27d2a8
| 47,503 |
py
|
Python
|
tests/test_repository_lib.py
|
Ajedi32/tuf
|
5733e6138a9fdb3e822ad90e920abd5520115a68
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
tests/test_repository_lib.py
|
Ajedi32/tuf
|
5733e6138a9fdb3e822ad90e920abd5520115a68
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
tests/test_repository_lib.py
|
Ajedi32/tuf
|
5733e6138a9fdb3e822ad90e920abd5520115a68
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
<Program Name>
test_repository_lib.py
<Author>
Vladimir Diaz <vladimir.v.diaz@gmail.com>
<Started>
June 1, 2014.
<Copyright>
See LICENSE for licensing information.
<Purpose>
Unit test for 'repository_lib.py'.
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import time
import datetime
import logging
import tempfile
import json
import shutil
import stat
import sys
# 'unittest2' required for testing under Python < 2.7.
if sys.version_info >= (2, 7):
import unittest
else:
import unittest2 as unittest
import tuf
import tuf.formats
import tuf.log
import tuf.roledb
import tuf.keydb
import tuf.settings
import tuf.repository_lib as repo_lib
import tuf.repository_tool as repo_tool
import securesystemslib
import six
logger = logging.getLogger('tuf.test_repository_lib')
repo_lib.disable_console_log_messages()
class TestRepositoryToolFunctions(unittest.TestCase):
@classmethod
def setUpClass(cls):
# setUpClass() is called before tests in an individual class are executed.
# Create a temporary directory to store the repository, metadata, and target
# files. 'temporary_directory' must be deleted in TearDownClass() so that
# temporary files are always removed, even when exceptions occur.
tuf.roledb.clear_roledb(clear_all=True)
tuf.keydb.clear_keydb(clear_all=True)
cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())
@classmethod
def tearDownClass(cls):
    # tearDownClass() is called after all the tests in the class have run.
# http://docs.python.org/2/library/unittest.html#class-and-module-fixtures
# Remove the temporary repository directory, which should contain all the
# metadata, targets, and key files generated for the test cases.
tuf.roledb.clear_roledb(clear_all=True)
tuf.keydb.clear_keydb(clear_all=True)
shutil.rmtree(cls.temporary_directory)
def setUp(self):
tuf.roledb.create_roledb('test_repository')
tuf.keydb.create_keydb('test_repository')
def tearDown(self):
tuf.roledb.clear_roledb(clear_all=True)
tuf.keydb.clear_keydb(clear_all=True)
def test_generate_and_write_rsa_keypair(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
test_keypath = os.path.join(temporary_directory, 'rsa_key')
repo_lib.generate_and_write_rsa_keypair(test_keypath, password='pw')
self.assertTrue(os.path.exists(test_keypath))
self.assertTrue(os.path.exists(test_keypath + '.pub'))
# Ensure the generated key files are importable.
imported_pubkey = \
repo_lib.import_rsa_publickey_from_file(test_keypath + '.pub')
self.assertTrue(securesystemslib.formats.RSAKEY_SCHEMA.matches(imported_pubkey))
imported_privkey = \
repo_lib.import_rsa_privatekey_from_file(test_keypath, 'pw')
self.assertTrue(securesystemslib.formats.RSAKEY_SCHEMA.matches(imported_privkey))
# Custom 'bits' argument.
os.remove(test_keypath)
os.remove(test_keypath + '.pub')
repo_lib.generate_and_write_rsa_keypair(test_keypath, bits=2048,
password='pw')
self.assertTrue(os.path.exists(test_keypath))
self.assertTrue(os.path.exists(test_keypath + '.pub'))
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_and_write_rsa_keypair,
3, bits=2048, password='pw')
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_and_write_rsa_keypair,
test_keypath, bits='bad', password='pw')
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_and_write_rsa_keypair,
test_keypath, bits=2048, password=3)
# Test invalid 'bits' argument.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_and_write_rsa_keypair,
test_keypath, bits=1024, password='pw')
def test_import_rsa_privatekey_from_file(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
# Load one of the pre-generated key files from 'tuf/tests/repository_data'.
# 'password' unlocks the pre-generated key files.
key_filepath = os.path.join('repository_data', 'keystore',
'root_key')
self.assertTrue(os.path.exists(key_filepath))
imported_rsa_key = repo_lib.import_rsa_privatekey_from_file(key_filepath,
'password')
self.assertTrue(securesystemslib.formats.RSAKEY_SCHEMA.matches(imported_rsa_key))
# Test improperly formatted argument.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.import_rsa_privatekey_from_file, 3, 'pw')
# Test invalid argument.
# Non-existent key file.
nonexistent_keypath = os.path.join(temporary_directory,
'nonexistent_keypath')
self.assertRaises(IOError, repo_lib.import_rsa_privatekey_from_file,
nonexistent_keypath, 'pw')
# Invalid key file argument.
invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile')
with open(invalid_keyfile, 'wb') as file_object:
file_object.write(b'bad keyfile')
self.assertRaises(securesystemslib.exceptions.CryptoError, repo_lib.import_rsa_privatekey_from_file,
invalid_keyfile, 'pw')
def test_import_rsa_publickey_from_file(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
# Load one of the pre-generated key files from 'tuf/tests/repository_data'.
key_filepath = os.path.join('repository_data', 'keystore',
'root_key.pub')
self.assertTrue(os.path.exists(key_filepath))
imported_rsa_key = repo_lib.import_rsa_publickey_from_file(key_filepath)
self.assertTrue(securesystemslib.formats.RSAKEY_SCHEMA.matches(imported_rsa_key))
# Test improperly formatted argument.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.import_rsa_privatekey_from_file, 3)
# Test invalid argument.
# Non-existent key file.
nonexistent_keypath = os.path.join(temporary_directory,
'nonexistent_keypath')
self.assertRaises(IOError, repo_lib.import_rsa_publickey_from_file,
nonexistent_keypath)
# Invalid key file argument.
invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile')
with open(invalid_keyfile, 'wb') as file_object:
file_object.write(b'bad keyfile')
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.import_rsa_publickey_from_file,
invalid_keyfile)
def test_generate_and_write_ed25519_keypair(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
test_keypath = os.path.join(temporary_directory, 'ed25519_key')
repo_lib.generate_and_write_ed25519_keypair(test_keypath, password='pw')
self.assertTrue(os.path.exists(test_keypath))
self.assertTrue(os.path.exists(test_keypath + '.pub'))
# Ensure the generated key files are importable.
imported_pubkey = \
repo_lib.import_ed25519_publickey_from_file(test_keypath + '.pub')
self.assertTrue(securesystemslib.formats.ED25519KEY_SCHEMA.matches(imported_pubkey))
imported_privkey = \
repo_lib.import_ed25519_privatekey_from_file(test_keypath, 'pw')
self.assertTrue(securesystemslib.formats.ED25519KEY_SCHEMA.matches(imported_privkey))
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.generate_and_write_ed25519_keypair,
3, password='pw')
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_and_write_rsa_keypair,
test_keypath, password=3)
def test_import_ed25519_publickey_from_file(self):
# Test normal case.
# Generate ed25519 keys that can be imported.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
ed25519_keypath = os.path.join(temporary_directory, 'ed25519_key')
repo_lib.generate_and_write_ed25519_keypair(ed25519_keypath, password='pw')
imported_ed25519_key = \
repo_lib.import_ed25519_publickey_from_file(ed25519_keypath + '.pub')
self.assertTrue(securesystemslib.formats.ED25519KEY_SCHEMA.matches(imported_ed25519_key))
# Test improperly formatted argument.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.import_ed25519_publickey_from_file, 3)
# Test invalid argument.
# Non-existent key file.
nonexistent_keypath = os.path.join(temporary_directory,
'nonexistent_keypath')
self.assertRaises(IOError, repo_lib.import_ed25519_publickey_from_file,
nonexistent_keypath)
# Invalid key file argument.
invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile')
with open(invalid_keyfile, 'wb') as file_object:
file_object.write(b'bad keyfile')
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.import_ed25519_publickey_from_file,
invalid_keyfile)
# Invalid public key imported (contains unexpected keytype.)
keytype = imported_ed25519_key['keytype']
keyval = imported_ed25519_key['keyval']
scheme = imported_ed25519_key['scheme']
ed25519key_metadata_format = \
securesystemslib.keys.format_keyval_to_metadata(keytype, scheme,
keyval, private=False)
ed25519key_metadata_format['keytype'] = 'invalid_keytype'
with open(ed25519_keypath + '.pub', 'wb') as file_object:
file_object.write(json.dumps(ed25519key_metadata_format).encode('utf-8'))
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.import_ed25519_publickey_from_file,
ed25519_keypath + '.pub')
def test_import_ed25519_privatekey_from_file(self):
# Test normal case.
# Generate ed25519 keys that can be imported.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
ed25519_keypath = os.path.join(temporary_directory, 'ed25519_key')
repo_lib.generate_and_write_ed25519_keypair(ed25519_keypath, password='pw')
imported_ed25519_key = \
repo_lib.import_ed25519_privatekey_from_file(ed25519_keypath, 'pw')
self.assertTrue(securesystemslib.formats.ED25519KEY_SCHEMA.matches(imported_ed25519_key))
# Test improperly formatted argument.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.import_ed25519_privatekey_from_file, 3, 'pw')
# Test invalid argument.
# Non-existent key file.
nonexistent_keypath = os.path.join(temporary_directory,
'nonexistent_keypath')
self.assertRaises(IOError, repo_lib.import_ed25519_privatekey_from_file,
nonexistent_keypath, 'pw')
# Invalid key file argument.
invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile')
with open(invalid_keyfile, 'wb') as file_object:
file_object.write(b'bad keyfile')
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.import_ed25519_privatekey_from_file,
invalid_keyfile, 'pw')
# Invalid private key imported (contains unexpected keytype.)
imported_ed25519_key['keytype'] = 'invalid_keytype'
# Use 'pycrypto_keys.py' to bypass the key format validation performed by
# 'keys.py'.
salt, iterations, derived_key = \
securesystemslib.pycrypto_keys._generate_derived_key('pw')
# Store the derived key info in a dictionary, the object expected
# by the non-public _encrypt() routine.
derived_key_information = {'salt': salt, 'iterations': iterations,
'derived_key': derived_key}
# Convert the key object to json string format and encrypt it with the
# derived key.
encrypted_key = \
securesystemslib.pycrypto_keys._encrypt(json.dumps(imported_ed25519_key),
derived_key_information)
with open(ed25519_keypath, 'wb') as file_object:
file_object.write(encrypted_key.encode('utf-8'))
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.import_ed25519_privatekey_from_file,
ed25519_keypath, 'pw')
def test_get_metadata_filenames(self):
# Test normal case.
metadata_directory = os.path.join('metadata/')
filenames = {'root.json': metadata_directory + 'root.json',
'targets.json': metadata_directory + 'targets.json',
'snapshot.json': metadata_directory + 'snapshot.json',
'timestamp.json': metadata_directory + 'timestamp.json'}
self.assertEqual(filenames, repo_lib.get_metadata_filenames('metadata/'))
# If a directory argument is not specified, the current working directory
# is used.
metadata_directory = os.getcwd()
filenames = {'root.json': os.path.join(metadata_directory, 'root.json'),
'targets.json': os.path.join(metadata_directory, 'targets.json'),
'snapshot.json': os.path.join(metadata_directory, 'snapshot.json'),
'timestamp.json': os.path.join(metadata_directory, 'timestamp.json')}
self.assertEqual(filenames, repo_lib.get_metadata_filenames())
# Test improperly formatted argument.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.get_metadata_filenames, 3)
def test_get_metadata_fileinfo(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
test_filepath = os.path.join(temporary_directory, 'file.txt')
with open(test_filepath, 'wt') as file_object:
file_object.write('test file')
# Generate test fileinfo object. It is assumed SHA256 and SHA512 hashes
# are computed by get_metadata_fileinfo().
file_length = os.path.getsize(test_filepath)
sha256_digest_object = securesystemslib.hash.digest_filename(test_filepath)
sha512_digest_object = securesystemslib.hash.digest_filename(test_filepath, algorithm='sha512')
file_hashes = {'sha256': sha256_digest_object.hexdigest(),
'sha512': sha512_digest_object.hexdigest()}
fileinfo = {'length': file_length, 'hashes': file_hashes}
self.assertTrue(tuf.formats.FILEINFO_SCHEMA.matches(fileinfo))
self.assertEqual(fileinfo, repo_lib.get_metadata_fileinfo(test_filepath))
# Test improperly formatted argument.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.get_metadata_fileinfo, 3)
# Test non-existent file.
nonexistent_filepath = os.path.join(temporary_directory, 'oops.txt')
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.get_metadata_fileinfo,
nonexistent_filepath)
def test_get_target_hash(self):
# Test normal case.
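    # Note: get_target_hash() operates on the target's *path* string, not on
    # any file contents; the expected digests below are hashes of the paths
    # themselves.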
expected_target_hashes = {
'/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d',
'/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0',
'/packages/file2.txt': 'c9c4a5cdd84858dd6a23d98d7e6e6b2aec45034946c16b2200bc317c75415e92'
}
for filepath, target_hash in six.iteritems(expected_target_hashes):
self.assertTrue(securesystemslib.formats.RELPATH_SCHEMA.matches(filepath))
self.assertTrue(securesystemslib.formats.HASH_SCHEMA.matches(target_hash))
self.assertEqual(repo_lib.get_target_hash(filepath), target_hash)
# Test for improperly formatted argument.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.get_target_hash, 8)
def test_generate_root_metadata(self):
# Test normal case.
# Load the root metadata provided in 'tuf/tests/repository_data/'.
root_filepath = os.path.join('repository_data', 'repository',
'metadata', 'root.json')
root_signable = securesystemslib.util.load_json_file(root_filepath)
# generate_root_metadata() expects the top-level roles and keys to be
# available in 'tuf.keydb' and 'tuf.roledb'.
tuf.roledb.create_roledb_from_root_metadata(root_signable['signed'])
tuf.keydb.create_keydb_from_root_metadata(root_signable['signed'])
expires = '1985-10-21T01:22:00Z'
root_metadata = repo_lib.generate_root_metadata(1, expires,
consistent_snapshot=False)
self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata))
root_keyids = tuf.roledb.get_role_keyids('root')
tuf.keydb._keydb_dict['default'][root_keyids[0]]['keytype'] = 'bad_keytype'
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_root_metadata, 1,
expires, consistent_snapshot=False)
# Reset the root key's keytype, so that we can next verify that a different
# securesystemslib.exceptions.Error exception is raised for duplicate keyids.
tuf.keydb._keydb_dict['default'][root_keyids[0]]['keytype'] = 'rsa'
# Add duplicate keyid to root's roleinfo.
tuf.roledb._roledb_dict['default']['root']['keyids'].append(root_keyids[0])
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_root_metadata, 1,
expires, consistent_snapshot=False)
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata,
'3', expires, False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata,
1, '3', False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata,
1, expires, 3)
# Test for missing required roles and keys.
tuf.roledb.clear_roledb()
tuf.keydb.clear_keydb()
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_root_metadata,
1, expires, False)
def test_generate_targets_metadata(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
targets_directory = os.path.join(temporary_directory, 'targets')
file1_path = os.path.join(targets_directory, 'file.txt')
securesystemslib.util.ensure_parent_dir(file1_path)
with open(file1_path, 'wt') as file_object:
file_object.write('test file.')
# Set valid generate_targets_metadata() arguments. Add a custom field for
# the 'target_files' target set below.
version = 1
datetime_object = datetime.datetime(2030, 1, 1, 12, 0)
expiration_date = datetime_object.isoformat() + 'Z'
file_permissions = oct(os.stat(file1_path).st_mode)[4:]
target_files = {'file.txt': {'file_permission': file_permissions}}
delegations = {"keys": {
"a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf": {
"keytype": "ed25519",
"keyval": {
"public": "3eb81026ded5af2c61fb3d4b272ac53cd1049a810ee88f4df1fc35cdaf918157"
}
}
},
"roles": [
{
"keyids": [
"a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf"
],
"name": "targets/warehouse",
"paths": [
"/file1.txt", "/README.txt", '/warehouse/'
],
"threshold": 1
}
]
}
targets_metadata = \
repo_lib.generate_targets_metadata(targets_directory, target_files,
version, expiration_date, delegations,
False)
self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata))
# Valid arguments with 'delegations' set to None.
targets_metadata = \
repo_lib.generate_targets_metadata(targets_directory, target_files,
version, expiration_date, None,
False)
self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata))
# Verify that 'digest.filename' file is saved to 'targets_directory' if
# the 'write_consistent_targets' argument is True.
list_targets_directory = os.listdir(targets_directory)
targets_metadata = \
repo_lib.generate_targets_metadata(targets_directory, target_files,
version, expiration_date, delegations,
write_consistent_targets=True)
new_list_targets_directory = os.listdir(targets_directory)
    # Verify that 'targets_directory' now contains the extra consistent-target
    # copies (named <digest>.<filename>).
    self.assertTrue(len(new_list_targets_directory) >
                    len(list_targets_directory))
# Verify that an exception is not raised if the target files already exist.
repo_lib.generate_targets_metadata(targets_directory, target_files,
version, expiration_date, delegations,
write_consistent_targets=True)
# Verify that 'targets_metadata' contains a 'custom' entry (optional)
# for 'file.txt'.
self.assertTrue('custom' in targets_metadata['targets']['file.txt'])
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
3, target_files, version, expiration_date)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
targets_directory, 3, version, expiration_date)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
targets_directory, target_files, '3', expiration_date)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
targets_directory, target_files, version, '3')
# Improperly formatted 'delegations' and 'write_consistent_targets'
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
targets_directory, target_files, version, expiration_date,
3, False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
targets_directory, target_files, version, expiration_date,
delegations, 3)
# Test non-existent target file.
bad_target_file = \
{'non-existent.txt': {'file_permission': file_permissions}}
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata,
targets_directory, bad_target_file, version,
expiration_date)
def test_generate_snapshot_metadata(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
original_repository_path = os.path.join('repository_data',
'repository')
repository_directory = os.path.join(temporary_directory, 'repository')
shutil.copytree(original_repository_path, repository_directory)
metadata_directory = os.path.join(repository_directory,
repo_lib.METADATA_STAGED_DIRECTORY_NAME)
targets_directory = os.path.join(repository_directory, repo_lib.TARGETS_DIRECTORY_NAME)
root_filename = os.path.join(metadata_directory, repo_lib.ROOT_FILENAME)
targets_filename = os.path.join(metadata_directory,
repo_lib.TARGETS_FILENAME)
version = 1
expiration_date = '1985-10-21T13:20:00Z'
# Load a valid repository so that top-level roles exist in roledb and
# generate_snapshot_metadata() has roles to specify in snapshot metadata.
repository = repo_tool.Repository(repository_directory, metadata_directory,
targets_directory)
repository_junk = repo_tool.load_repository(repository_directory)
root_filename = 'root'
targets_filename = 'targets'
snapshot_metadata = \
repo_lib.generate_snapshot_metadata(metadata_directory, version,
expiration_date, root_filename,
targets_filename,
consistent_snapshot=False)
self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
3, version, expiration_date,
root_filename, targets_filename, consistent_snapshot=False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
metadata_directory, '3', expiration_date,
root_filename, targets_filename, consistent_snapshot=False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
metadata_directory, version, '3',
root_filename, targets_filename, consistent_snapshot=False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
metadata_directory, version, expiration_date,
3, targets_filename, consistent_snapshot=False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
metadata_directory, version, expiration_date,
root_filename, 3, consistent_snapshot=False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
metadata_directory, version, expiration_date,
root_filename, targets_filename, 3)
def test_generate_timestamp_metadata(self):
# Test normal case.
repository_name = 'test_repository'
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
original_repository_path = os.path.join('repository_data',
'repository')
repository_directory = os.path.join(temporary_directory, 'repository')
shutil.copytree(original_repository_path, repository_directory)
metadata_directory = os.path.join(repository_directory,
repo_lib.METADATA_STAGED_DIRECTORY_NAME)
targets_directory = os.path.join(repository_directory, repo_lib.TARGETS_DIRECTORY_NAME)
snapshot_filename = os.path.join(metadata_directory,
repo_lib.SNAPSHOT_FILENAME)
# Set valid generate_timestamp_metadata() arguments.
version = 1
expiration_date = '1985-10-21T13:20:00Z'
# Load a valid repository so that top-level roles exist in roledb and
# generate_snapshot_metadata() has roles to specify in snapshot metadata.
repository = repo_tool.Repository(repository_directory, metadata_directory,
targets_directory, repository_name)
repository_junk = repo_tool.load_repository(repository_directory,
repository_name)
timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_filename,
version, expiration_date, repository_name)
self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata))
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.generate_timestamp_metadata, 3, version, expiration_date,
repository_name)
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.generate_timestamp_metadata, snapshot_filename, '3',
expiration_date, repository_name)
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.generate_timestamp_metadata, snapshot_filename, version, '3',
repository_name)
def test_sign_metadata(self):
# Test normal case.
repository_name = 'test_repository'
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
metadata_path = os.path.join('repository_data', 'repository', 'metadata')
keystore_path = os.path.join('repository_data', 'keystore')
root_filename = os.path.join(metadata_path, 'root.json')
root_metadata = securesystemslib.util.load_json_file(root_filename)['signed']
targets_filename = os.path.join(metadata_path, 'targets.json')
targets_metadata = securesystemslib.util.load_json_file(targets_filename)['signed']
tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name)
tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name)
root_keyids = tuf.roledb.get_role_keyids('root', repository_name)
targets_keyids = tuf.roledb.get_role_keyids('targets', repository_name)
root_private_keypath = os.path.join(keystore_path, 'root_key')
root_private_key = repo_lib.import_rsa_privatekey_from_file(root_private_keypath,
'password')
# Sign with a valid, but not a threshold, key.
targets_public_keypath = os.path.join(keystore_path, 'targets_key.pub')
targets_public_key = \
repo_lib.import_ed25519_publickey_from_file(targets_public_keypath)
# sign_metadata() expects the private key 'root_metadata' to be in
# 'tuf.keydb'. Remove any public keys that may be loaded before
# adding private key, otherwise a 'tuf.KeyAlreadyExists' exception is
# raised.
tuf.keydb.remove_key(root_private_key['keyid'],
repository_name=repository_name)
tuf.keydb.add_key(root_private_key, repository_name=repository_name)
tuf.keydb.remove_key(targets_public_key['keyid'], repository_name=repository_name)
tuf.keydb.add_key(targets_public_key, repository_name=repository_name)
# Verify that a valid root signable is generated.
root_signable = repo_lib.sign_metadata(root_metadata, root_keyids,
root_filename, repository_name)
self.assertTrue(tuf.formats.SIGNABLE_SCHEMA.matches(root_signable))
# Test for an unset private key (in this case, target's).
repo_lib.sign_metadata(targets_metadata, targets_keyids, targets_filename,
repository_name)
# Add an invalid keytype to one of the root keys.
root_keyid = root_keyids[0]
tuf.keydb._keydb_dict[repository_name][root_keyid]['keytype'] = 'bad_keytype'
self.assertRaises(securesystemslib.exceptions.Error, repo_lib.sign_metadata,
root_metadata, root_keyids, root_filename, repository_name)
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.sign_metadata, 3, root_keyids, 'root.json', repository_name)
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.sign_metadata, root_metadata, 3, 'root.json', repository_name)
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.sign_metadata, root_metadata, root_keyids, 3, repository_name)
def test_write_metadata_file(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
metadata_directory = os.path.join('repository_data', 'repository', 'metadata')
root_filename = os.path.join(metadata_directory, 'root.json')
root_signable = securesystemslib.util.load_json_file(root_filename)
output_filename = os.path.join(temporary_directory, 'root.json')
version_number = root_signable['signed']['version'] + 1
self.assertFalse(os.path.exists(output_filename))
repo_lib.write_metadata_file(root_signable, output_filename, version_number,
consistent_snapshot=False)
self.assertTrue(os.path.exists(output_filename))
# Attempt to over-write the previously written metadata file. An exception
# is not raised in this case, only a debug message is logged.
repo_lib.write_metadata_file(root_signable, output_filename, version_number,
consistent_snapshot=False)
    # Try to write a consistent metadata file. An exception is not raised in
# this case. For testing purposes, root.json should be a hard link to the
# consistent metadata file. We should verify that root.json points to
# the latest consistent files.
tuf.settings.CONSISTENT_METHOD = 'hard_link'
repo_lib.write_metadata_file(root_signable, output_filename, version_number,
consistent_snapshot=True)
# Test if the consistent files are properly named
# Filename format of a consistent file: <version number>.rolename.json
version_and_filename = str(version_number) + '.' + 'root.json'
first_version_output_file = os.path.join(temporary_directory, version_and_filename)
self.assertTrue(os.path.exists(first_version_output_file))
# Try to add more consistent metadata files.
version_number += 1
repo_lib.write_metadata_file(root_signable, output_filename,
version_number, consistent_snapshot=True)
    # Test if the latest root.json points to the expected consistent file
# and consistent metadata do not all point to the same root.json
version_and_filename = str(version_number) + '.' + 'root.json'
second_version_output_file = os.path.join(temporary_directory, version_and_filename)
self.assertTrue(os.path.exists(second_version_output_file))
self.assertNotEqual(os.stat(output_filename).st_ino, os.stat(first_version_output_file).st_ino)
self.assertEqual(os.stat(output_filename).st_ino, os.stat(second_version_output_file).st_ino)
# Test for an improper settings.CONSISTENT_METHOD string value.
tuf.settings.CONSISTENT_METHOD = 'somebadidea'
self.assertRaises(securesystemslib.exceptions.InvalidConfigurationError,
repo_lib.write_metadata_file, root_signable, output_filename,
version_number, consistent_snapshot=True)
# Try to create a link to root.json when root.json doesn't exist locally.
# repository_lib should log a message if this is the case.
tuf.settings.CONSISTENT_METHOD = 'hard_link'
os.remove(output_filename)
repo_lib.write_metadata_file(root_signable, output_filename, version_number,
consistent_snapshot=True)
# Reset CONSISTENT_METHOD so that subsequent tests work as expected.
tuf.settings.CONSISTENT_METHOD = 'copy'
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
3, output_filename, version_number, False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
root_signable, 3, version_number, False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
root_signable, output_filename, '3', False)
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
root_signable, output_filename, version_number, 3)
def test_create_tuf_client_directory(self):
# Test normal case.
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
repository_directory = os.path.join('repository_data', 'repository')
client_directory = os.path.join(temporary_directory, 'client')
repo_lib.create_tuf_client_directory(repository_directory, client_directory)
self.assertTrue(os.path.exists(client_directory))
metadata_directory = os.path.join(client_directory, 'metadata')
current_directory = os.path.join(metadata_directory, 'current')
previous_directory = os.path.join(metadata_directory, 'previous')
self.assertTrue(os.path.exists(client_directory))
self.assertTrue(os.path.exists(metadata_directory))
self.assertTrue(os.path.exists(current_directory))
self.assertTrue(os.path.exists(previous_directory))
# Test improperly formatted arguments.
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.create_tuf_client_directory, 3, client_directory)
self.assertRaises(securesystemslib.exceptions.FormatError,
repo_lib.create_tuf_client_directory, repository_directory, 3)
# Test invalid argument (i.e., client directory already exists.)
self.assertRaises(securesystemslib.exceptions.RepositoryError,
repo_lib.create_tuf_client_directory, repository_directory,
client_directory)
# Test invalid client metadata directory (i.e., non-errno.EEXIST exceptions
# should be re-raised.)
shutil.rmtree(metadata_directory)
current_client_directory_mode = os.stat(client_directory)[stat.ST_MODE]
# Remove write access for the client directory so that the 'metadata'
# directory cannot be created. create_tuf_client_directory() should
# re-raise the 'OSError' (i.e., errno.EACCES) exception and only handle
# errno.EEXIST.
os.chmod(client_directory, current_client_directory_mode & ~stat.S_IWUSR)
self.assertRaises(OSError, repo_lib.create_tuf_client_directory,
repository_directory, client_directory)
# Reset the client directory's mode.
os.chmod(client_directory, current_client_directory_mode)
def test__check_directory(self):
# Test for non-existent directory.
self.assertRaises(securesystemslib.exceptions.Error,
repo_lib._check_directory, 'non-existent')
def test__generate_and_write_metadata(self):
# Test for invalid, or unsupported, rolename.
# Load the root metadata provided in 'tuf/tests/repository_data/'.
repository_name = 'repository_name'
root_filepath = os.path.join('repository_data', 'repository',
'metadata', 'root.json')
root_signable = securesystemslib.util.load_json_file(root_filepath)
# _generate_and_write_metadata() expects the top-level roles
# (specifically 'snapshot') and keys to be available in 'tuf.roledb'.
tuf.roledb.create_roledb_from_root_metadata(root_signable['signed'],
repository_name)
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
targets_directory = os.path.join(temporary_directory, 'targets')
os.mkdir(targets_directory)
repository_directory = os.path.join(temporary_directory, 'repository')
metadata_directory = os.path.join(repository_directory,
repo_lib.METADATA_STAGED_DIRECTORY_NAME)
targets_metadata = os.path.join('repository_data', 'repository', 'metadata',
'targets.json')
obsolete_metadata = os.path.join(metadata_directory, 'obsolete_role.json')
securesystemslib.util.ensure_parent_dir(obsolete_metadata)
shutil.copyfile(targets_metadata, obsolete_metadata)
    # Verify that obsolete metadata (a metadata file that exists on disk while
    # its role is unavailable in 'tuf.roledb') is properly removed. First add
    # the obsolete role to 'tuf.roledb' so that its metadata file can be
    # written to disk.
targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name)
targets_roleinfo['version'] = 1
expiration = \
tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400))
expiration = expiration.isoformat() + 'Z'
targets_roleinfo['expires'] = expiration
tuf.roledb.add_role('obsolete_role', targets_roleinfo,
repository_name=repository_name)
repo_lib._generate_and_write_metadata('obsolete_role', obsolete_metadata,
targets_directory, metadata_directory, consistent_snapshot=False,
filenames=None, repository_name=repository_name)
snapshot_filepath = os.path.join('repository_data', 'repository',
'metadata', 'snapshot.json')
snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath)
tuf.roledb.remove_role('obsolete_role', repository_name)
self.assertTrue(os.path.exists(os.path.join(metadata_directory,
'obsolete_role.json')))
tuf.repository_lib._delete_obsolete_metadata(metadata_directory,
snapshot_signable['signed'], False, repository_name)
    self.assertFalse(os.path.exists(os.path.join(metadata_directory,
                                                 'obsolete_role.json')))
shutil.copyfile(targets_metadata, obsolete_metadata)
def test__delete_obsolete_metadata(self):
repository_name = 'test_repository'
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
repository_directory = os.path.join(temporary_directory, 'repository')
metadata_directory = os.path.join(repository_directory,
repo_lib.METADATA_STAGED_DIRECTORY_NAME)
os.makedirs(metadata_directory)
snapshot_filepath = os.path.join('repository_data', 'repository',
'metadata', 'snapshot.json')
snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath)
# Create role metadata that should not exist in snapshot.json.
role1_filepath = os.path.join('repository_data', 'repository', 'metadata',
'role1.json')
shutil.copyfile(role1_filepath, os.path.join(metadata_directory, 'role2.json'))
repo_lib._delete_obsolete_metadata(metadata_directory,
snapshot_signable['signed'], True, repository_name)
# _delete_obsolete_metadata should never delete root.json.
root_filepath = os.path.join('repository_data', 'repository', 'metadata',
'root.json')
shutil.copyfile(root_filepath, os.path.join(metadata_directory, 'root.json'))
repo_lib._delete_obsolete_metadata(metadata_directory,
snapshot_signable['signed'], True, repository_name)
self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json')))
# Verify what happens for a non-existent metadata directory (a debug
# message is logged).
repo_lib._delete_obsolete_metadata('non-existent',
snapshot_signable['signed'], True, repository_name)
def test__load_top_level_metadata(self):
repository_name = 'test_repository'
temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
repository_directory = os.path.join(temporary_directory, 'repository')
metadata_directory = os.path.join(repository_directory,
repo_lib.METADATA_STAGED_DIRECTORY_NAME)
targets_directory = os.path.join(repository_directory,
repo_lib.TARGETS_DIRECTORY_NAME)
shutil.copytree(os.path.join('repository_data', 'repository', 'metadata'),
metadata_directory)
shutil.copytree(os.path.join('repository_data', 'repository', 'targets'),
targets_directory)
    # Add a duplicate signature to the Root file for testing purposes.
root_file = os.path.join(metadata_directory, 'root.json')
signable = securesystemslib.util.load_json_file(os.path.join(metadata_directory, 'root.json'))
signable['signatures'].append(signable['signatures'][0])
repo_lib.write_metadata_file(signable, root_file, 8, False)
    # Attempt to load the repository now that its Root file carries a duplicate signature.
repository = repo_tool.create_new_repository(repository_directory, repository_name)
filenames = repo_lib.get_metadata_filenames(metadata_directory)
repo_lib._load_top_level_metadata(repository, filenames, repository_name)
filenames = repo_lib.get_metadata_filenames(metadata_directory)
repository = repo_tool.create_new_repository(repository_directory, repository_name)
repo_lib._load_top_level_metadata(repository, filenames, repository_name)
    # Partially write all top-level roles (we increase the threshold of each
    # top-level role so that they are flagged as partially written).
repository.root.threshold = repository.root.threshold + 1
repository.snapshot.threshold = repository.snapshot.threshold + 1
repository.targets.threshold = repository.targets.threshold + 1
repository.timestamp.threshold = repository.timestamp.threshold + 1
    repository.write('root')
repository.write('snapshot')
repository.write('targets')
repository.write('timestamp')
repo_lib._load_top_level_metadata(repository, filenames, repository_name)
# Attempt to load a repository with missing top-level metadata.
for role_file in os.listdir(metadata_directory):
if role_file.endswith('.json') and not role_file.startswith('root'):
role_filename = os.path.join(metadata_directory, role_file)
os.remove(role_filename)
repo_lib._load_top_level_metadata(repository, filenames, repository_name)
# Remove the required Root file and verify that an exception is raised.
os.remove(os.path.join(metadata_directory, 'root.json'))
self.assertRaises(securesystemslib.exceptions.RepositoryError,
repo_lib._load_top_level_metadata, repository, filenames,
repository_name)
def test__remove_invalid_and_duplicate_signatures(self):
# Remove duplicate PSS signatures (same key generates valid, but different
# signatures). First load a valid signable (in this case, the root role).
repository_name = 'test_repository'
root_filepath = os.path.join('repository_data', 'repository',
'metadata', 'root.json')
root_signable = securesystemslib.util.load_json_file(root_filepath)
key_filepath = os.path.join('repository_data', 'keystore', 'root_key')
root_rsa_key = repo_lib.import_rsa_privatekey_from_file(key_filepath,
'password')
# Add 'root_rsa_key' to tuf.keydb, since
# _remove_invalid_and_duplicate_signatures() checks for unknown keys in
# tuf.keydb.
tuf.keydb.add_key(root_rsa_key, repository_name=repository_name)
# Append the new valid, but duplicate PSS signature, and test that
# duplicates are removed. create_signature() generates a key for the
# key type of the first argument (i.e., root_rsa_key).
new_pss_signature = securesystemslib.keys.create_signature(root_rsa_key,
root_signable['signed'])
root_signable['signatures'].append(new_pss_signature)
expected_number_of_signatures = len(root_signable['signatures'])
tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable,
repository_name)
self.assertEqual(len(root_signable), expected_number_of_signatures)
# Test for an invalid keyid.
root_signable['signatures'][0]['keyid'] = '404'
tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable,
repository_name)
# Re-add a valid signature for the following test condition.
root_signable['signatures'].append(new_pss_signature)
# Test that an exception is not raised if an invalid sig is present,
    # and that the duplicate key is removed from 'root_signable'.
root_signable['signatures'][0]['sig'] = '4040'
invalid_keyid = root_signable['signatures'][0]['keyid']
tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable,
repository_name)
for signature in root_signable['signatures']:
self.assertFalse(invalid_keyid == signature['keyid'])
# Run the test cases.
if __name__ == '__main__':
unittest.main()
| 44.395327 | 104 | 0.728607 |
79482eb2779663d4f1f0576e3b12ffe0204dcbe0
| 3,875 |
py
|
Python
|
yggdrasil/serialize/AsciiMapSerialize.py
|
cropsinsilico/yggdrasil
|
466a4f77605a6f461d57ef7b165a6db7eec4d1fd
|
[
"BSD-3-Clause"
] | 22 |
2019-02-05T15:20:07.000Z
|
2022-02-25T09:00:40.000Z
|
yggdrasil/serialize/AsciiMapSerialize.py
|
cropsinsilico/yggdrasil
|
466a4f77605a6f461d57ef7b165a6db7eec4d1fd
|
[
"BSD-3-Clause"
] | 48 |
2019-02-15T20:41:24.000Z
|
2022-03-16T20:52:02.000Z
|
yggdrasil/serialize/AsciiMapSerialize.py
|
cropsinsilico/yggdrasil
|
466a4f77605a6f461d57ef7b165a6db7eec4d1fd
|
[
"BSD-3-Clause"
] | 16 |
2019-04-27T03:36:40.000Z
|
2021-12-02T09:47:06.000Z
|
import json
from yggdrasil import tools, constants
from yggdrasil.serialize.SerializeBase import SerializeBase
from yggdrasil.metaschema.encoder import JSONReadableEncoder
class AsciiMapSerialize(SerializeBase):
r"""Class for serializing/deserializing name/value mapping.
Args:
delimiter (str, optional): Delimiter that should be used to
separate name/value pairs in the map. Defaults to \t.
"""
_seritype = 'map'
    _schema_subtype_description = ('Serialization of mapping between key/value '
'pairs with one pair per line and using a '
'character delimiter to separate keys and '
'values.')
_schema_properties = {
'delimiter': {'type': 'string',
'default': constants.DEFAULT_DELIMITER_STR}}
_attr_conv = SerializeBase._attr_conv # + ['delimiter']
default_datatype = {'type': 'object'}
concats_as_str = False
def func_serialize(self, args):
r"""Serialize a message.
Args:
args (dict): Python dictionary to be serialized.
Returns:
bytes, str: Serialized message.
"""
out = ''
order = sorted([k for k in args.keys()])
newline_str = tools.bytes2str(self.newline)
for k in order:
v = args[k]
if not isinstance(k, (str, bytes)):
raise ValueError("Serialization of non-string keys not supported.")
out += tools.bytes2str(k)
out += self.delimiter
out += json.dumps(v, cls=JSONReadableEncoder)
out += newline_str
return tools.str2bytes(out)
def func_deserialize(self, msg):
r"""Deserialize a message.
Args:
msg (bytes): Message to be deserialized.
Returns:
dict: Deserialized Python dictionary.
"""
out = dict()
lines = tools.bytes2str(msg.split(self.newline), recurse=True)
for line in lines:
kv = [x for x in line.split(self.delimiter) if x]
if len(kv) <= 1:
# TODO: Allow empty?
continue
elif len(kv) == 2:
if kv[1].startswith("'") and kv[1].endswith("'"):
out[kv[0]] = kv[1].strip("'")
else:
try:
out[kv[0]] = json.loads(kv[1])
except BaseException:
out[kv[0]] = kv[1]
else:
raise ValueError("Line has more than one delimiter: " + str(line))
return out
@classmethod
def concatenate(cls, objects, **kwargs):
        r"""Concatenate objects to get object that would be received if
the concatenated serialization were deserialized.
Args:
objects (list): Objects to be concatenated.
**kwargs: Additional keyword arguments are ignored.
Returns:
list: Set of objects that results from concatenating those provided.
"""
total = dict()
for x in objects:
total.update(x)
return [total]
@classmethod
def get_testing_options(cls):
r"""Method to return a dictionary of testing options for this class.
Returns:
dict: Dictionary of variables to use for testing.
"""
out = super(AsciiMapSerialize, cls).get_testing_options()
out['objects'] = [{'args1': int(1), 'args2': 'this'},
{'args3': float(1), 'args4': [int(1), int(2)]}]
out['empty'] = dict()
out['contents'] = (b'args1\t1\n'
+ b'args2\t"this"\n'
+ b'args3\t1.0\n'
+ b'args4\t[1, 2]\n')
return out
| 33.695652 | 83 | 0.536 |
79482ebbdb91eaa0dfc76e032666b290623e173a
| 351 |
py
|
Python
|
elcheapoais_tui/screen_syslog.py
|
innovationgarage/ElCheapoAIS-TUI
|
c54990cfd2c1f7527e55371fcf166f708f21fc14
|
[
"MIT"
] | null | null | null |
elcheapoais_tui/screen_syslog.py
|
innovationgarage/ElCheapoAIS-TUI
|
c54990cfd2c1f7527e55371fcf166f708f21fc14
|
[
"MIT"
] | null | null | null |
elcheapoais_tui/screen_syslog.py
|
innovationgarage/ElCheapoAIS-TUI
|
c54990cfd2c1f7527e55371fcf166f708f21fc14
|
[
"MIT"
] | null | null | null |
from . import screen
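# SyslogScreen shows the contents of /var/log/syslog as a scrollable text
# view; every ": " separator is turned into a line break so long entries stay
# readable, and any user action returns to the debug screen.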
class SyslogScreen(screen.TextScroll):
def __init__(self, tui):
self.tui = tui
with open("/var/log/syslog") as f:
content = f.read()
content = content.replace(": ", ":\n")
screen.TextScroll.__init__(self, content)
def action(self, value):
return self.tui.debug_screen
| 27 | 49 | 0.603989 |
79482ee1c81d6aea934ea410a4fe8da9c8dc0315
| 862 |
py
|
Python
|
adapters/uploader.py
|
atnartur/backuper
|
0d1831720896f82ad67234efcbd4e680382895bc
|
[
"MIT"
] | 2 |
2018-06-08T20:02:06.000Z
|
2018-06-08T20:02:07.000Z
|
adapters/uploader.py
|
atnartur/backuper
|
0d1831720896f82ad67234efcbd4e680382895bc
|
[
"MIT"
] | 1 |
2018-01-14T10:25:37.000Z
|
2018-01-23T14:58:20.000Z
|
adapters/uploader.py
|
atnartur/backuper
|
0d1831720896f82ad67234efcbd4e680382895bc
|
[
"MIT"
] | null | null | null |
import logging
import os
import subprocess
import datetime
from app import yadisk_dir, yadisk_token
def upload(local_path, project_name):
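    # Upload every file in 'local_path' to Yandex.Disk under
    # <yadisk_dir>/<project_name>/<YYYY-MM-DD>/ by shelling out to the
    # external 'ydcmd' client; the OAuth token is passed through the
    # YDCMD_TOKEN environment variable.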
for file in os.listdir(local_path):
logging.info(u'Uploading ' + file)
src = os.path.join(local_path, file)
today = '{:%Y-%m-%d}'.format(datetime.date.today())
dest = os.path.join(yadisk_dir, project_name + '/', today, file).replace(os.path.sep, "/")
try:
my_env = os.environ.copy()
my_env["YDCMD_TOKEN"] = yadisk_token
proc = subprocess.Popen('ydcmd put %s %s --verbose' % (src, dest), shell=True, env=my_env)
outs, errs = proc.communicate()
if errs:
raise ConnectionError(errs)
except subprocess.CalledProcessError as e:
logging.error('ydcmd error %s %s' % (e.stdout, e.stderr))
| 35.916667 | 102 | 0.613689 |
79482f1d276702aaccedef2048e73a23eb6792d4
| 10,704 |
py
|
Python
|
src/pyexam/exam-assistant.py
|
monkeychen/python-tutorial
|
a24785da6b4d857200b819ad4d960885b1ef7a20
|
[
"Apache-2.0"
] | null | null | null |
src/pyexam/exam-assistant.py
|
monkeychen/python-tutorial
|
a24785da6b4d857200b819ad4d960885b1ef7a20
|
[
"Apache-2.0"
] | null | null | null |
src/pyexam/exam-assistant.py
|
monkeychen/python-tutorial
|
a24785da6b4d857200b819ad4d960885b1ef7a20
|
[
"Apache-2.0"
] | null | null | null |
import os
import argparse
import shutil
from datetime import datetime
from openpyxl import load_workbook
from unrar import rarfile
import tkinter as tk
FMT_DEFAULT = "%Y-%m-%d %H:%M:%S"
class Answer(object):
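    # Per-section scoring configuration: 'title' names the attribute that
    # holds the answers for that section and 'unit' is the number of points
    # awarded for every correct entry.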
score_conf = [
{"title": "single_choice", "unit": 2},
{"title": "multi_choice", "unit": 3},
{"title": "judgment", "unit": 2},
{"title": "programme_fill", "unit": 5},
{"title": "complex", "unit": 10},
]
def __init__(self, single_choice=None, multi_choice=None, judgment=None):
self.single_choice = single_choice
self.multi_choice = multi_choice
self.judgment = judgment
self.programme_fill = []
self.complex = []
class Student(object):
def __init__(self, name, answer: Answer = None, std_answer: Answer = None, valid=True):
self.name = name
self.answer = answer
self.score = 0
self.std_answer = std_answer
self.valid = valid
self.answer_dir_path = None
def calc_score(self):
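        # Compare the student's answers with the reference answers section by
        # section (see Answer.score_conf) and add 'unit' points per matching
        # entry; a missing answer card or a section-length mismatch marks the
        # sheet as invalid.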
if self.answer is None:
self.valid = False
return
for conf in Answer.score_conf:
unit = conf.get("unit")
q_type = conf.get("title")
tmp_std_ans_values = getattr(self.std_answer, q_type)
tmp_ans_values = getattr(self.answer, q_type)
if len(tmp_ans_values) != len(tmp_std_ans_values):
self.valid = False
return
for i in range(0, len(tmp_std_ans_values)):
self.score = self.score + (unit if tmp_std_ans_values[i] == tmp_ans_values[i] else 0)
self.valid = self.score < 20
return self.score
class ExamAssistant(object):
root_dir = os.getcwd()
question_conf = [
{"begin": "程序填空题01", "end": "txt"},
{"begin": "程序填空题02", "end": "txt"},
{"begin": "程序填空题03", "end": "txt"},
{"begin": "综合题01", "end": "txt"},
{"begin": "综合题02", "end": "txt"}
]
def __init__(self, tk_inst: tk.Tk):
self.tk_inst = tk_inst
self.args = self.parse_cli_args()
self.root_dir = self.args.root_dir
self.answer_file_num = self.args.answer_file_num
self.answer_dir = f"{self.root_dir}/answer"
self.upload_dir = f"{self.root_dir}/upload"
self.temp_dir = f"{self.upload_dir}/temp"
self.action = self.args.action
self.std_answer = self.load_answer_file(f"{self.answer_dir}/答题卡.xlsx")
self.student_list = []
self.init_win_layout()
def init_win_layout(self):
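        # Build the single-window Tk layout: the archive-check button at the
        # top, a static hint label, a one-line status label and a large
        # results area underneath.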
self.tk_inst.title("压缩包检测工具V1.0")
self.tk_inst.geometry('900x600')
self.btn = tk.Button(self.tk_inst, text="压缩包内容检测", bg="lightblue", width=30, height=1, command=self.execute)
self.btn.pack(pady=15)
self.label3 = tk.Label(self.tk_inst, text="本产品只获取当前文件夹中rar压缩包的总数,\n以及检测rar中是否包含Acopr与prog、答题卡xlsx文件。\n如需更详细的检测,请进行人工判定")
self.label3.pack(pady=1)
self.text2 = tk.StringVar()
self.label2 = tk.Label(self.tk_inst, textvariable=self.text2, fg="red", width=90, relief="sunken", bg="white", font=16)
self.label2.pack(pady=1)
self.text1 = tk.StringVar()
self.label1 = tk.Label(self.tk_inst, textvariable=self.text1, fg="red", width=90, height=30, relief="sunken", bg="white", font=16)
self.label1.pack()
def load_answer_file(self, answer_file_path):
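        # Read an answer-card workbook: 'parse_conf' fixes the worksheet row
        # and column span of the single-choice, multi-choice and judgment
        # sections, and the extracted values are collected on an Answer
        # instance.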
answer_card_wb = load_workbook(answer_file_path)
sheet = answer_card_wb["Sheet1"]
parse_conf = {
"single_choice": {'row': 3, 'max_col': 16},
"multi_choice": {'row': 6, 'max_col': 6},
"judgment": {'row': 9, 'max_col': 16}
}
answer = Answer([], [], [])
for k in parse_conf.keys():
conf = parse_conf.get(k)
if conf is None:
continue
tmp_arr = []
for col_idx in range(2, conf.get('max_col') + 1):
tmp_arr.append(sheet.cell(conf.get('row'), col_idx).value)
setattr(answer, k, tmp_arr)
return answer
def submit_check(self):
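        # Walk every student directory under 'upload/': each one must contain
        # exactly one RAR archive holding at least 'answer_file_num' matching
        # answer files plus the answer-card workbook. Valid archives are
        # extracted into the temp directory and scored; anything suspicious is
        # collected into a human-readable report for manual review.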
self._remove_dir_tree(dir_path=self.temp_dir, include_self=False)
dir_list = os.listdir(self.upload_dir)
check_report = []
student_list = []
for stu_main_dir_name in dir_list:
stu_main_dir_path = f"{self.upload_dir}/{stu_main_dir_name}"
if "temp" == stu_main_dir_name or "py-code" == stu_main_dir_name or os.path.isfile(stu_main_dir_path) or len(os.listdir(stu_main_dir_path)) == 0:
print(f"{stu_main_dir_path} is file or is an empty directory or a temp dir!")
continue
student = Student(name=stu_main_dir_name, std_answer=self.std_answer)
student_list.append(student)
rar_cnt = 0
rar_file_path = None
for stu_file_name in os.listdir(stu_main_dir_path):
if not stu_file_name.endswith("rar"):
continue
rar_cnt += 1
if rar_cnt > 1:
check_report.append(f"目录【{stu_main_dir_name}]下发现多份RAR文件,请人工确认是否异常!")
student.valid = False
break
rar_file_path = f"{stu_main_dir_path}/{stu_file_name}"
if student.valid:
unrar_tmp_dir_path = f"{self.temp_dir}/{stu_main_dir_name}"
try:
self._remove_dir_tree(dir_path=unrar_tmp_dir_path, include_self=False)
rar_file = rarfile.RarFile(rar_file_path)
matched_name_list = []
card_name = None
for name_in_rar in rar_file.namelist():
base_name = os.path.basename(name_in_rar)
if self._is_matched_file(base_name):
matched_name_list.append(name_in_rar)
elif base_name == "答题卡.xlsx":
card_name = name_in_rar
if len(matched_name_list) < self.answer_file_num or card_name is None:
check_report.append(f"压缩文件【{rar_file_path}]下的文件数量不满意要求,请人工确认!")
student.valid = False
continue
if student.valid:
rar_file.extractall(path=unrar_tmp_dir_path)
card_path = f"{unrar_tmp_dir_path}/{card_name}"
student.answer_dir_path = os.path.dirname(card_path)
if os.path.exists(card_path):
student.answer = self.load_answer_file(card_path)
student.calc_score()
else:
check_report.append(f"无法加载答题卡文件【{card_path}】,请人工确认!")
student.valid = False
except Exception as e:
                    print(f"Failed to parse student[{stu_main_dir_name}]'s answer rar file: {e}")
invalid_student_list = [stu for stu in student_list if not stu.valid]
for stu in invalid_student_list:
if stu.score < 20:
check_report.append(f"学生【{stu.name}】的成绩为【{stu.score}】,低于20分,请人工确认答卷是否异常!")
return check_report
def evaluate(self):
pass
def _remove_dir_tree(self, dir_path, include_self=True):
if os.path.exists(dir_path):
shutil.rmtree(dir_path)
if not include_self:
os.makedirs(dir_path)
def _list_dir(self, dir_path, ext=None):
name_list = []
for dir_name in os.listdir(dir_path):
if ext is None:
name_list.append(dir_name)
else:
if dir_name.endswith(ext):
name_list.append(dir_name)
return name_list
def _is_matched_file(self, file_name: str):
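        # A file counts as an answer file when its name starts with one of the
        # question prefixes in 'question_conf' and ends with the configured
        # extension.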
matched = False
for conf in self.question_conf:
matched = file_name.startswith(conf.get("begin")) and file_name.endswith(conf.get("end"))
if matched:
break
return matched
def datetime_to_str(self, dt, fmt=FMT_DEFAULT):
return dt.strftime(fmt)
def parse_cli_args(self):
cli_parser = argparse.ArgumentParser(description="这是一个Python考试辅助工具。", add_help=False)
cli_parser.add_argument('-a', '--action', nargs='?', choices=['submit', 'evaluate'], default='submit', help='设置运行环境(submit:交卷检测,evaluate:评卷),默认为:交卷检测')
cli_parser.add_argument('-e', '--env_mode', nargs='?', choices=['dev', 'prod'], default='prod', help='设置运行环境(dev:开发环境,prod:生产环境),默认为:开发环境')
cli_parser.add_argument('-n', '--answer_file_num', nargs='?', type=int, default=5, help='学生提交的python文件数,默认为5。')
cli_parser.add_argument('-r', '--root_dir', nargs='?', default=f"{os.getcwd()}", help='程序工作根目录路径。')
cli_parser.add_argument('-x', '--ext_name', nargs='?', default="py", help='学生提交作业中程序文件扩展名,默认为:py')
cli_parser.add_argument('-h', '--help', action='help', help='显示本帮助信息并退出')
cli_parser.add_argument('-v', '--version', action='version', version='%(prog)s V0.0.1', help='显示当前版本信息并退出')
args = cli_parser.parse_args()
scheduled_time = self.datetime_to_str(datetime.now(), fmt=FMT_DEFAULT)
print(f"The programme[{cli_parser.prog}] was scheduled at {scheduled_time}, cwd = {os.getcwd()}, env_mode = {args.env_mode}, "
f"root_dir = {args.root_dir}, answer_file_num = {args.answer_file_num}")
return args
def execute(self):
if self.action == 'submit':
check_result = self.submit_check()
infos = "\n".join(check_result)
self.text1.set(infos)
for info in check_result:
print(info)
else:
self.evaluate()
class PythonQuestionEval(object):
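    # Skeleton for per-question automatic grading of the extracted answer
    # files; the eval_question_* hooks below are not implemented yet.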
question_conf = ExamAssistant.question_conf
def __init__(self, answer_dir_path=None):
if answer_dir_path is None:
answer_dir_path = f"{os.getcwd()}/upload/temp"
self.answer_dir_path = answer_dir_path
def eval_question_1(self, question_file_path):
pass
def eval_question_2(self, question_file_path):
pass
def eval_question_3(self, question_file_path):
pass
def eval_question_4(self, question_file_path):
pass
def eval_question_5(self, question_file_path):
pass
def evaluate(self):
for stu_main_dir_name in os.listdir(self.answer_dir_path):
stu_main_dir_path = f"{self.answer_dir_path}/{stu_main_dir_name}"
if __name__ == "__main__":
win = tk.Tk()
GUI = ExamAssistant(win)
win.mainloop()
| 39.940299 | 159 | 0.588378 |
7948311ff3a48d660faebd0cf932166d07f7a0a1
| 4,846 |
py
|
Python
|
setup.py
|
gcurtis79/OctoPrint-DiscordRemote
|
1af667648a5161633f5484f656783cd03858e798
|
[
"MIT"
] | null | null | null |
setup.py
|
gcurtis79/OctoPrint-DiscordRemote
|
1af667648a5161633f5484f656783cd03858e798
|
[
"MIT"
] | null | null | null |
setup.py
|
gcurtis79/OctoPrint-DiscordRemote
|
1af667648a5161633f5484f656783cd03858e798
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import os
from distutils.cmd import Command
class CoverageCommand(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
os.system("python -m coverage run --source octoprint_discordremote/ setup.py test")
os.system("python -m coverage html")
########################################################################################################################
### Do not forget to adjust the following variables to your own plugin.
# The plugin's identifier, has to be unique
plugin_identifier = "discordremote"
# The plugin's python package, should be "octoprint_<plugin identifier>", has to be unique
plugin_package = "octoprint_discordremote"
# The plugin's human readable name. Can be overwritten within OctoPrint's internal data via __plugin_name__ in the
# plugin module
plugin_name = "OctoPrint-DiscordRemote"
# The plugin's version. Can be overwritten within OctoPrint's internal data via __plugin_version__ in the plugin module
plugin_version = "2.37.0"
# The plugin's description. Can be overwritten within OctoPrint's internal data via __plugin_description__ in the plugin
# module
plugin_description = """Discord plugin for OctoPrint"""
# The plugin's author. Can be overwritten within OctoPrint's internal data via __plugin_author__ in the plugin module
plugin_author = "Cameron Cross"
# The plugin's author's mail address.
plugin_author_email = "cameroncros@hotmail.com"
# The plugin's homepage URL. Can be overwritten within OctoPrint's internal data via __plugin_url__ in the plugin module
plugin_url = "https://github.com/cameroncros/Octoprint-DiscordRemote"
# The plugin's license. Can be overwritten within OctoPrint's internal data via __plugin_license__ in the plugin module
plugin_license = "MIT"
# Any additional requirements besides OctoPrint should be listed here
plugin_requires = ["Pillow==5.0.0", "websocket-client", "requests", "humanfriendly"]
### --------------------------------------------------------------------------------------------------------------------
### More advanced options that you usually shouldn't have to touch follow after this point
### --------------------------------------------------------------------------------------------------------------------
# Additional package data to install for this plugin. The subfolders "templates", "static" and "translations" will
# already be installed automatically if they exist. Note that if you add something here you'll also need to update
# MANIFEST.in to match to ensure that python setup.py sdist produces a source distribution that contains all your
# files. This is sadly due to how python's setup.py works, see also http://stackoverflow.com/a/14159430/2028598
plugin_additional_data = []
# Any additional python packages you need to install with your plugin that are not contained in <plugin_package>.*
plugin_additional_packages = []
# Any python packages within <plugin_package>.* you do NOT want to install with your plugin
plugin_ignored_packages = []
# Additional parameters for the call to setuptools.setup. If your plugin wants to register additional entry points,
# define dependency links or other things like that, this is the place to go. Will be merged recursively with the
# default setup parameters as provided by octoprint_setuptools.create_plugin_setup_parameters using
# octoprint.util.dict_merge.
#
# Example:
# plugin_requires = ["someDependency==dev"]
# additional_setup_parameters = {"dependency_links": ["https://github.com/someUser/someRepo/archive/master.zip#egg=someDependency-dev"]}
additional_setup_parameters = {
'cmdclass': {
'coverage': CoverageCommand
}
}
########################################################################################################################
from setuptools import setup
try:
import octoprint_setuptools
except:
print("Could not import OctoPrint's setuptools, are you sure you are running that under "
"the same python installation that OctoPrint is installed under?")
import sys
sys.exit(-1)
setup_parameters = octoprint_setuptools.create_plugin_setup_parameters(
identifier=plugin_identifier,
package=plugin_package,
name=plugin_name,
version=plugin_version,
description=plugin_description,
author=plugin_author,
mail=plugin_author_email,
url=plugin_url,
license=plugin_license,
requires=plugin_requires,
additional_packages=plugin_additional_packages,
ignored_packages=plugin_ignored_packages,
additional_data=plugin_additional_data
)
if len(additional_setup_parameters):
from octoprint.util import dict_merge
setup_parameters = dict_merge(setup_parameters, additional_setup_parameters)
setup(**setup_parameters)
| 40.722689 | 140 | 0.698308 |
7948331fbb0765637ab6421132612652e4ed63b1
| 20,709 |
py
|
Python
|
setup.py
|
sallysyw/vision
|
bf073e785528970e6a1605e411e4fc382d686dc7
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
sallysyw/vision
|
bf073e785528970e6a1605e411e4fc382d686dc7
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
sallysyw/vision
|
bf073e785528970e6a1605e411e4fc382d686dc7
|
[
"BSD-3-Clause"
] | null | null | null |
import distutils.command.clean
import distutils.spawn
import glob
import os
import shutil
import subprocess
import sys
import torch
from pkg_resources import parse_version, get_distribution, DistributionNotFound
from setuptools import setup, find_packages
from torch.utils.cpp_extension import BuildExtension, CppExtension, CUDAExtension, CUDA_HOME
def read(*names, **kwargs):
with open(os.path.join(os.path.dirname(__file__), *names), encoding=kwargs.get("encoding", "utf8")) as fp:
return fp.read()
def get_dist(pkgname):
try:
return get_distribution(pkgname)
except DistributionNotFound:
return None
cwd = os.path.dirname(os.path.abspath(__file__))
version_txt = os.path.join(cwd, "version.txt")
with open(version_txt) as f:
version = f.readline().strip()
sha = "Unknown"
package_name = "torchvision"
try:
sha = subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=cwd).decode("ascii").strip()
except Exception:
pass
if os.getenv("BUILD_VERSION"):
version = os.getenv("BUILD_VERSION")
elif sha != "Unknown":
version += "+" + sha[:7]
def write_version_file():
version_path = os.path.join(cwd, "torchvision", "version.py")
with open(version_path, "w") as f:
f.write(f"__version__ = '{version}'\n")
f.write(f"git_version = {repr(sha)}\n")
f.write("from torchvision.extension import _check_cuda_version\n")
f.write("if _check_cuda_version() > 0:\n")
f.write(" cuda = _check_cuda_version()\n")
pytorch_dep = "torch"
if os.getenv("PYTORCH_VERSION"):
pytorch_dep += "==" + os.getenv("PYTORCH_VERSION")
requirements = [
"numpy",
"requests",
pytorch_dep,
]
# Excluding 8.3.* because of https://github.com/pytorch/vision/issues/4934
pillow_ver = " >= 5.3.0, !=8.3.*"
pillow_req = "pillow-simd" if get_dist("pillow-simd") is not None else "pillow"
requirements.append(pillow_req + pillow_ver)
def find_library(name, vision_include):
this_dir = os.path.dirname(os.path.abspath(__file__))
build_prefix = os.environ.get("BUILD_PREFIX", None)
is_conda_build = build_prefix is not None
library_found = False
conda_installed = False
lib_folder = None
include_folder = None
library_header = f"{name}.h"
# Lookup in TORCHVISION_INCLUDE or in the package file
package_path = [os.path.join(this_dir, "torchvision")]
for folder in vision_include + package_path:
candidate_path = os.path.join(folder, library_header)
library_found = os.path.exists(candidate_path)
if library_found:
break
if not library_found:
print(f"Running build on conda-build: {is_conda_build}")
if is_conda_build:
# Add conda headers/libraries
if os.name == "nt":
build_prefix = os.path.join(build_prefix, "Library")
include_folder = os.path.join(build_prefix, "include")
lib_folder = os.path.join(build_prefix, "lib")
library_header_path = os.path.join(include_folder, library_header)
library_found = os.path.isfile(library_header_path)
conda_installed = library_found
else:
# Check if using Anaconda to produce wheels
conda = distutils.spawn.find_executable("conda")
is_conda = conda is not None
print(f"Running build on conda: {is_conda}")
if is_conda:
python_executable = sys.executable
py_folder = os.path.dirname(python_executable)
if os.name == "nt":
env_path = os.path.join(py_folder, "Library")
else:
env_path = os.path.dirname(py_folder)
lib_folder = os.path.join(env_path, "lib")
include_folder = os.path.join(env_path, "include")
library_header_path = os.path.join(include_folder, library_header)
library_found = os.path.isfile(library_header_path)
conda_installed = library_found
if not library_found:
if sys.platform == "linux":
library_found = os.path.exists(f"/usr/include/{library_header}")
library_found = library_found or os.path.exists(f"/usr/local/include/{library_header}")
return library_found, conda_installed, include_folder, lib_folder
def get_extensions():
this_dir = os.path.dirname(os.path.abspath(__file__))
extensions_dir = os.path.join(this_dir, "torchvision", "csrc")
main_file = glob.glob(os.path.join(extensions_dir, "*.cpp")) + glob.glob(
os.path.join(extensions_dir, "ops", "*.cpp")
)
source_cpu = (
glob.glob(os.path.join(extensions_dir, "ops", "autograd", "*.cpp"))
+ glob.glob(os.path.join(extensions_dir, "ops", "cpu", "*.cpp"))
+ glob.glob(os.path.join(extensions_dir, "ops", "quantized", "cpu", "*.cpp"))
)
is_rocm_pytorch = False
if torch.__version__ >= "1.5":
from torch.utils.cpp_extension import ROCM_HOME
is_rocm_pytorch = (torch.version.hip is not None) and (ROCM_HOME is not None)
if is_rocm_pytorch:
from torch.utils.hipify import hipify_python
hipify_python.hipify(
project_directory=this_dir,
output_directory=this_dir,
includes="torchvision/csrc/ops/cuda/*",
show_detailed=True,
is_pytorch_extension=True,
)
source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "hip", "*.hip"))
# Copy over additional files
for file in glob.glob(r"torchvision/csrc/ops/cuda/*.h"):
shutil.copy(file, "torchvision/csrc/ops/hip")
else:
source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "cuda", "*.cu"))
source_cuda += glob.glob(os.path.join(extensions_dir, "ops", "autocast", "*.cpp"))
sources = main_file + source_cpu
extension = CppExtension
compile_cpp_tests = os.getenv("WITH_CPP_MODELS_TEST", "0") == "1"
if compile_cpp_tests:
test_dir = os.path.join(this_dir, "test")
models_dir = os.path.join(this_dir, "torchvision", "csrc", "models")
test_file = glob.glob(os.path.join(test_dir, "*.cpp"))
source_models = glob.glob(os.path.join(models_dir, "*.cpp"))
test_file = [os.path.join(test_dir, s) for s in test_file]
source_models = [os.path.join(models_dir, s) for s in source_models]
tests = test_file + source_models
tests_include_dirs = [test_dir, models_dir]
define_macros = []
extra_compile_args = {"cxx": []}
if (torch.cuda.is_available() and ((CUDA_HOME is not None) or is_rocm_pytorch)) or os.getenv(
"FORCE_CUDA", "0"
) == "1":
extension = CUDAExtension
sources += source_cuda
if not is_rocm_pytorch:
define_macros += [("WITH_CUDA", None)]
nvcc_flags = os.getenv("NVCC_FLAGS", "")
if nvcc_flags == "":
nvcc_flags = []
else:
nvcc_flags = nvcc_flags.split(" ")
else:
define_macros += [("WITH_HIP", None)]
nvcc_flags = []
extra_compile_args["nvcc"] = nvcc_flags
if sys.platform == "win32":
define_macros += [("torchvision_EXPORTS", None)]
define_macros += [("USE_PYTHON", None)]
extra_compile_args["cxx"].append("/MP")
debug_mode = os.getenv("DEBUG", "0") == "1"
if debug_mode:
print("Compile in debug mode")
extra_compile_args["cxx"].append("-g")
extra_compile_args["cxx"].append("-O0")
if "nvcc" in extra_compile_args:
# we have to remove "-OX" and "-g" flag if exists and append
nvcc_flags = extra_compile_args["nvcc"]
extra_compile_args["nvcc"] = [f for f in nvcc_flags if not ("-O" in f or "-g" in f)]
extra_compile_args["nvcc"].append("-O0")
extra_compile_args["nvcc"].append("-g")
sources = [os.path.join(extensions_dir, s) for s in sources]
include_dirs = [extensions_dir]
ext_modules = [
extension(
"torchvision._C",
sorted(sources),
include_dirs=include_dirs,
define_macros=define_macros,
extra_compile_args=extra_compile_args,
)
]
if compile_cpp_tests:
ext_modules.append(
extension(
"torchvision._C_tests",
tests,
include_dirs=tests_include_dirs,
define_macros=define_macros,
extra_compile_args=extra_compile_args,
)
)
# ------------------- Torchvision extra extensions ------------------------
vision_include = os.environ.get("TORCHVISION_INCLUDE", None)
vision_library = os.environ.get("TORCHVISION_LIBRARY", None)
vision_include = vision_include.split(os.pathsep) if vision_include is not None else []
vision_library = vision_library.split(os.pathsep) if vision_library is not None else []
include_dirs += vision_include
library_dirs = vision_library
# Image reading extension
image_macros = []
image_include = [extensions_dir]
image_library = []
image_link_flags = []
if sys.platform == "win32":
image_macros += [("USE_PYTHON", None)]
# Locating libPNG
libpng = distutils.spawn.find_executable("libpng-config")
pngfix = distutils.spawn.find_executable("pngfix")
png_found = libpng is not None or pngfix is not None
print(f"PNG found: {png_found}")
if png_found:
if libpng is not None:
# Linux / Mac
png_version = subprocess.run([libpng, "--version"], stdout=subprocess.PIPE)
png_version = png_version.stdout.strip().decode("utf-8")
print(f"libpng version: {png_version}")
png_version = parse_version(png_version)
if png_version >= parse_version("1.6.0"):
print("Building torchvision with PNG image support")
png_lib = subprocess.run([libpng, "--libdir"], stdout=subprocess.PIPE)
png_lib = png_lib.stdout.strip().decode("utf-8")
if "disabled" not in png_lib:
image_library += [png_lib]
png_include = subprocess.run([libpng, "--I_opts"], stdout=subprocess.PIPE)
png_include = png_include.stdout.strip().decode("utf-8")
_, png_include = png_include.split("-I")
print(f"libpng include path: {png_include}")
image_include += [png_include]
image_link_flags.append("png")
else:
print("libpng installed version is less than 1.6.0, disabling PNG support")
png_found = False
else:
# Windows
png_lib = os.path.join(os.path.dirname(os.path.dirname(pngfix)), "lib")
png_include = os.path.join(os.path.dirname(os.path.dirname(pngfix)), "include", "libpng16")
image_library += [png_lib]
image_include += [png_include]
image_link_flags.append("libpng")
# Locating libjpeg
(jpeg_found, jpeg_conda, jpeg_include, jpeg_lib) = find_library("jpeglib", vision_include)
print(f"JPEG found: {jpeg_found}")
image_macros += [("PNG_FOUND", str(int(png_found)))]
image_macros += [("JPEG_FOUND", str(int(jpeg_found)))]
if jpeg_found:
print("Building torchvision with JPEG image support")
image_link_flags.append("jpeg")
if jpeg_conda:
image_library += [jpeg_lib]
image_include += [jpeg_include]
# Locating nvjpeg
# Should be included in CUDA_HOME for CUDA >= 10.1, which is the minimum version we have in the CI
nvjpeg_found = (
extension is CUDAExtension
and CUDA_HOME is not None
and os.path.exists(os.path.join(CUDA_HOME, "include", "nvjpeg.h"))
)
print(f"NVJPEG found: {nvjpeg_found}")
image_macros += [("NVJPEG_FOUND", str(int(nvjpeg_found)))]
if nvjpeg_found:
print("Building torchvision with NVJPEG image support")
image_link_flags.append("nvjpeg")
image_path = os.path.join(extensions_dir, "io", "image")
image_src = (
glob.glob(os.path.join(image_path, "*.cpp"))
+ glob.glob(os.path.join(image_path, "cpu", "*.cpp"))
+ glob.glob(os.path.join(image_path, "cuda", "*.cpp"))
)
if png_found or jpeg_found:
ext_modules.append(
extension(
"torchvision.image",
image_src,
include_dirs=image_include + include_dirs + [image_path],
library_dirs=image_library + library_dirs,
define_macros=image_macros,
libraries=image_link_flags,
extra_compile_args=extra_compile_args,
)
)
ffmpeg_exe = distutils.spawn.find_executable("ffmpeg")
has_ffmpeg = ffmpeg_exe is not None
# FIXME: Building torchvision with ffmpeg on MacOS or with Python 3.9
# FIXME: causes crash. See the following GitHub issues for more details.
# FIXME: https://github.com/pytorch/pytorch/issues/65000
# FIXME: https://github.com/pytorch/vision/issues/3367
if sys.platform != "linux" or (sys.version_info.major == 3 and sys.version_info.minor == 9):
has_ffmpeg = False
if has_ffmpeg:
try:
# This is to check if ffmpeg is installed properly.
subprocess.check_output(["ffmpeg", "-version"])
except subprocess.CalledProcessError:
print("Error fetching ffmpeg version, ignoring ffmpeg.")
has_ffmpeg = False
print(f"FFmpeg found: {has_ffmpeg}")
if has_ffmpeg:
ffmpeg_libraries = {"libavcodec", "libavformat", "libavutil", "libswresample", "libswscale"}
ffmpeg_bin = os.path.dirname(ffmpeg_exe)
ffmpeg_root = os.path.dirname(ffmpeg_bin)
ffmpeg_include_dir = os.path.join(ffmpeg_root, "include")
ffmpeg_library_dir = os.path.join(ffmpeg_root, "lib")
gcc = distutils.spawn.find_executable("gcc")
platform_tag = subprocess.run([gcc, "-print-multiarch"], stdout=subprocess.PIPE)
platform_tag = platform_tag.stdout.strip().decode("utf-8")
if platform_tag:
# Most probably a Debian-based distribution
ffmpeg_include_dir = [ffmpeg_include_dir, os.path.join(ffmpeg_include_dir, platform_tag)]
ffmpeg_library_dir = [ffmpeg_library_dir, os.path.join(ffmpeg_library_dir, platform_tag)]
else:
ffmpeg_include_dir = [ffmpeg_include_dir]
ffmpeg_library_dir = [ffmpeg_library_dir]
has_ffmpeg = True
for library in ffmpeg_libraries:
library_found = False
for search_path in ffmpeg_include_dir + include_dirs:
full_path = os.path.join(search_path, library, "*.h")
library_found |= len(glob.glob(full_path)) > 0
if not library_found:
print(f"{library} header files were not found, disabling ffmpeg support")
has_ffmpeg = False
if has_ffmpeg:
print(f"ffmpeg include path: {ffmpeg_include_dir}")
print(f"ffmpeg library_dir: {ffmpeg_library_dir}")
# TorchVision base decoder + video reader
video_reader_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "video_reader")
video_reader_src = glob.glob(os.path.join(video_reader_src_dir, "*.cpp"))
base_decoder_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "decoder")
base_decoder_src = glob.glob(os.path.join(base_decoder_src_dir, "*.cpp"))
# Torchvision video API
videoapi_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "video")
videoapi_src = glob.glob(os.path.join(videoapi_src_dir, "*.cpp"))
# exclude tests
base_decoder_src = [x for x in base_decoder_src if "_test.cpp" not in x]
combined_src = video_reader_src + base_decoder_src + videoapi_src
ext_modules.append(
CppExtension(
"torchvision.video_reader",
combined_src,
include_dirs=[
base_decoder_src_dir,
video_reader_src_dir,
videoapi_src_dir,
extensions_dir,
*ffmpeg_include_dir,
*include_dirs,
],
library_dirs=ffmpeg_library_dir + library_dirs,
libraries=[
"avcodec",
"avformat",
"avutil",
"swresample",
"swscale",
],
extra_compile_args=["-std=c++14"] if os.name != "nt" else ["/std:c++14", "/MP"],
extra_link_args=["-std=c++14" if os.name != "nt" else "/std:c++14"],
)
)
# Locating video codec
# CUDA_HOME should be set to the cuda root directory.
# TORCHVISION_INCLUDE and TORCHVISION_LIBRARY should include the location to
# video codec header files and libraries respectively.
video_codec_found = (
extension is CUDAExtension
and CUDA_HOME is not None
and any([os.path.exists(os.path.join(folder, "cuviddec.h")) for folder in vision_include])
and any([os.path.exists(os.path.join(folder, "nvcuvid.h")) for folder in vision_include])
and any([os.path.exists(os.path.join(folder, "libnvcuvid.so")) for folder in library_dirs])
)
print(f"video codec found: {video_codec_found}")
if (
video_codec_found
and has_ffmpeg
and any([os.path.exists(os.path.join(folder, "libavcodec", "bsf.h")) for folder in ffmpeg_include_dir])
):
gpu_decoder_path = os.path.join(extensions_dir, "io", "decoder", "gpu")
gpu_decoder_src = glob.glob(os.path.join(gpu_decoder_path, "*.cpp"))
cuda_libs = os.path.join(CUDA_HOME, "lib64")
cuda_inc = os.path.join(CUDA_HOME, "include")
ext_modules.append(
extension(
"torchvision.Decoder",
gpu_decoder_src,
include_dirs=include_dirs + [gpu_decoder_path] + [cuda_inc] + ffmpeg_include_dir,
library_dirs=ffmpeg_library_dir + library_dirs + [cuda_libs],
libraries=[
"avcodec",
"avformat",
"avutil",
"swresample",
"swscale",
"nvcuvid",
"cuda",
"cudart",
"z",
"pthread",
"dl",
],
extra_compile_args=extra_compile_args,
)
)
else:
print(
"The installed version of ffmpeg is missing the header file 'bsf.h' which is "
"required for GPU video decoding. Please install the latest ffmpeg from conda-forge channel:"
" `conda install -c conda-forge ffmpeg`."
)
return ext_modules
class clean(distutils.command.clean.clean):
def run(self):
with open(".gitignore") as f:
ignores = f.read()
for wildcard in filter(None, ignores.split("\n")):
for filename in glob.glob(wildcard):
try:
os.remove(filename)
except OSError:
shutil.rmtree(filename, ignore_errors=True)
# It's an old-style class in Python 2.7...
distutils.command.clean.clean.run(self)
if __name__ == "__main__":
print(f"Building wheel {package_name}-{version}")
write_version_file()
with open("README.rst") as f:
readme = f.read()
setup(
# Metadata
name=package_name,
version=version,
author="PyTorch Core Team",
author_email="soumith@pytorch.org",
url="https://github.com/pytorch/vision",
description="image and video datasets and models for torch deep learning",
long_description=readme,
license="BSD",
# Package info
packages=find_packages(exclude=("test",)),
package_data={package_name: ["*.dll", "*.dylib", "*.so", "prototype/datasets/_builtin/*.categories"]},
zip_safe=False,
install_requires=requirements,
extras_require={
"scipy": ["scipy"],
},
ext_modules=get_extensions(),
python_requires=">=3.7",
cmdclass={
"build_ext": BuildExtension.with_options(no_python_abi_suffix=True),
"clean": clean,
},
)
| 38.564246 | 111 | 0.606403 |
794834b3ba1d5bda491c78920ee09c0b2c2c6101
| 2,155 |
py
|
Python
|
tests/features/cache/test_file_cache.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 1,816 |
2018-02-14T01:59:51.000Z
|
2022-03-31T17:09:20.000Z
|
tests/features/cache/test_file_cache.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 340 |
2018-02-11T00:27:26.000Z
|
2022-03-21T12:00:24.000Z
|
tests/features/cache/test_file_cache.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 144 |
2018-03-18T00:08:16.000Z
|
2022-02-26T01:51:58.000Z
|
from tests import TestCase
import os
import time
class TestFileCache(TestCase):
def setUp(self):
super().setUp()
self.application.make("cache")
self.driver = self.application.make("cache").store()
def test_can_get_file_driver(self):
self.driver.put("key", "value")
self.assertEqual(self.driver.get("key"), "value")
self.assertTrue(self.driver.has("key"), "value")
def test_can_add_file_driver(self):
self.assertEqual(self.driver.add("add_key", "value"), "value")
def test_can_increment(self):
self.driver.put("count", "1")
self.assertEqual(self.driver.get("count"), "1")
self.driver.increment("count")
self.assertEqual(self.driver.get("count"), "2")
self.driver.decrement("count")
self.assertEqual(self.driver.get("count"), "1")
def test_will_not_get_expired(self):
self.driver.put("expire", "1", 1)
time.sleep(2)
self.assertEqual(self.driver.get("expire"), None)
def test_will_get_not_expired(self):
self.driver.put("expire", "1", 20)
self.assertEqual(self.driver.get("expire"), "1")
def test_forget(self):
self.driver.put("forget", "1")
self.assertEqual(self.driver.get("forget"), "1")
self.driver.forget("forget")
self.assertEqual(self.driver.get("forget"), None)
def test_remember(self):
self.driver.remember("remember", lambda cache: (cache.put("remember", "1", 10)))
self.assertEqual(self.driver.get("remember"), "1")
def test_remember_datatypes(self):
self.driver.remember(
"dic", lambda cache: (cache.put("dic", {"id": 1, "name": "Joe"}, 10))
)
self.assertIsInstance(self.driver.get("dic"), dict)
self.driver.remember("list", lambda cache: (cache.put("list", [1, 2, 3], 10)))
self.assertIsInstance(self.driver.get("list"), list)
def test_flush(self):
self.driver.remember(
"dic", lambda cache: (cache.put("dic", {"id": 1, "name": "Joe"}, 10))
)
self.driver.flush()
self.assertIsNone(self.driver.get("dic"))
| 34.758065 | 88 | 0.610673 |
79483625adcecea4c164599a0991c727dbdc532f
| 21,289 |
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_06_01/aio/operations/_virtual_network_peerings_operations.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 8 |
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_06_01/aio/operations/_virtual_network_peerings_operations.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | null | null | null |
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_06_01/aio/operations/_virtual_network_peerings_operations.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations:
"""VirtualNetworkPeeringsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
async def get(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
**kwargs
) -> "models.VirtualNetworkPeering":
"""Gets the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_06_01.models.VirtualNetworkPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
virtual_network_peering_parameters: "models.VirtualNetworkPeering",
**kwargs
) -> "models.VirtualNetworkPeering":
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
virtual_network_peering_parameters: "models.VirtualNetworkPeering",
**kwargs
) -> AsyncLROPoller["models.VirtualNetworkPeering"]:
"""Creates or updates a peering in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the peering.
:type virtual_network_peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create or update virtual
network peering operation.
:type virtual_network_peering_parameters: ~azure.mgmt.network.v2017_06_01.models.VirtualNetworkPeering
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_06_01.models.VirtualNetworkPeering]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
virtual_network_peering_parameters=virtual_network_peering_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def list(
self,
resource_group_name: str,
virtual_network_name: str,
**kwargs
) -> AsyncIterable["models.VirtualNetworkPeeringListResult"]:
"""Gets all virtual network peerings in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkPeeringListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_06_01.models.VirtualNetworkPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeeringListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings'} # type: ignore
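# Illustrative usage sketch (not part of the generated client). The client construction
# below is an assumption based on the azure-mgmt-network aio package layout:
#
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.network.v2017_06_01.aio import NetworkManagementClient
#
#   async def remove_peering():
#       async with NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#           poller = await client.virtual_network_peerings.begin_delete(
#               "my-resource-group", "my-vnet", "my-peering")
#           await poller.result()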
| 51.175481 | 250 | 0.684344 |
7948362f72676268b4dd45826089cf36b18b3b67
| 8,020 |
py
|
Python
|
src/pdftableextract/extracttab.py
|
ahlusar1989/pdf-table-extractor_saran_version1
|
521e39eda459c0321a4f05a5c44e60dcbd865ec2
|
[
"MIT"
] | null | null | null |
src/pdftableextract/extracttab.py
|
ahlusar1989/pdf-table-extractor_saran_version1
|
521e39eda459c0321a4f05a5c44e60dcbd865ec2
|
[
"MIT"
] | null | null | null |
src/pdftableextract/extracttab.py
|
ahlusar1989/pdf-table-extractor_saran_version1
|
521e39eda459c0321a4f05a5c44e60dcbd865ec2
|
[
"MIT"
] | null | null | null |
# Description : PDF Table Extraction Utility
# Original Author : Ian McEwan, Ashima Research.
# Maintainer : ijm
# Lastmod : 20130402 (ijm)
# License : Copyright (C) 2011 Ashima Research. All rights reserved.
# Distributed under the MIT Expat License. See LICENSE file.
# https://github.com/ashima/pdf-table-extract
import sys, argparse, subprocess, re, csv, json
from numpy import *
from pipes import quote
from xml.dom.minidom import getDOMImplementation
# Processing function.
def process_page(pgs) :
(pg,frow,lrow) = (map(int,(pgs.split(":")))+[None,None])[0:3]
p = subprocess.Popen( ("pdftoppm -gray -r %d -f %d -l %d %s " %
(args.r,pg,pg,quote(args.infile))),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=True )
#-----------------------------------------------------------------------
  # image load section.
(maxval, width, height, data) = readPNM(p.stdout)
pad = int(args.pad)
height+=pad*2
width+=pad*2
  # re-embed the image with a white pad.
bmp = ones( (height,width) , dtype=bool )
bmp[pad:height-pad,pad:width-pad] = ( data[:,:] > int(255.0*args.g/100.0) )
  # Set up the debugging image.
img = zeros( (height,width,3) , dtype=uint8 )
img[:,:,0] = bmp*255
img[:,:,1] = bmp*255
img[:,:,2] = bmp*255
#-----------------------------------------------------------------------
# Find bounding box.
t=0
while t < height and sum(bmp[t,:]==0) == 0 :
t=t+1
if t > 0 :
t=t-1
b=height-1
while b > t and sum(bmp[b,:]==0) == 0 :
b=b-1
if b < height-1:
b = b+1
l=0
while l < width and sum(bmp[:,l]==0) == 0 :
l=l+1
if l > 0 :
l=l-1
r=width-1
while r > l and sum(bmp[:,r]==0) == 0 :
r=r-1
if r < width-1 :
r=r+1
# Mark bounding box.
bmp[t,:] = 0
bmp[b,:] = 0
bmp[:,l] = 0
bmp[:,r] = 0
def boxOfString(x,p) :
s = x.split(":")
if len(s) < 4 :
raise Exception("boxes have format left:top:right:bottom[:page]")
return ([args.r * float(x) + args.pad for x in s[0:4] ]
+ [ p if len(s)<5 else int(s[4]) ] )
# translate crop to paint white.
whites = []
if args.crop :
(l,t,r,b,p) = boxOfString(args.crop,pg)
whites.extend( [ (0,0,l,height,p), (0,0,width,t,p),
(r,0,width,height,p), (0,b,width,height,p) ] )
# paint white ...
if args.white :
whites.extend( [ boxOfString(b, pg) for b in args.white ] )
for (l,t,r,b,p) in whites :
if p == pg :
bmp[ t:b+1,l:r+1 ] = 1
img[ t:b+1,l:r+1 ] = [255,255,255]
# paint black ...
if args.black :
for b in args.black :
(l,t,r,b) = [args.r * float(x) + args.pad for x in b.split(":") ]
bmp[ t:b+1,l:r+1 ] = 0
img[ t:b+1,l:r+1 ] = [0,0,0]
if args.checkcrop :
dumpImage(args,bmp,img)
sys.exit(0)
#-----------------------------------------------------------------------
# Line finding section.
#
  # Find all vertical or horizontal lines that are longer than lthresh;
  # these are considered lines on the table grid.
lthresh = int(args.l * args.r)
vs = zeros(width, dtype=int)
for i in range(width) :
dd = diff( where(bmp[:,i])[0] )
if len(dd)>0:
v = max ( dd )
if v > lthresh :
vs[i] = 1
else:
# it was a solid black line.
if bmp[0,i] == 0 :
vs[i] = 1
vd= ( where(diff(vs[:]))[0] +1 )
hs = zeros(height, dtype=int)
for j in range(height) :
dd = diff( where(bmp[j,:]==1)[0] )
if len(dd) > 0 :
h = max ( dd )
if h > lthresh :
hs[j] = 1
else:
# it was a solid black line.
if bmp[j,0] == 0 :
hs[j] = 1
hd=( where(diff(hs[:]==1))[0] +1 )
#-----------------------------------------------------------------------
  # Look for dividers that are too large.
maxdiv=10
i=0
while i < len(vd) :
if vd[i+1]-vd[i] > maxdiv :
vd = delete(vd,i)
vd = delete(vd,i)
else:
i=i+2
j = 0
while j < len(hd):
if hd[j+1]-hd[j] > maxdiv :
hd = delete(hd,j)
hd = delete(hd,j)
else:
j=j+2
if args.checklines :
for i in vd :
img[:,i] = [255,0,0] # red
for j in hd :
img[j,:] = [0,0,255] # blue
dumpImage(args,bmp,img)
sys.exit(0)
#-----------------------------------------------------------------------
  # divider checking.
  #
  # At this point vd holds the x coordinates of the vertical divider
  # transitions and hd holds the y coordinates of the horizontal divider
  # transitions for each vertical and horizontal line in the table grid.
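  # Illustrative example (made-up values): with two vertical grid lines, vd might be
  # [102, 105, 350, 353]; each adjacent (start, end) pair brackets one line's extent in
  # pixel x coordinates, and hd is the analogous list of y coordinates for horizontal lines.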
def isDiv(a, l,r,t,b) :
# if any col or row (in axis) is all zeros ...
return sum( sum(bmp[t:b, l:r], axis=a)==0 ) >0
if args.checkdivs :
img = img / 2
for j in range(0,len(hd),2):
for i in range(0,len(vd),2):
if i>0 :
(l,r,t,b) = (vd[i-1], vd[i], hd[j], hd[j+1])
img[ t:b, l:r, 1 ] = 192
if isDiv(1, l,r,t,b) :
img[ t:b, l:r, 0 ] = 0
img[ t:b, l:r, 2 ] = 255
if j>0 :
(l,r,t,b) = (vd[i], vd[i+1], hd[j-1], hd[j] )
img[ t:b, l:r, 1 ] = 128
if isDiv(0, l,r,t,b) :
img[ t:b, l:r, 0 ] = 255
img[ t:b, l:r, 2 ] = 0
dumpImage(args,bmp,img)
sys.exit(0)
#-----------------------------------------------------------------------
# Cell finding section.
  # This algorithm is width-hungry, and always generates rectangular
  # boxes.
cells =[]
touched = zeros( (len(hd), len(vd)),dtype=bool )
j = 0
while j*2+2 < len (hd) :
i = 0
while i*2+2 < len(vd) :
u = 1
v = 1
if not touched[j,i] :
while 2+(i+u)*2 < len(vd) and \
not isDiv( 0, vd[ 2*(i+u) ], vd[ 2*(i+u)+1],
hd[ 2*(j+v)-1 ], hd[ 2*(j+v) ] ):
u=u+1
bot = False
while 2+(j+v)*2 < len(hd) and not bot :
bot = False
for k in range(1,u+1) :
bot |= isDiv( 1, vd[ 2*(i+k)-1 ], vd[ 2*(i+k)],
hd[ 2*(j+v) ], hd[ 2*(j+v)+1 ] )
if not bot :
v=v+1
cells.append( (i,j,u,v) )
touched[ j:j+v, i:i+u] = True
i = i+1
j=j+1
if args.checkcells :
nc = len(cells)+0.
img = img / 2
for k in range(len(cells)):
(i,j,u,v) = cells[k]
(l,r,t,b) = ( vd[2*i+1] , vd[ 2*(i+u) ], hd[2*j+1], hd[2*(j+v)] )
img[ t:b, l:r ] += col( k/nc )
dumpImage(args,bmp,img)
sys.exit(0)
#-----------------------------------------------------------------------
# fork out to extract text for each cell.
whitespace = re.compile( r'\s+')
def getCell( (i,j,u,v) ):
(l,r,t,b) = ( vd[2*i+1] , vd[ 2*(i+u) ], hd[2*j+1], hd[2*(j+v)] )
p = subprocess.Popen(
("pdftotext -r %d -x %d -y %d -W %d -H %d -layout -nopgbrk -f %d -l %d %s -"
% (args.r, l-pad, t-pad, r-l, b-t, pg, pg, quote(args.infile) ) ),
stdout=subprocess.PIPE, shell=True )
ret = p.communicate()[0]
if args.w != 'raw' :
ret = whitespace.sub( "" if args.w == "none" else " ", ret )
if len(ret) > 0 :
ret = ret[ (1 if ret[0]==' ' else 0) :
len(ret) - (1 if ret[-1]==' ' else 0) ]
return (i,j,u,v,pg,ret)
#if args.boxes :
# cells = [ x + (pg,"",) for x in cells ]
#else :
# cells = map(getCell, cells)
if args.boxes :
cells = [ x + (pg,"",) for x in cells if
( frow == None or (x[1] >= frow and x[1] <= lrow)) ]
else :
cells = [ getCell(x) for x in cells if
( frow == None or (x[1] >= frow and x[1] <= lrow)) ]
return cells
#-----------------------------------------------------------------------
# main
def main_script():
    global args  # process_page() and getCell() read the parsed args at module scope
    args = procargs()
    cells = []
    for pgs in args.page :
        cells.extend(process_page(pgs))
    { "cells_csv" : o_cells_csv, "cells_json" : o_cells_json,
      "cells_xml" : o_cells_xml, "table_csv" : o_table_csv,
      "table_html": o_table_html, "table_chtml": o_table_html,
    } [ args.t ](cells,args.page)
| 26.912752 | 80 | 0.470574 |
79483647122868ef53c432320aa50f4b82954a18
| 1,419 |
py
|
Python
|
validator.py
|
Zafiyetsiz/tc-kimlilk-no-dogrulama
|
4cfb2ba3656660439240cbb776dca33be37dde44
|
[
"MIT"
] | 1 |
2022-02-01T19:50:00.000Z
|
2022-02-01T19:50:00.000Z
|
validator.py
|
Zafiyetsiz/tc-kimlilk-no-dogrulama
|
4cfb2ba3656660439240cbb776dca33be37dde44
|
[
"MIT"
] | null | null | null |
validator.py
|
Zafiyetsiz/tc-kimlilk-no-dogrulama
|
4cfb2ba3656660439240cbb776dca33be37dde44
|
[
"MIT"
] | null | null | null |
#################################################
#PROJECT 2 : VALIDATOR
#Language :Turkish
#Turkish ID (identification) number validator
#The simplest and easiest way to validate a TC identity number
#Contact me on ;
#Telegram : Zafiyetsiz0
#Instagram : Zafiyetsiz
#################################################
print("tc nin 1. rakamını yazın")
a= input(" 1.rakam: ")
a= int(a)
print("tc nin 2. rakamını yazın")
b = input("2.rakam: ")
b= int(b)
print("tc nin 3. rakamını yazın")
c = input("3.rakam: ")
c= int(c)
print("tc nin 4. rakamını yazın")
d = input("4.rakam: ")
d= int(d)
print("tc nin 5. rakamını yazın")
e = input("5.rakam: ")
e= int(e)
print("tc nin 6. rakamını yazın")
f = input("6.rakam: ")
f= int(f)
print("tc nin 7. rakamını yazın")
g = input("7.rakam: ")
g= int(g)
print("tc nin 8. rakamını yazın")
h = input("8.rakam: ")
h= int(h)
print("tc nin 9. rakamını yazın")
i = input("9.rakam: ")
i= int(i)
print("tc nin 10. rakamını yazın")
j = input("10.rakam: ")
j = int(j)
print("tc nin 11. rakamını yazın")
k = input("11.rakam: ")
k= int(k)
tekler=(a + c + e + g + i)
tekler = int(tekler)
Çiftler=(b + d + f + h)
Çiftler = int(Çiftler)
x =(tekler * 7)
x = int(x)
y =(Çiftler * 9)
y = int(y)
# 10th-digit check: (7*sum of odd-position digits + 9*sum of even-position digits) mod 10
onuncu = (x + y) % 10
w=(a + b +c + d + e + f + g + h + i + j)
o=(w % 10) #11.rakam
if onuncu == j and o == k:
    print("bu tc kimlik no DOĞRU.")
else:
    print("bu tc kimlik no YANLIŞ")
| 15.766667 | 49 | 0.541226 |
794836c7e8b2eecd099044b360b17efea81f0123
| 2,367 |
py
|
Python
|
ae/modeling_stack.py
|
snnall/temp
|
7742d2efc6bd3a23377c4b865e968019891fd94a
|
[
"MIT"
] | 1 |
2021-08-11T17:32:54.000Z
|
2021-08-11T17:32:54.000Z
|
ae/modeling_stack.py
|
snnall/temp
|
7742d2efc6bd3a23377c4b865e968019891fd94a
|
[
"MIT"
] | null | null | null |
ae/modeling_stack.py
|
snnall/temp
|
7742d2efc6bd3a23377c4b865e968019891fd94a
|
[
"MIT"
] | 1 |
2021-12-08T23:20:32.000Z
|
2021-12-08T23:20:32.000Z
|
from aws_cdk import (
aws_sagemaker as sagemaker,
aws_iam as iam,
core
)
class AeModelStack(core.Construct):
def __init__(self, scope: core.Construct, construct_id: str, bucket_name: str, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
self._PREFIX = construct_id
# Create Role for the SageMaker Backend
self._service_role = iam.Role(
self, f'{self._PREFIX}-ServiceRole',
role_name=f'{self._PREFIX}-ServiceRole',
assumed_by=iam.CompositePrincipal(
iam.ServicePrincipal("sagemaker.amazonaws.com"),
iam.ServicePrincipal("lambda.amazonaws.com"),
iam.ServicePrincipal("s3.amazonaws.com")
)
)
self._service_role.add_managed_policy(iam.ManagedPolicy.from_aws_managed_policy_name('AWSCodeCommitPowerUser'))
self._service_role.add_managed_policy(iam.ManagedPolicy.from_aws_managed_policy_name('AmazonSageMakerFullAccess'))
self._service_role.add_managed_policy(iam.ManagedPolicy.from_aws_managed_policy_name('AmazonS3FullAccess'))
# Create SageMaker instance
self._notebook_lifecycle = sagemaker.CfnNotebookInstanceLifecycleConfig(
self, f'{self._PREFIX}-LifeCycleConfig',
notebook_instance_lifecycle_config_name='ae-config',
on_create=[sagemaker.CfnNotebookInstanceLifecycleConfig.NotebookInstanceLifecycleHookProperty(
content=core.Fn.base64(f"""
#!/bin/bash
set -ex
BUCKET="{bucket_name}"
PREFIX="sagemaker"
DIRECTORY="ae_detection"
cd home/ec2-user/SageMaker && mkdir -p $DIRECTORY
aws s3 cp s3://$BUCKET/$PREFIX /home/ec2-user/SageMaker/$DIRECTORY --recursive
sudo chown "ec2-user":"ec2-user" /home/ec2-user/SageMaker/$DIRECTORY --recursive
""")
)]
)
self.notebook = sagemaker.CfnNotebookInstance(
self, f'{self._PREFIX}-notebook',
role_arn=self._service_role.role_arn,
instance_type='ml.t2.medium',
root_access="Enabled",
notebook_instance_name='AdverseEventDetectionModeling',
lifecycle_config_name ='ae-config'
)
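# Illustrative wiring sketch (assumptions: the enclosing Stack class and the names below are
# placeholders, not taken from this file):
#
#   class AeStack(core.Stack):
#       def __init__(self, scope, construct_id, **kwargs):
#           super().__init__(scope, construct_id, **kwargs)
#           AeModelStack(self, "AeModel", bucket_name="my-ae-assets-bucket")
#
#   app = core.App()
#   AeStack(app, "ae-stack")
#   app.synth()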
| 43.036364 | 122 | 0.630334 |
794836daf7220e39e24f7f05864a9d7b63985306
| 740 |
py
|
Python
|
HW1/linperceptron.py
|
miguel-mzbi/MachineLearning-DataMining
|
d589e89c85ccc7cba129c9a489c49f61a4298c5d
|
[
"MIT"
] | null | null | null |
HW1/linperceptron.py
|
miguel-mzbi/MachineLearning-DataMining
|
d589e89c85ccc7cba129c9a489c49f61a4298c5d
|
[
"MIT"
] | null | null | null |
HW1/linperceptron.py
|
miguel-mzbi/MachineLearning-DataMining
|
d589e89c85ccc7cba129c9a489c49f61a4298c5d
|
[
"MIT"
] | null | null | null |
# Input: number of iterations L
# numpy matrix X of features, with n rows (samples), d columns (features)
# X[i,j] is the j-th feature of the i-th sample
# numpy vector y of labels, with n rows (samples), 1 column
# y[i] is the label (+1 or -1) of the i-th sample
# Output: numpy vector theta of d rows, 1 column
import numpy as np
def run(L,X,y):
n, d = X.shape
theta = np.zeros((d,1))
for it in range(1, L +1):
for t in range(n):
if (y[t]*(np.dot(X[t], theta))) <= 0:
temp = y[t]*X[t]
temp = np.reshape(temp, (d, 1))
#theta = np.transpose(np.transpose(theta) + temp)
theta = theta + temp
return theta
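# Minimal usage sketch (toy data; the values below are illustrative only):
if __name__ == "__main__":
    X = np.array([[1.0, 2.0], [2.0, 1.0], [-1.0, -2.0], [-2.0, -1.0]])
    y = np.array([[1], [1], [-1], [-1]])
    theta = run(10, X, y)
    print(theta)  # learned (2, 1) weight vector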
| 37 | 80 | 0.536486 |
79483708853e90ac2b92fb95e35dee01fa88aa88
| 2,830 |
py
|
Python
|
jobs/transforms/topic_validation_test.py
|
WebFlakyTest/oppia
|
520e35490eae8171beb035fbafc2948983abec75
|
[
"Apache-2.0"
] | 1 |
2021-08-17T20:33:12.000Z
|
2021-08-17T20:33:12.000Z
|
jobs/transforms/topic_validation_test.py
|
WebFlakyTest/oppia
|
520e35490eae8171beb035fbafc2948983abec75
|
[
"Apache-2.0"
] | null | null | null |
jobs/transforms/topic_validation_test.py
|
WebFlakyTest/oppia
|
520e35490eae8171beb035fbafc2948983abec75
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for jobs.transforms.topic_validation."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import datetime
from core.platform import models
from jobs import job_test_utils
from jobs.transforms import topic_validation
from jobs.types import topic_validation_errors
import apache_beam as beam
(topic_models,) = models.Registry.import_models([models.NAMES.topic])
class ValidateCanonicalNameMatchesNameInLowercaseTests(
job_test_utils.PipelinedTestBase):
NOW = datetime.datetime.utcnow()
def test_process_for_not_matching_canonical_name(self):
model_with_different_name = topic_models.TopicModel(
id='123',
name='name',
created_on=self.NOW,
last_updated=self.NOW,
url_fragment='name-two',
canonical_name='canonical_name',
next_subtopic_id=1,
language_code='en',
subtopic_schema_version=0,
story_reference_schema_version=0
)
output = (
self.pipeline
| beam.Create([model_with_different_name])
| beam.ParDo(
topic_validation.ValidateCanonicalNameMatchesNameInLowercase())
)
self.assert_pcoll_equal(output, [
topic_validation_errors.ModelCanonicalNameMismatchError(
model_with_different_name)
])
def test_process_for_matching_canonical_name(self):
model_with_same_name = topic_models.TopicModel(
id='123',
name='SOMEthing',
created_on=self.NOW,
last_updated=self.NOW,
url_fragment='name-two',
canonical_name='something',
next_subtopic_id=1,
language_code='en',
subtopic_schema_version=0,
story_reference_schema_version=0
)
output = (
self.pipeline
| beam.Create([model_with_same_name])
| beam.ParDo(
topic_validation.ValidateCanonicalNameMatchesNameInLowercase())
)
self.assert_pcoll_equal(output, [])
| 34.096386 | 79 | 0.674205 |
7948376a29750c05e1d2620cd396330a877447cd
| 15,891 |
py
|
Python
|
tensorflow/sample_rnn.py
|
reachlin/machinelearning
|
eb8ba02aa0da86ccf9991fa609afa84d8c180a21
|
[
"MIT"
] | 11 |
2017-12-05T17:37:18.000Z
|
2020-07-01T21:47:31.000Z
|
tensorflow/sample_rnn.py
|
reachlin/machinelearning
|
eb8ba02aa0da86ccf9991fa609afa84d8c180a21
|
[
"MIT"
] | null | null | null |
tensorflow/sample_rnn.py
|
reachlin/machinelearning
|
eb8ba02aa0da86ccf9991fa609afa84d8c180a21
|
[
"MIT"
] | 6 |
2017-09-11T12:31:19.000Z
|
2020-12-13T16:28:48.000Z
|
"""
Modified from: https://github.com/tensorflow/models/blob/master/tutorials/rnn/ptb/ptb_word_lm.py
RNN with LSTM cells
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
import tensorflow as tf
import reader
import util
from tensorflow.python.client import device_lib
flags = tf.flags
logging = tf.logging
flags.DEFINE_string(
"model", "small",
"A type of model. Possible options are: small, medium, large.")
flags.DEFINE_string("data_path", None,
"Where the training/test data is stored.")
flags.DEFINE_string("save_path", None,
"Model output directory.")
flags.DEFINE_bool("use_fp16", False,
"Train using 16-bit floats instead of 32bit floats")
flags.DEFINE_integer("num_gpus", 1,
"If larger than 1, Grappler AutoParallel optimizer "
"will create multiple training replicas with each GPU "
"running one replica.")
flags.DEFINE_string("rnn_mode", None,
"The low level implementation of lstm cell: one of CUDNN, "
"BASIC, and BLOCK, representing cudnn_lstm, basic_lstm, "
"and lstm_block_cell classes.")
FLAGS = flags.FLAGS
BASIC = "basic"
CUDNN = "cudnn"
BLOCK = "block"
def data_type():
return tf.float16 if FLAGS.use_fp16 else tf.float32
class PTBInput(object):
"""The input data."""
def __init__(self, config, data, name=None):
self.batch_size = batch_size = config.batch_size
self.num_steps = num_steps = config.num_steps
self.epoch_size = ((len(data) // batch_size) - 1) // num_steps
self.input_data, self.targets = reader.ptb_producer(
data, batch_size, num_steps, name=name)
class PTBModel(object):
"""The PTB model."""
def __init__(self, is_training, config, input_):
self._is_training = is_training
self._input = input_
self._rnn_params = None
self._cell = None
self.batch_size = input_.batch_size
self.num_steps = input_.num_steps
size = config.hidden_size
vocab_size = config.vocab_size
with tf.device("/cpu:0"):
embedding = tf.get_variable(
"embedding", [vocab_size, size], dtype=data_type())
inputs = tf.nn.embedding_lookup(embedding, input_.input_data)
if is_training and config.keep_prob < 1:
inputs = tf.nn.dropout(inputs, config.keep_prob)
output, state = self._build_rnn_graph(inputs, config, is_training)
softmax_w = tf.get_variable(
"softmax_w", [size, vocab_size], dtype=data_type())
softmax_b = tf.get_variable("softmax_b", [vocab_size], dtype=data_type())
logits = tf.nn.xw_plus_b(output, softmax_w, softmax_b)
# Reshape logits to be a 3-D tensor for sequence loss
logits = tf.reshape(logits, [self.batch_size, self.num_steps, vocab_size])
# Use the contrib sequence loss and average over the batches
loss = tf.contrib.seq2seq.sequence_loss(
logits,
input_.targets,
tf.ones([self.batch_size, self.num_steps], dtype=data_type()),
average_across_timesteps=False,
average_across_batch=True)
# Update the cost
self._cost = tf.reduce_sum(loss)
self._final_state = state
if not is_training:
return
self._lr = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(self._cost, tvars),
config.max_grad_norm)
optimizer = tf.train.GradientDescentOptimizer(self._lr)
self._train_op = optimizer.apply_gradients(
zip(grads, tvars),
global_step=tf.contrib.framework.get_or_create_global_step())
self._new_lr = tf.placeholder(
tf.float32, shape=[], name="new_learning_rate")
self._lr_update = tf.assign(self._lr, self._new_lr)
def _build_rnn_graph(self, inputs, config, is_training):
if config.rnn_mode == CUDNN:
return self._build_rnn_graph_cudnn(inputs, config, is_training)
else:
return self._build_rnn_graph_lstm(inputs, config, is_training)
def _build_rnn_graph_cudnn(self, inputs, config, is_training):
"""Build the inference graph using CUDNN cell."""
inputs = tf.transpose(inputs, [1, 0, 2])
self._cell = tf.contrib.cudnn_rnn.CudnnLSTM(
num_layers=config.num_layers,
num_units=config.hidden_size,
input_size=config.hidden_size,
dropout=1 - config.keep_prob if is_training else 0)
params_size_t = self._cell.params_size()
self._rnn_params = tf.get_variable(
"lstm_params",
initializer=tf.random_uniform(
[params_size_t], -config.init_scale, config.init_scale),
validate_shape=False)
c = tf.zeros([config.num_layers, self.batch_size, config.hidden_size],
tf.float32)
h = tf.zeros([config.num_layers, self.batch_size, config.hidden_size],
tf.float32)
self._initial_state = (tf.contrib.rnn.LSTMStateTuple(h=h, c=c),)
outputs, h, c = self._cell(inputs, h, c, self._rnn_params, is_training)
outputs = tf.transpose(outputs, [1, 0, 2])
outputs = tf.reshape(outputs, [-1, config.hidden_size])
return outputs, (tf.contrib.rnn.LSTMStateTuple(h=h, c=c),)
def _get_lstm_cell(self, config, is_training):
if config.rnn_mode == BASIC:
return tf.contrib.rnn.BasicLSTMCell(
config.hidden_size, forget_bias=0.0, state_is_tuple=True,
reuse=not is_training)
if config.rnn_mode == BLOCK:
return tf.contrib.rnn.LSTMBlockCell(
config.hidden_size, forget_bias=0.0)
raise ValueError("rnn_mode %s not supported" % config.rnn_mode)
def _build_rnn_graph_lstm(self, inputs, config, is_training):
"""Build the inference graph using canonical LSTM cells."""
# Slightly better results can be obtained with forget gate biases
# initialized to 1 but the hyperparameters of the model would need to be
# different than reported in the paper.
cell = self._get_lstm_cell(config, is_training)
if is_training and config.keep_prob < 1:
cell = tf.contrib.rnn.DropoutWrapper(
cell, output_keep_prob=config.keep_prob)
cell = tf.contrib.rnn.MultiRNNCell(
[cell for _ in range(config.num_layers)], state_is_tuple=True)
self._initial_state = cell.zero_state(config.batch_size, data_type())
state = self._initial_state
# Simplified version of tensorflow_models/tutorials/rnn/rnn.py's rnn().
# This builds an unrolled LSTM for tutorial purposes only.
# In general, use the rnn() or state_saving_rnn() from rnn.py.
#
# The alternative version of the code below is:
#
# inputs = tf.unstack(inputs, num=num_steps, axis=1)
# outputs, state = tf.contrib.rnn.static_rnn(cell, inputs,
# initial_state=self._initial_state)
outputs = []
with tf.variable_scope("RNN"):
for time_step in range(self.num_steps):
if time_step > 0: tf.get_variable_scope().reuse_variables()
(cell_output, state) = cell(inputs[:, time_step, :], state)
outputs.append(cell_output)
output = tf.reshape(tf.concat(outputs, 1), [-1, config.hidden_size])
return output, state
def assign_lr(self, session, lr_value):
session.run(self._lr_update, feed_dict={self._new_lr: lr_value})
def export_ops(self, name):
"""Exports ops to collections."""
self._name = name
ops = {util.with_prefix(self._name, "cost"): self._cost}
if self._is_training:
ops.update(lr=self._lr, new_lr=self._new_lr, lr_update=self._lr_update)
if self._rnn_params:
ops.update(rnn_params=self._rnn_params)
    for name, op in ops.items():
tf.add_to_collection(name, op)
self._initial_state_name = util.with_prefix(self._name, "initial")
self._final_state_name = util.with_prefix(self._name, "final")
util.export_state_tuples(self._initial_state, self._initial_state_name)
util.export_state_tuples(self._final_state, self._final_state_name)
def import_ops(self):
"""Imports ops from collections."""
if self._is_training:
self._train_op = tf.get_collection_ref("train_op")[0]
self._lr = tf.get_collection_ref("lr")[0]
self._new_lr = tf.get_collection_ref("new_lr")[0]
self._lr_update = tf.get_collection_ref("lr_update")[0]
rnn_params = tf.get_collection_ref("rnn_params")
if self._cell and rnn_params:
params_saveable = tf.contrib.cudnn_rnn.RNNParamsSaveable(
self._cell,
self._cell.params_to_canonical,
self._cell.canonical_to_params,
rnn_params,
base_variable_scope="Model/RNN")
tf.add_to_collection(tf.GraphKeys.SAVEABLE_OBJECTS, params_saveable)
self._cost = tf.get_collection_ref(util.with_prefix(self._name, "cost"))[0]
num_replicas = FLAGS.num_gpus if self._name == "Train" else 1
self._initial_state = util.import_state_tuples(
self._initial_state, self._initial_state_name, num_replicas)
self._final_state = util.import_state_tuples(
self._final_state, self._final_state_name, num_replicas)
@property
def input(self):
return self._input
@property
def initial_state(self):
return self._initial_state
@property
def cost(self):
return self._cost
@property
def final_state(self):
return self._final_state
@property
def lr(self):
return self._lr
@property
def train_op(self):
return self._train_op
@property
def initial_state_name(self):
return self._initial_state_name
@property
def final_state_name(self):
return self._final_state_name
class SmallConfig(object):
"""Small config."""
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 20
hidden_size = 200
max_epoch = 4
max_max_epoch = 13
keep_prob = 1.0
lr_decay = 0.5
batch_size = 20
vocab_size = 10000
rnn_mode = CUDNN
class MediumConfig(object):
"""Medium config."""
init_scale = 0.05
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 35
hidden_size = 650
max_epoch = 6
max_max_epoch = 39
keep_prob = 0.5
lr_decay = 0.8
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
class LargeConfig(object):
"""Large config."""
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 1500
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
class TestConfig(object):
"""Tiny config, for testing."""
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 1
num_layers = 1
num_steps = 2
hidden_size = 2
max_epoch = 1
max_max_epoch = 1
keep_prob = 1.0
lr_decay = 0.5
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
def run_epoch(session, model, eval_op=None, verbose=False):
"""Runs the model on the given data."""
start_time = time.time()
costs = 0.0
iters = 0
state = session.run(model.initial_state)
fetches = {
"cost": model.cost,
"final_state": model.final_state,
}
if eval_op is not None:
fetches["eval_op"] = eval_op
for step in range(model.input.epoch_size):
feed_dict = {}
for i, (c, h) in enumerate(model.initial_state):
feed_dict[c] = state[i].c
feed_dict[h] = state[i].h
vals = session.run(fetches, feed_dict)
cost = vals["cost"]
state = vals["final_state"]
costs += cost
iters += model.input.num_steps
if verbose and step % (model.input.epoch_size // 10) == 10:
print("%.3f perplexity: %.3f speed: %.0f wps" %
(step * 1.0 / model.input.epoch_size, np.exp(costs / iters),
iters * model.input.batch_size * max(1, FLAGS.num_gpus) /
(time.time() - start_time)))
return np.exp(costs / iters)
def get_config():
"""Get model config."""
config = None
if FLAGS.model == "small":
config = SmallConfig()
elif FLAGS.model == "medium":
config = MediumConfig()
elif FLAGS.model == "large":
config = LargeConfig()
elif FLAGS.model == "test":
config = TestConfig()
else:
raise ValueError("Invalid model: %s", FLAGS.model)
if FLAGS.rnn_mode:
config.rnn_mode = FLAGS.rnn_mode
  if FLAGS.num_gpus != 1 or tf.__version__ < "1.3.0":
config.rnn_mode = BASIC
return config
def main(_):
if not FLAGS.data_path:
raise ValueError("Must set --data_path to PTB data directory")
gpus = [
x.name for x in device_lib.list_local_devices() if x.device_type == "GPU"
]
if FLAGS.num_gpus > len(gpus):
raise ValueError(
"Your machine has only %d gpus "
"which is less than the requested --num_gpus=%d."
% (len(gpus), FLAGS.num_gpus))
raw_data = reader.ptb_raw_data(FLAGS.data_path)
train_data, valid_data, test_data, _ = raw_data
config = get_config()
eval_config = get_config()
eval_config.batch_size = 1
eval_config.num_steps = 1
with tf.Graph().as_default():
initializer = tf.random_uniform_initializer(-config.init_scale,
config.init_scale)
with tf.name_scope("Train"):
train_input = PTBInput(config=config, data=train_data, name="TrainInput")
with tf.variable_scope("Model", reuse=None, initializer=initializer):
m = PTBModel(is_training=True, config=config, input_=train_input)
tf.summary.scalar("Training Loss", m.cost)
tf.summary.scalar("Learning Rate", m.lr)
with tf.name_scope("Valid"):
valid_input = PTBInput(config=config, data=valid_data, name="ValidInput")
with tf.variable_scope("Model", reuse=True, initializer=initializer):
mvalid = PTBModel(is_training=False, config=config, input_=valid_input)
tf.summary.scalar("Validation Loss", mvalid.cost)
with tf.name_scope("Test"):
test_input = PTBInput(
config=eval_config, data=test_data, name="TestInput")
with tf.variable_scope("Model", reuse=True, initializer=initializer):
mtest = PTBModel(is_training=False, config=eval_config,
input_=test_input)
models = {"Train": m, "Valid": mvalid, "Test": mtest}
    for name, model in models.items():
model.export_ops(name)
metagraph = tf.train.export_meta_graph()
if tf.__version__ < "1.1.0" and FLAGS.num_gpus > 1:
raise ValueError("num_gpus > 1 is not supported for TensorFlow versions "
"below 1.1.0")
soft_placement = False
if FLAGS.num_gpus > 1:
soft_placement = True
util.auto_parallel(metagraph, m)
with tf.Graph().as_default():
tf.train.import_meta_graph(metagraph)
for model in models.values():
model.import_ops()
sv = tf.train.Supervisor(logdir=FLAGS.save_path)
config_proto = tf.ConfigProto(allow_soft_placement=soft_placement)
with sv.managed_session(config=config_proto) as session:
for i in range(config.max_max_epoch):
lr_decay = config.lr_decay ** max(i + 1 - config.max_epoch, 0.0)
m.assign_lr(session, config.learning_rate * lr_decay)
print("Epoch: %d Learning rate: %.3f" % (i + 1, session.run(m.lr)))
train_perplexity = run_epoch(session, m, eval_op=m.train_op,
verbose=True)
print("Epoch: %d Train Perplexity: %.3f" % (i + 1, train_perplexity))
valid_perplexity = run_epoch(session, mvalid)
print("Epoch: %d Valid Perplexity: %.3f" % (i + 1, valid_perplexity))
test_perplexity = run_epoch(session, mtest)
print("Test Perplexity: %.3f" % test_perplexity)
if FLAGS.save_path:
print("Saving model to %s." % FLAGS.save_path)
sv.saver.save(session, FLAGS.save_path, global_step=sv.global_step)
if __name__ == "__main__":
tf.app.run()
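# --- Illustrative invocation sketch (not part of the original file). ---
# Assuming the PTB dataset has been downloaded and reader.py/util.py sit
# alongside this script, a typical run might look like:
#   python sample_rnn.py --data_path=./simple-examples/data --model=small \
#       --save_path=./ptb_out
# The flags are the ones defined above via tf.flags; the paths are placeholders.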
| 33.667373 | 96 | 0.672771 |
7948383eecc62f71e8f02cfc433f9d7bb88e7d71
| 26,950 |
py
|
Python
|
test/functional/test_framework/test_framework.py
|
adgeese/gbcr
|
51145945d78c588f3ec3dc8595e14b1faea1a4e7
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/test_framework.py
|
adgeese/gbcr
|
51145945d78c588f3ec3dc8595e14b1faea1a4e7
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/test_framework.py
|
adgeese/gbcr
|
51145945d78c588f3ec3dc8595e14b1faea1a4e7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Gold BCR Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
import configparser
from enum import Enum
import logging
import argparse
import os
import pdb
import random
import shutil
import subprocess
import sys
import tempfile
import time
from .authproxy import JSONRPCException
from . import coverage
from .test_node import TestNode
from .mininode import NetworkThread
from .util import (
MAX_NODES,
PortSeed,
assert_equal,
check_json_precision,
connect_nodes,
disconnect_nodes,
get_datadir_path,
initialize_datadir,
sync_blocks,
sync_mempools,
)
class TestStatus(Enum):
PASSED = 1
FAILED = 2
SKIPPED = 3
TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
TMPDIR_PREFIX = "goldbcr_func_test_"
class SkipTest(Exception):
"""This exception is raised to skip a test"""
def __init__(self, message):
self.message = message
class GoldBCRTestMetaClass(type):
    """Metaclass for GoldBCRTestFramework.
    Ensures that any attempt to register a subclass of `GoldBCRTestFramework`
    adheres to a standard whereby the subclass overrides `set_test_params` and
    `run_test` but DOES NOT override either `__init__` or `main`. If any of
    those standards are violated, a ``TypeError`` is raised."""
    def __new__(cls, clsname, bases, dct):
        if not clsname == 'GoldBCRTestFramework':
            if not ('run_test' in dct and 'set_test_params' in dct):
                raise TypeError("GoldBCRTestFramework subclasses must override "
                                "'run_test' and 'set_test_params'")
            if '__init__' in dct or 'main' in dct:
                raise TypeError("GoldBCRTestFramework subclasses may not override "
                                "'__init__' or 'main'")
        return super().__new__(cls, clsname, bases, dct)
class GoldBCRTestFramework(metaclass=GoldBCRTestMetaClass):
"""Base class for a goldbcr test script.
Individual goldbcr test scripts should subclass this class and override the set_test_params() and run_test() methods.
Individual tests can also override the following methods to customize the test setup:
- add_options()
- setup_chain()
- setup_network()
- setup_nodes()
The __init__() and main() methods should not be overridden.
This class also contains various public and private helper methods."""
def __init__(self):
"""Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
self.chain = 'regtest'
self.setup_clean_chain = False
self.nodes = []
self.network_thread = None
self.rpc_timeout = 60 # Wait for up to 60 seconds for the RPC server to respond
self.supports_cli = True
self.bind_to_localhost_only = True
self.set_test_params()
self.parse_args()
def main(self):
"""Main function. This should not be overridden by the subclass test scripts."""
assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"
try:
self.setup()
self.run_test()
except JSONRPCException:
self.log.exception("JSONRPC error")
self.success = TestStatus.FAILED
except SkipTest as e:
self.log.warning("Test Skipped: %s" % e.message)
self.success = TestStatus.SKIPPED
except AssertionError:
self.log.exception("Assertion failed")
self.success = TestStatus.FAILED
except KeyError:
self.log.exception("Key error")
self.success = TestStatus.FAILED
except subprocess.CalledProcessError as e:
self.log.exception("Called Process failed with '{}'".format(e.output))
self.success = TestStatus.FAILED
except Exception:
self.log.exception("Unexpected exception caught during testing")
self.success = TestStatus.FAILED
except KeyboardInterrupt:
self.log.warning("Exiting after keyboard interrupt")
self.success = TestStatus.FAILED
finally:
exit_code = self.shutdown()
sys.exit(exit_code)
def parse_args(self):
parser = argparse.ArgumentParser(usage="%(prog)s [options]")
parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave goldbcrds and test.* datadir on exit or error")
parser.add_argument("--noshutdown", dest="noshutdown", default=False, action="store_true",
help="Don't stop goldbcrds after the test execution")
parser.add_argument("--cachedir", dest="cachedir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
help="Directory for caching pregenerated datadirs (default: %(default)s)")
parser.add_argument("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
parser.add_argument("-l", "--loglevel", dest="loglevel", default="INFO",
help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
parser.add_argument("--tracerpc", dest="trace_rpc", default=False, action="store_true",
help="Print out all RPC calls as they are made")
parser.add_argument("--portseed", dest="port_seed", default=os.getpid(), type=int,
help="The seed to use for assigning port numbers (default: current process id)")
parser.add_argument("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
parser.add_argument("--configfile", dest="configfile",
default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../config.ini"),
help="Location of the test framework config file (default: %(default)s)")
parser.add_argument("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
help="Attach a python debugger if test fails")
parser.add_argument("--usecli", dest="usecli", default=False, action="store_true",
help="use goldbcr-cli instead of RPC for all commands")
parser.add_argument("--perf", dest="perf", default=False, action="store_true",
help="profile running nodes with perf for the duration of the test")
parser.add_argument("--valgrind", dest="valgrind", default=False, action="store_true",
help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown, valgrind 3.14 or later required")
parser.add_argument("--randomseed", type=int,
help="set a random seed for deterministically reproducing a previous test run")
self.add_options(parser)
self.options = parser.parse_args()
def setup(self):
"""Call this method to start up the test framework object with options set."""
PortSeed.n = self.options.port_seed
check_json_precision()
self.options.cachedir = os.path.abspath(self.options.cachedir)
config = configparser.ConfigParser()
config.read_file(open(self.options.configfile))
self.config = config
self.options.goldbcrd = os.getenv("BITCOIND", default=config["environment"]["BUILDDIR"] + '/src/goldbcrd' + config["environment"]["EXEEXT"])
self.options.goldbcrcli = os.getenv("BITCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/goldbcr-cli' + config["environment"]["EXEEXT"])
os.environ['PATH'] = os.pathsep.join([
os.path.join(config['environment']['BUILDDIR'], 'src'),
os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'),
os.environ['PATH']
])
# Set up temp directory and start logging
if self.options.tmpdir:
self.options.tmpdir = os.path.abspath(self.options.tmpdir)
os.makedirs(self.options.tmpdir, exist_ok=False)
else:
self.options.tmpdir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX)
self._start_logging()
# Seed the PRNG. Note that test runs are reproducible if and only if
# a single thread accesses the PRNG. For more information, see
# https://docs.python.org/3/library/random.html#notes-on-reproducibility.
# The network thread shouldn't access random. If we need to change the
# network thread to access randomness, it should instantiate its own
# random.Random object.
seed = self.options.randomseed
if seed is None:
seed = random.randrange(sys.maxsize)
else:
self.log.debug("User supplied random seed {}".format(seed))
random.seed(seed)
self.log.debug("PRNG seed is: {}".format(seed))
self.log.debug('Setting up network thread')
self.network_thread = NetworkThread()
self.network_thread.start()
if self.options.usecli:
if not self.supports_cli:
raise SkipTest("--usecli specified but test does not support using CLI")
self.skip_if_no_cli()
self.skip_test_if_missing_module()
self.setup_chain()
self.setup_network()
self.success = TestStatus.PASSED
def shutdown(self):
"""Call this method to shut down the test framework object."""
if self.success == TestStatus.FAILED and self.options.pdbonfailure:
print("Testcase failed. Attaching python debugger. Enter ? for help")
pdb.set_trace()
self.log.debug('Closing down network thread')
self.network_thread.close()
if not self.options.noshutdown:
self.log.info("Stopping nodes")
if self.nodes:
self.stop_nodes()
else:
for node in self.nodes:
node.cleanup_on_exit = False
self.log.info("Note: goldbcrds were not stopped and may still be running")
should_clean_up = (
not self.options.nocleanup and
not self.options.noshutdown and
self.success != TestStatus.FAILED and
not self.options.perf
)
if should_clean_up:
self.log.info("Cleaning up {} on exit".format(self.options.tmpdir))
cleanup_tree_on_exit = True
elif self.options.perf:
self.log.warning("Not cleaning up dir {} due to perf data".format(self.options.tmpdir))
cleanup_tree_on_exit = False
else:
self.log.warning("Not cleaning up dir {}".format(self.options.tmpdir))
cleanup_tree_on_exit = False
if self.success == TestStatus.PASSED:
self.log.info("Tests successful")
exit_code = TEST_EXIT_PASSED
elif self.success == TestStatus.SKIPPED:
self.log.info("Test skipped")
exit_code = TEST_EXIT_SKIPPED
else:
self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
exit_code = TEST_EXIT_FAILED
# Logging.shutdown will not remove stream- and filehandlers, so we must
# do it explicitly. Handlers are removed so the next test run can apply
# different log handler settings.
# See: https://docs.python.org/3/library/logging.html#logging.shutdown
for h in list(self.log.handlers):
h.flush()
h.close()
self.log.removeHandler(h)
rpc_logger = logging.getLogger("Gold BCRRPC")
for h in list(rpc_logger.handlers):
h.flush()
rpc_logger.removeHandler(h)
if cleanup_tree_on_exit:
shutil.rmtree(self.options.tmpdir)
self.nodes.clear()
return exit_code
# Methods to override in subclass test scripts.
def set_test_params(self):
"""Tests must this method to change default values for number of nodes, topology, etc"""
raise NotImplementedError
def add_options(self, parser):
"""Override this method to add command-line options to the test"""
pass
def skip_test_if_missing_module(self):
"""Override this method to skip a test if a module is not compiled"""
pass
def setup_chain(self):
"""Override this method to customize blockchain setup"""
self.log.info("Initializing test directory " + self.options.tmpdir)
if self.setup_clean_chain:
self._initialize_chain_clean()
else:
self._initialize_chain()
def setup_network(self):
"""Override this method to customize test network topology"""
self.setup_nodes()
# Connect the nodes as a "chain". This allows us
# to split the network between nodes 1 and 2 to get
# two halves that can work on competing chains.
#
# Topology looks like this:
# node0 <-- node1 <-- node2 <-- node3
#
# If all nodes are in IBD (clean chain from genesis), node0 is assumed to be the source of blocks (miner). To
# ensure block propagation, all nodes will establish outgoing connections toward node0.
# See fPreferredDownload in net_processing.
#
# If further outbound connections are needed, they can be added at the beginning of the test with e.g.
# connect_nodes(self.nodes[1], 2)
for i in range(self.num_nodes - 1):
connect_nodes(self.nodes[i + 1], i)
self.sync_all()
def setup_nodes(self):
"""Override this method to customize test node setup"""
extra_args = None
if hasattr(self, "extra_args"):
extra_args = self.extra_args
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
self.import_deterministic_coinbase_privkeys()
if not self.setup_clean_chain:
for n in self.nodes:
assert_equal(n.getblockchaininfo()["blocks"], 199)
# To ensure that all nodes are out of IBD, the most recent block
# must have a timestamp not too old (see IsInitialBlockDownload()).
self.log.debug('Generate a block with current time')
block_hash = self.nodes[0].generate(1)[0]
block = self.nodes[0].getblock(blockhash=block_hash, verbosity=0)
for n in self.nodes:
n.submitblock(block)
chain_info = n.getblockchaininfo()
assert_equal(chain_info["blocks"], 200)
assert_equal(chain_info["initialblockdownload"], False)
def import_deterministic_coinbase_privkeys(self):
for n in self.nodes:
try:
n.getwalletinfo()
except JSONRPCException as e:
assert str(e).startswith('Method not found')
continue
n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase')
def run_test(self):
"""Tests must override this method to define test logic"""
raise NotImplementedError
# Public helper methods. These can be accessed by the subclass test scripts.
def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None, binary_cli=None, versions=None):
"""Instantiate TestNode objects.
Should only be called once after the nodes have been specified in
set_test_params()."""
if self.bind_to_localhost_only:
extra_confs = [["bind=127.0.0.1"]] * num_nodes
else:
extra_confs = [[]] * num_nodes
if extra_args is None:
extra_args = [[]] * num_nodes
if versions is None:
versions = [None] * num_nodes
if binary is None:
binary = [self.options.goldbcrd] * num_nodes
if binary_cli is None:
binary_cli = [self.options.goldbcrcli] * num_nodes
assert_equal(len(extra_confs), num_nodes)
assert_equal(len(extra_args), num_nodes)
assert_equal(len(versions), num_nodes)
assert_equal(len(binary), num_nodes)
assert_equal(len(binary_cli), num_nodes)
for i in range(num_nodes):
self.nodes.append(TestNode(
i,
get_datadir_path(self.options.tmpdir, i),
chain=self.chain,
rpchost=rpchost,
timewait=self.rpc_timeout,
goldbcrd=binary[i],
goldbcr_cli=binary_cli[i],
version=versions[i],
coverage_dir=self.options.coveragedir,
cwd=self.options.tmpdir,
extra_conf=extra_confs[i],
extra_args=extra_args[i],
use_cli=self.options.usecli,
start_perf=self.options.perf,
use_valgrind=self.options.valgrind,
))
def start_node(self, i, *args, **kwargs):
"""Start a goldbcrd"""
node = self.nodes[i]
node.start(*args, **kwargs)
node.wait_for_rpc_connection()
if self.options.coveragedir is not None:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def start_nodes(self, extra_args=None, *args, **kwargs):
"""Start multiple goldbcrds"""
if extra_args is None:
extra_args = [None] * self.num_nodes
assert_equal(len(extra_args), self.num_nodes)
try:
for i, node in enumerate(self.nodes):
node.start(extra_args[i], *args, **kwargs)
for node in self.nodes:
node.wait_for_rpc_connection()
except:
# If one node failed to start, stop the others
self.stop_nodes()
raise
if self.options.coveragedir is not None:
for node in self.nodes:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def stop_node(self, i, expected_stderr='', wait=0):
"""Stop a goldbcrd test node"""
self.nodes[i].stop_node(expected_stderr, wait=wait)
self.nodes[i].wait_until_stopped()
def stop_nodes(self, wait=0):
"""Stop multiple goldbcrd test nodes"""
for node in self.nodes:
# Issue RPC to stop nodes
node.stop_node(wait=wait)
for node in self.nodes:
# Wait for nodes to stop
node.wait_until_stopped()
def restart_node(self, i, extra_args=None):
"""Stop and start a test node"""
self.stop_node(i)
self.start_node(i, extra_args)
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
def split_network(self):
"""
Split the network of four nodes into nodes 0/1 and 2/3.
"""
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
self.sync_all(self.nodes[:2])
self.sync_all(self.nodes[2:])
def join_network(self):
"""
Join the (previously split) network halves together.
"""
connect_nodes(self.nodes[1], 2)
self.sync_all()
def sync_blocks(self, nodes=None, **kwargs):
sync_blocks(nodes or self.nodes, **kwargs)
def sync_mempools(self, nodes=None, **kwargs):
sync_mempools(nodes or self.nodes, **kwargs)
def sync_all(self, nodes=None, **kwargs):
self.sync_blocks(nodes, **kwargs)
self.sync_mempools(nodes, **kwargs)
# Private helper methods. These should not be accessed by the subclass test scripts.
def _start_logging(self):
# Add logger and logging handlers
self.log = logging.getLogger('TestFramework')
self.log.setLevel(logging.DEBUG)
# Create file handler to log all messages
fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log', encoding='utf-8')
fh.setLevel(logging.DEBUG)
# Create console handler to log messages to stderr. By default this logs only error messages, but can be configured with --loglevel.
ch = logging.StreamHandler(sys.stdout)
# User can provide log level as a number or string (eg DEBUG). loglevel was caught as a string, so try to convert it to an int
ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
ch.setLevel(ll)
# Format logs the same as goldbcrd's debug.log with microprecision (so log files can be concatenated and sorted)
formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000Z %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%dT%H:%M:%S')
formatter.converter = time.gmtime
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
self.log.addHandler(fh)
self.log.addHandler(ch)
if self.options.trace_rpc:
rpc_logger = logging.getLogger("Gold BCRRPC")
rpc_logger.setLevel(logging.DEBUG)
rpc_handler = logging.StreamHandler(sys.stdout)
rpc_handler.setLevel(logging.DEBUG)
rpc_logger.addHandler(rpc_handler)
def _initialize_chain(self):
"""Initialize a pre-mined blockchain for use by the test.
Create a cache of a 199-block-long chain
Afterward, create num_nodes copies from the cache."""
CACHE_NODE_ID = 0 # Use node 0 to create the cache for all other nodes
cache_node_dir = get_datadir_path(self.options.cachedir, CACHE_NODE_ID)
assert self.num_nodes <= MAX_NODES
if not os.path.isdir(cache_node_dir):
self.log.debug("Creating cache directory {}".format(cache_node_dir))
initialize_datadir(self.options.cachedir, CACHE_NODE_ID, self.chain)
self.nodes.append(
TestNode(
CACHE_NODE_ID,
cache_node_dir,
chain=self.chain,
extra_conf=["bind=127.0.0.1"],
extra_args=['-disablewallet'],
rpchost=None,
timewait=self.rpc_timeout,
goldbcrd=self.options.goldbcrd,
goldbcr_cli=self.options.goldbcrcli,
coverage_dir=None,
cwd=self.options.tmpdir,
))
self.start_node(CACHE_NODE_ID)
# Wait for RPC connections to be ready
self.nodes[CACHE_NODE_ID].wait_for_rpc_connection()
# Create a 199-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# The 4th node gets only 24 immature blocks so that the very last
# block in the cache does not age too much (have an old tip age).
# This is needed so that we are out of IBD when the test starts,
# see the tip age check in IsInitialBlockDownload().
for i in range(8):
self.nodes[CACHE_NODE_ID].generatetoaddress(
nblocks=25 if i != 7 else 24,
address=TestNode.PRIV_KEYS[i % 4].address,
)
assert_equal(self.nodes[CACHE_NODE_ID].getblockchaininfo()["blocks"], 199)
# Shut it down, and clean up cache directories:
self.stop_nodes()
self.nodes = []
def cache_path(*paths):
return os.path.join(cache_node_dir, self.chain, *paths)
os.rmdir(cache_path('wallets')) # Remove empty wallets dir
for entry in os.listdir(cache_path()):
if entry not in ['chainstate', 'blocks']: # Only keep chainstate and blocks folder
os.remove(cache_path(entry))
for i in range(self.num_nodes):
self.log.debug("Copy cache directory {} to node {}".format(cache_node_dir, i))
to_dir = get_datadir_path(self.options.tmpdir, i)
shutil.copytree(cache_node_dir, to_dir)
initialize_datadir(self.options.tmpdir, i, self.chain) # Overwrite port/rpcport in goldbcr.conf
def _initialize_chain_clean(self):
"""Initialize empty blockchain for use by the test.
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization."""
for i in range(self.num_nodes):
initialize_datadir(self.options.tmpdir, i, self.chain)
def skip_if_no_py3_zmq(self):
"""Attempt to import the zmq package and skip the test if the import fails."""
try:
import zmq # noqa
except ImportError:
raise SkipTest("python3-zmq module not available.")
def skip_if_no_goldbcrd_zmq(self):
"""Skip the running test if goldbcrd has not been compiled with zmq support."""
if not self.is_zmq_compiled():
raise SkipTest("goldbcrd has not been built with zmq enabled.")
def skip_if_no_wallet(self):
"""Skip the running test if wallet has not been compiled."""
if not self.is_wallet_compiled():
raise SkipTest("wallet has not been compiled.")
def skip_if_no_wallet_tool(self):
"""Skip the running test if goldbcr-wallet has not been compiled."""
if not self.is_wallet_tool_compiled():
raise SkipTest("goldbcr-wallet has not been compiled")
def skip_if_no_cli(self):
"""Skip the running test if goldbcr-cli has not been compiled."""
if not self.is_cli_compiled():
raise SkipTest("goldbcr-cli has not been compiled.")
def is_cli_compiled(self):
"""Checks whether goldbcr-cli was compiled."""
return self.config["components"].getboolean("ENABLE_CLI")
def is_wallet_compiled(self):
"""Checks whether the wallet module was compiled."""
return self.config["components"].getboolean("ENABLE_WALLET")
def is_wallet_tool_compiled(self):
"""Checks whether goldbcr-wallet was compiled."""
return self.config["components"].getboolean("ENABLE_WALLET_TOOL")
def is_zmq_compiled(self):
"""Checks whether the zmq module was compiled."""
return self.config["components"].getboolean("ENABLE_ZMQ")
| 42.241379 | 312 | 0.626716 |
794838e32a0b850ea0c89cbf88d989889fc0932a
| 18,344 |
py
|
Python
|
macOS/bee1.app/Contents/Resources/python/lib/python3.6/test/test_sysconfig.py
|
leewonmoh/macrepo1
|
d8f15145fac127c1c7294f01ad39187f76b76ac2
|
[
"BSD-3-Clause"
] | null | null | null |
macOS/bee1.app/Contents/Resources/python/lib/python3.6/test/test_sysconfig.py
|
leewonmoh/macrepo1
|
d8f15145fac127c1c7294f01ad39187f76b76ac2
|
[
"BSD-3-Clause"
] | null | null | null |
macOS/bee1.app/Contents/Resources/python/lib/python3.6/test/test_sysconfig.py
|
leewonmoh/macrepo1
|
d8f15145fac127c1c7294f01ad39187f76b76ac2
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
import sys
import os
import subprocess
import shutil
from copy import copy
from test.support import (run_unittest,
import_module, TESTFN, unlink, check_warnings,
captured_stdout, skip_unless_symlink, change_cwd)
import sysconfig
from sysconfig import (get_paths, get_platform, get_config_vars,
get_path, get_path_names, _INSTALL_SCHEMES,
_get_default_scheme, _expand_vars,
get_scheme_names, get_config_var, _main)
import _osx_support
class TestSysConfig(unittest.TestCase):
def setUp(self):
super(TestSysConfig, self).setUp()
self.sys_path = sys.path[:]
# patching os.uname
if hasattr(os, 'uname'):
self.uname = os.uname
self._uname = os.uname()
else:
self.uname = None
self._set_uname(('',)*5)
os.uname = self._get_uname
# saving the environment
self.name = os.name
self.platform = sys.platform
self.version = sys.version
self.sep = os.sep
self.join = os.path.join
self.isabs = os.path.isabs
self.splitdrive = os.path.splitdrive
self._config_vars = sysconfig._CONFIG_VARS, copy(sysconfig._CONFIG_VARS)
self._added_envvars = []
self._changed_envvars = []
for var in ('MACOSX_DEPLOYMENT_TARGET', 'PATH'):
if var in os.environ:
self._changed_envvars.append((var, os.environ[var]))
else:
self._added_envvars.append(var)
def tearDown(self):
sys.path[:] = self.sys_path
self._cleanup_testfn()
if self.uname is not None:
os.uname = self.uname
else:
del os.uname
os.name = self.name
sys.platform = self.platform
sys.version = self.version
os.sep = self.sep
os.path.join = self.join
os.path.isabs = self.isabs
os.path.splitdrive = self.splitdrive
sysconfig._CONFIG_VARS = self._config_vars[0]
sysconfig._CONFIG_VARS.clear()
sysconfig._CONFIG_VARS.update(self._config_vars[1])
for var, value in self._changed_envvars:
os.environ[var] = value
for var in self._added_envvars:
os.environ.pop(var, None)
super(TestSysConfig, self).tearDown()
def _set_uname(self, uname):
self._uname = os.uname_result(uname)
def _get_uname(self):
return self._uname
def _cleanup_testfn(self):
path = TESTFN
if os.path.isfile(path):
os.remove(path)
elif os.path.isdir(path):
shutil.rmtree(path)
def test_get_path_names(self):
self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS)
def test_get_paths(self):
scheme = get_paths()
default_scheme = _get_default_scheme()
wanted = _expand_vars(default_scheme, None)
wanted = sorted(wanted.items())
scheme = sorted(scheme.items())
self.assertEqual(scheme, wanted)
def test_get_path(self):
# XXX make real tests here
for scheme in _INSTALL_SCHEMES:
for name in _INSTALL_SCHEMES[scheme]:
res = get_path(name, scheme)
def test_get_config_vars(self):
cvars = get_config_vars()
self.assertIsInstance(cvars, dict)
self.assertTrue(cvars)
def test_get_platform(self):
# windows XP, 32bits
os.name = 'nt'
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
'[MSC v.1310 32 bit (Intel)]')
sys.platform = 'win32'
self.assertEqual(get_platform(), 'win32')
# windows XP, amd64
os.name = 'nt'
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
'[MSC v.1310 32 bit (Amd64)]')
sys.platform = 'win32'
self.assertEqual(get_platform(), 'win-amd64')
# windows XP, itanium
os.name = 'nt'
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
'[MSC v.1310 32 bit (Itanium)]')
sys.platform = 'win32'
self.assertEqual(get_platform(), 'win-ia64')
# macbook
os.name = 'posix'
sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) '
'\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]')
sys.platform = 'darwin'
self._set_uname(('Darwin', 'macziade', '8.11.1',
('Darwin Kernel Version 8.11.1: '
'Wed Oct 10 18:23:28 PDT 2007; '
'root:xnu-792.25.20~1/RELEASE_I386'), 'PowerPC'))
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
'-fwrapv -O3 -Wall -Wstrict-prototypes')
maxint = sys.maxsize
try:
sys.maxsize = 2147483647
self.assertEqual(get_platform(), 'macosx-10.3-ppc')
sys.maxsize = 9223372036854775807
self.assertEqual(get_platform(), 'macosx-10.3-ppc64')
finally:
sys.maxsize = maxint
self._set_uname(('Darwin', 'macziade', '8.11.1',
('Darwin Kernel Version 8.11.1: '
'Wed Oct 10 18:23:28 PDT 2007; '
'root:xnu-792.25.20~1/RELEASE_I386'), 'i386'))
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
'-fwrapv -O3 -Wall -Wstrict-prototypes')
maxint = sys.maxsize
try:
sys.maxsize = 2147483647
self.assertEqual(get_platform(), 'macosx-10.3-i386')
sys.maxsize = 9223372036854775807
self.assertEqual(get_platform(), 'macosx-10.3-x86_64')
finally:
sys.maxsize = maxint
# macbook with fat binaries (fat, universal or fat64)
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-fat')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-intel')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-fat3')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-universal')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-fat64')
for arch in ('ppc', 'i386', 'x86_64', 'ppc64'):
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3' % arch)
self.assertEqual(get_platform(), 'macosx-10.4-%s' % arch)
# linux debian sarge
os.name = 'posix'
sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) '
'\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]')
sys.platform = 'linux2'
self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7',
'#1 Mon Apr 30 17:25:38 CEST 2007', 'i686'))
self.assertEqual(get_platform(), 'linux-i686')
# XXX more platforms to tests here
def test_get_config_h_filename(self):
config_h = sysconfig.get_config_h_filename()
self.assertTrue(os.path.isfile(config_h), config_h)
def test_get_scheme_names(self):
wanted = ('ios', 'nt', 'nt_user', 'osx_framework_user',
'posix_home', 'posix_prefix', 'posix_user',
'tvos', 'watchos')
self.assertEqual(get_scheme_names(), wanted)
@skip_unless_symlink
@unittest.skipUnless(os.allows_subprocesses, 'Test requires support for subprocesses.')
def test_symlink(self):
# On Windows, the EXE needs to know where pythonXY.dll is at so we have
# to add the directory to the path.
if sys.platform == "win32":
os.environ["PATH"] = "{};{}".format(
os.path.dirname(sys.executable), os.environ["PATH"])
# Issue 7880
def get(python):
cmd = [python, '-c',
'import sysconfig; print(sysconfig.get_platform())']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=os.environ)
return p.communicate()
real = os.path.realpath(sys.executable)
link = os.path.abspath(TESTFN)
os.symlink(real, link)
try:
self.assertEqual(get(real), get(link))
finally:
unlink(link)
def test_user_similar(self):
# Issue #8759: make sure the posix scheme for the users
# is similar to the global posix_prefix one
base = get_config_var('base')
user = get_config_var('userbase')
# the global scheme mirrors the distinction between prefix and
# exec-prefix but not the user scheme, so we have to adapt the paths
# before comparing (issue #9100)
adapt = sys.base_prefix != sys.base_exec_prefix
for name in ('stdlib', 'platstdlib', 'purelib', 'platlib'):
global_path = get_path(name, 'posix_prefix')
if adapt:
global_path = global_path.replace(sys.exec_prefix, sys.base_prefix)
base = base.replace(sys.exec_prefix, sys.base_prefix)
elif sys.base_prefix != sys.prefix:
# virtual environment? Likewise, we have to adapt the paths
# before comparing
global_path = global_path.replace(sys.base_prefix, sys.prefix)
base = base.replace(sys.base_prefix, sys.prefix)
user_path = get_path(name, 'posix_user')
self.assertEqual(user_path, global_path.replace(base, user, 1))
def test_main(self):
# just making sure _main() runs and returns things in the stdout
with captured_stdout() as output:
_main()
self.assertTrue(len(output.getvalue().split('\n')) > 0)
@unittest.skipIf(sys.platform == "win32", "Does not apply to Windows")
def test_ldshared_value(self):
ldflags = sysconfig.get_config_var('LDFLAGS')
ldshared = sysconfig.get_config_var('LDSHARED')
self.assertIn(ldflags, ldshared)
@unittest.skipUnless(sys.platform == "darwin", "test only relevant on MacOSX")
def test_platform_in_subprocess(self):
my_platform = sysconfig.get_platform()
# Test without MACOSX_DEPLOYMENT_TARGET in the environment
env = os.environ.copy()
if 'MACOSX_DEPLOYMENT_TARGET' in env:
del env['MACOSX_DEPLOYMENT_TARGET']
p = subprocess.Popen([
sys.executable, '-c',
'import sysconfig; print(sysconfig.get_platform())',
],
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
env=env)
test_platform = p.communicate()[0].strip()
test_platform = test_platform.decode('utf-8')
status = p.wait()
self.assertEqual(status, 0)
self.assertEqual(my_platform, test_platform)
# Test with MACOSX_DEPLOYMENT_TARGET in the environment, and
# using a value that is unlikely to be the default one.
env = os.environ.copy()
env['MACOSX_DEPLOYMENT_TARGET'] = '10.1'
p = subprocess.Popen([
sys.executable, '-c',
'import sysconfig; print(sysconfig.get_platform())',
],
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
env=env)
test_platform = p.communicate()[0].strip()
test_platform = test_platform.decode('utf-8')
status = p.wait()
self.assertEqual(status, 0)
self.assertEqual(my_platform, test_platform)
def test_srcdir(self):
# See Issues #15322, #15364.
srcdir = sysconfig.get_config_var('srcdir')
self.assertTrue(os.path.isabs(srcdir), srcdir)
self.assertTrue(os.path.isdir(srcdir), srcdir)
if sysconfig._PYTHON_BUILD:
# The python executable has not been installed so srcdir
# should be a full source checkout.
Python_h = os.path.join(srcdir, 'Include', 'Python.h')
self.assertTrue(os.path.exists(Python_h), Python_h)
self.assertTrue(sysconfig._is_python_source_dir(srcdir))
elif os.name == 'posix':
makefile_dir = os.path.dirname(sysconfig.get_makefile_filename())
# Issue #19340: srcdir has been realpath'ed already
makefile_dir = os.path.realpath(makefile_dir)
self.assertEqual(makefile_dir, srcdir)
def test_srcdir_independent_of_cwd(self):
# srcdir should be independent of the current working directory
# See Issues #15322, #15364.
srcdir = sysconfig.get_config_var('srcdir')
with change_cwd(os.pardir):
srcdir2 = sysconfig.get_config_var('srcdir')
self.assertEqual(srcdir, srcdir2)
@unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
'EXT_SUFFIX required for this test')
def test_SO_deprecation(self):
self.assertWarns(DeprecationWarning,
sysconfig.get_config_var, 'SO')
@unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
'EXT_SUFFIX required for this test')
def test_SO_value(self):
with check_warnings(('', DeprecationWarning)):
self.assertEqual(sysconfig.get_config_var('SO'),
sysconfig.get_config_var('EXT_SUFFIX'))
@unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
'EXT_SUFFIX required for this test')
def test_SO_in_vars(self):
vars = sysconfig.get_config_vars()
self.assertIsNotNone(vars['SO'])
self.assertEqual(vars['SO'], vars['EXT_SUFFIX'])
@unittest.skipUnless(sys.platform == 'linux' and
hasattr(sys.implementation, '_multiarch'),
'multiarch-specific test')
def test_triplet_in_ext_suffix(self):
ctypes = import_module('ctypes')
import platform, re
machine = platform.machine()
suffix = sysconfig.get_config_var('EXT_SUFFIX')
if re.match('(aarch64|arm|mips|ppc|powerpc|s390|sparc)', machine):
self.assertTrue('linux' in suffix, suffix)
if re.match('(i[3-6]86|x86_64)$', machine):
if ctypes.sizeof(ctypes.c_char_p()) == 4:
self.assertTrue(suffix.endswith('i386-linux-gnu.so') or
suffix.endswith('x86_64-linux-gnux32.so'),
suffix)
else: # 8 byte pointer size
self.assertTrue(suffix.endswith('x86_64-linux-gnu.so'), suffix)
@unittest.skipUnless(sys.platform == 'darwin', 'OS X-specific test')
def test_osx_ext_suffix(self):
suffix = sysconfig.get_config_var('EXT_SUFFIX')
self.assertTrue(suffix.endswith('-darwin.so'), suffix)
class MakefileTests(unittest.TestCase):
@unittest.skipIf(sys.platform.startswith('win'),
'Test is not Windows compatible')
def test_get_makefile_filename(self):
makefile = sysconfig.get_makefile_filename()
self.assertTrue(os.path.isfile(makefile), makefile)
def test_parse_makefile(self):
self.addCleanup(unlink, TESTFN)
with open(TESTFN, "w") as makefile:
print("var1=a$(VAR2)", file=makefile)
print("VAR2=b$(var3)", file=makefile)
print("var3=42", file=makefile)
print("var4=$/invalid", file=makefile)
print("var5=dollar$$5", file=makefile)
print("var6=${var3}/lib/python3.5/config-$(VAR2)$(var5)"
"-x86_64-linux-gnu", file=makefile)
vars = sysconfig._parse_makefile(TESTFN)
self.assertEqual(vars, {
'var1': 'ab42',
'VAR2': 'b42',
'var3': 42,
'var4': '$/invalid',
'var5': 'dollar$5',
'var6': '42/lib/python3.5/config-b42dollar$5-x86_64-linux-gnu',
})
def test_main():
run_unittest(TestSysConfig, MakefileTests)
if __name__ == "__main__":
test_main()
| 40.855234 | 97 | 0.576973 |
7948391e65caca57a0a7cbd6aad771064574cfd2
| 1,259 |
py
|
Python
|
2018/12-35C3/rev-boxofblink/extract_change.py
|
wani-hackase/wani-writeup
|
dd4ad0607d2f2193ad94c1ce65359294aa591681
|
[
"MIT"
] | 25 |
2019-03-06T11:55:56.000Z
|
2021-05-21T22:07:14.000Z
|
2018/12-35C3/rev-boxofblink/extract_change.py
|
wani-hackase/wani-writeup
|
dd4ad0607d2f2193ad94c1ce65359294aa591681
|
[
"MIT"
] | 1 |
2020-06-25T07:27:15.000Z
|
2020-06-25T07:27:15.000Z
|
2018/12-35C3/rev-boxofblink/extract_change.py
|
wani-hackase/wani-writeup
|
dd4ad0607d2f2193ad94c1ce65359294aa591681
|
[
"MIT"
] | 1 |
2019-02-14T00:42:28.000Z
|
2019-02-14T00:42:28.000Z
|
import sys
ignores = [0]
if len(sys.argv) != 2:
print("usage: %s [filename]" % (sys.argv[0]))
sys.exit(0)
fp = open(sys.argv[1])
line = fp.readline()
if line == "":
sys.exit()
items = line.split(',')
pre_values = []
for i in range(len(items)):
if i == 0:
pre_values.append(float(items[i]))
else:
pre_values.append(int(items[i]))
index = 0
while line:
items = line.split(',')
current_values = []
for i in range(len(items)):
if i == 0:
current_values.append(float(items[i]))
else:
current_values.append(int(items[i]))
if current_values[1:-1] != pre_values[1:-1]:
for i in range(len(pre_values)):
if i == 0:
print("%.10f" % (pre_values[i]), end="")
else:
print(",%d" % (pre_values[i]), end="")
print("")
for i in range(len(current_values)):
if i == 0:
print("%.10f" % (current_values[i]), end="")
else:
print(",%d" % (current_values[i]), end="")
print("")
pre_values = current_values
line = fp.readline()
index = index + 1
# if index == 100000:
# break
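# --- Illustrative usage sketch (not part of the original write-up script). ---
# The script expects a CSV capture whose first column is a float timestamp and
# whose remaining columns are integer signal samples, e.g.:
#   0.0000000000,0,1,0,1
#   0.0000000100,0,1,0,1
#   0.0000000200,0,1,1,1   <- change: this row and the previous one are printed
# (the first and last columns are ignored when detecting changes).
# Run it as:  python extract_change.py capture.csv > edges.csv
# The file names are placeholders.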
| 19.984127 | 60 | 0.481334 |
79483a18ade49b9e3a6aee2e2162b3123a115e42
| 536 |
py
|
Python
|
python/en/_numpy/1.Quickstart_tutorial-1.The_Basics-1.An_eample.py
|
aimldl/coding
|
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
|
[
"MIT"
] | null | null | null |
python/en/_numpy/1.Quickstart_tutorial-1.The_Basics-1.An_eample.py
|
aimldl/coding
|
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
|
[
"MIT"
] | null | null | null |
python/en/_numpy/1.Quickstart_tutorial-1.The_Basics-1.An_eample.py
|
aimldl/coding
|
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
|
[
"MIT"
] | null | null | null |
# 1.Quickstart_tutorial-1.The_Basics-1.An_eample.py
#
# https://docs.scipy.org/doc/numpy/user/quickstart.html
# The Basics - An example
import numpy as np
a = np.arange(15).reshape(3,5)
#>>> a
#array([[ 0, 1, 2, 3, 4],
# [ 5, 6, 7, 8, 9],
# [10, 11, 12, 13, 14]])
print( a.shape )
#(3, 5)
print( a.ndim )
#2
print( a.dtype.name )
#int64
print( a.itemsize )
#8
print( a.size )
#15
print( type(a) )
#<class 'numpy.ndarray'>
b = np.array( [6,7,8] )
#array([6, 7, 8])
print( type(b) )
#<class 'numpy.ndarray'>
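# A couple of extra attribute checks in the same spirit (added for illustration;
# the exact dtype name can vary by platform, 'int64' is typical on 64-bit Linux):
print( b.dtype.name )
#int64
print( b.size )
#3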
| 14.486486 | 55 | 0.576493 |
79483a35bdc84873fcf5a12328b21093cd96f7de
| 2,818 |
py
|
Python
|
modules/dense_correspondence_manipulation/scripts/render_depth_images.py
|
masato-ka/pytorch-dense-correspondence
|
89a5f87fd773b210e93ebcfeb945c95e7417d0e9
|
[
"BSD-3-Clause"
] | null | null | null |
modules/dense_correspondence_manipulation/scripts/render_depth_images.py
|
masato-ka/pytorch-dense-correspondence
|
89a5f87fd773b210e93ebcfeb945c95e7417d0e9
|
[
"BSD-3-Clause"
] | null | null | null |
modules/dense_correspondence_manipulation/scripts/render_depth_images.py
|
masato-ka/pytorch-dense-correspondence
|
89a5f87fd773b210e93ebcfeb945c95e7417d0e9
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env directorPython
import os
import argparse
import dense_correspondence_manipulation.change_detection.change_detection as change_detection
import dense_correspondence_manipulation.utils.utils as utils
from dense_correspondence_manipulation.utils.constants import *
from director.timercallback import TimerCallback
"""
Renders depth images against the entire scene
"""
CONFIG_FILE = CHANGE_DETECTION_CONFIG_FILE
def run(data_folder, config_file=CONFIG_FILE, debug=False, globalsDict=None):
"""
Runs the change detection pipeline
    :param data_folder:
:param config_file:
:return:
"""
if globalsDict is None:
globalsDict = globals()
config_file = CONFIG_FILE
config = utils.getDictFromYamlFilename(config_file)
# make dimensions large so no cropping
for key in config['crop_box']['dimensions']:
        config['crop_box']['dimensions'][key] = 10.0  # set it to 10 meters
changeDetection, obj_dict = change_detection.ChangeDetection.from_data_folder(data_folder, config=config, globalsDict=globalsDict,
background_data_folder=data_folder)
# set foreground mesh to actually be background mesh
changeDetection.foreground_reconstruction = changeDetection.background_reconstruction
app = obj_dict['app']
globalsDict['cd'] = changeDetection
view = obj_dict['view']
# if debug:
# changeDetection.background_reconstruction.visualize_reconstruction(view, name='background')
def single_shot_function():
changeDetection.render_depth_images()
app.app.quit()
if not debug:
TimerCallback(callback=single_shot_function).singleShot(0)
app.app.start(restoreWindow=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--data_dir", type=str, default='/home/manuelli/code/data_volume/sandbox/drill_scenes/04_drill_long_downsampled')
default_config_file = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', 'stations', 'RLG_iiwa_1', 'change_detection.yaml')
parser.add_argument("--config_file", type=str, default=default_config_file)
parser.add_argument('--current_dir', action='store_true', default=False, help="run the script with --data_dir set to the current directory")
parser.add_argument('--debug', action='store_true', default=False,
help="launch the app in debug mode")
globalsDict = globals()
args = parser.parse_args()
data_folder = args.data_dir
if args.current_dir:
print("running with data_dir set to current working directory . . . ")
data_folder = os.getcwd()
run(data_folder, config_file=args.config_file, debug=args.debug, globalsDict=globalsDict)
| 32.767442 | 144 | 0.71895 |
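The script widens the crop box so that nothing is cropped before the depth images are rendered. A minimal sketch of that override step alone, using a plain dictionary in place of the YAML-loaded config (the nested keys mirror what the original config file is assumed to contain; the change-detection pipeline itself is not needed for this part):

# make every crop-box dimension large (10 m) so the renderer sees the whole scene
config = {
    'crop_box': {
        'dimensions': {'x': 0.5, 'y': 0.5, 'z': 0.5},
    }
}

for key in config['crop_box']['dimensions']:
    config['crop_box']['dimensions'][key] = 10.0  # set it to 10 meters

print(config['crop_box']['dimensions'])  # {'x': 10.0, 'y': 10.0, 'z': 10.0}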
79483a4f66726810a694c4724567c8f5afb59f81
| 801 |
py
|
Python
|
setup.py
|
cosmodesi/cosmoprimo
|
a0d466418f6f5af80d446627109ce11bfb46f8b5
|
[
"BSD-3-Clause"
] | 2 |
2022-01-07T12:20:01.000Z
|
2022-01-22T20:34:03.000Z
|
setup.py
|
cosmodesi/cosmoprimo
|
a0d466418f6f5af80d446627109ce11bfb46f8b5
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
cosmodesi/cosmoprimo
|
a0d466418f6f5af80d446627109ce11bfb46f8b5
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import sys
from setuptools import setup
package_basename = 'cosmoprimo'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), package_basename))
import _version
version = _version.__version__
setup(name=package_basename,
version=version,
author='cosmodesi',
author_email='',
description='Lightweight primordial cosmology package, including wrappers to CLASS, CAMB, Eisenstein and Hu...',
license='BSD3',
url='http://github.com/cosmodesi/cosmoprimo',
install_requires=['numpy', 'scipy'],
extras_require={'class': ['cython', 'pyclass @ git+https://github.com/adematti/pyclass@1.0.0'], 'camb': ['camb'], 'astropy': ['astropy'], 'extras': ['pyfftw']},
package_data={'cosmoprimo': ['data/*.dat']},
packages=[package_basename])
| 34.826087 | 166 | 0.689139 |
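setup.py reads the package version by putting the package directory on sys.path and importing its _version module directly, so the package itself never has to be imported at build time. A small runnable sketch of the same trick, with a throwaway directory standing in for the real package:

import os
import sys
import tempfile

# a temporary directory plays the role of the package directory
pkg_dir = tempfile.mkdtemp()
with open(os.path.join(pkg_dir, "_version.py"), "w") as f:
    f.write("__version__ = '1.2.3'\n")

# same pattern as in setup.py: prepend the directory, then import _version
sys.path.insert(0, pkg_dir)
import _version

print(_version.__version__)  # 1.2.3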
79483b8d34fe4614b51e1546b30fce56c91fb036
| 469 |
py
|
Python
|
newsletters/migrations/0007_addemail_email.py
|
Juanvulcano/gci15_email
|
85f7133d3b539f14a56fedf7f2fc1987844d60e5
|
[
"MIT"
] | null | null | null |
newsletters/migrations/0007_addemail_email.py
|
Juanvulcano/gci15_email
|
85f7133d3b539f14a56fedf7f2fc1987844d60e5
|
[
"MIT"
] | null | null | null |
newsletters/migrations/0007_addemail_email.py
|
Juanvulcano/gci15_email
|
85f7133d3b539f14a56fedf7f2fc1987844d60e5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('newsletters', '0006_auto_20151218_2026'),
]
operations = [
migrations.AddField(
model_name='addemail',
name='email',
field=models.EmailField(default=' ', unique=True, max_length=254),
preserve_default=False,
),
]
| 22.333333 | 78 | 0.609808 |
79483bd4abdf264f0227f9a8098f52cb79513113
| 862 |
py
|
Python
|
find-peak-element/Solution.17532630.py
|
rahul-ramadas/leetcode
|
6c84c2333a613729361c5cdb63dc3fc80203b340
|
[
"MIT"
] | null | null | null |
find-peak-element/Solution.17532630.py
|
rahul-ramadas/leetcode
|
6c84c2333a613729361c5cdb63dc3fc80203b340
|
[
"MIT"
] | 1 |
2016-09-11T22:26:17.000Z
|
2016-09-13T01:49:48.000Z
|
find-peak-element/Solution.17532630.py
|
rahul-ramadas/leetcode
|
6c84c2333a613729361c5cdb63dc3fc80203b340
|
[
"MIT"
] | null | null | null |
class Solution:
def getElement(self, num, index):
if index < 0 or index >= len(num):
return float('-inf')
return num[index]
def findPeakElement(self, num):
lower_bound = 0
upper_bound = len(num)
while True:
middle_index = (upper_bound + lower_bound) // 2
middle = self.getElement(num, middle_index)
left = self.getElement(num, middle_index - 1)
right = self.getElement(num, middle_index + 1)
if left < middle > right:
return middle_index
elif left < middle < right:
lower_bound = middle_index + 1
elif left > middle > right:
upper_bound = middle_index
elif left > middle < right:
upper_bound = middle_index
| 29.724138 | 60 | 0.523202 |
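A small usage sketch for the solution above (it reuses the Solution class just defined; the inputs are illustrative and, as on LeetCode, contain no equal neighbours):

s = Solution()

# index 2 (value 3) is a peak
assert s.findPeakElement([1, 2, 3, 1]) == 2

# a strictly increasing list peaks at the last index, because getElement treats
# out-of-range neighbours as -inf
assert s.findPeakElement([1, 2, 3, 4, 5]) == 4

# a single element is trivially a peak
assert s.findPeakElement([7]) == 0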
79483cc458b21dc3623f764d8f423f9991e16bc0
| 10,789 |
py
|
Python
|
projects/faster_rcnn_swin_transformer/train_net_swint_full_reso.py
|
cenchaojun/detectron2
|
03ca41a6873bb641764c4762d40d355f215e7ad9
|
[
"Apache-2.0"
] | null | null | null |
projects/faster_rcnn_swin_transformer/train_net_swint_full_reso.py
|
cenchaojun/detectron2
|
03ca41a6873bb641764c4762d40d355f215e7ad9
|
[
"Apache-2.0"
] | null | null | null |
projects/faster_rcnn_swin_transformer/train_net_swint_full_reso.py
|
cenchaojun/detectron2
|
03ca41a6873bb641764c4762d40d355f215e7ad9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
"""
Detection Training Script.
This scripts reads a given config file and runs the training or evaluation.
It is an entry point that is made to train standard models in detectron2.
In order to let one script support training of many models,
this script contains logic that are specific to these built-in models and therefore
may not be suitable for your own project.
For example, your research project perhaps only needs a single "evaluator".
Therefore, we recommend you to use detectron2 as an library and take
this file as an example of how to use the library.
You may want to write your own script with your datasets and other customizations.
"""
import itertools
import logging
import os
from collections import OrderedDict
import torch
import detectron2.utils.comm as comm
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import get_cfg
from detectron2.data import MetadataCatalog
from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch
from detectron2.evaluation import (
CityscapesInstanceEvaluator,
CityscapesSemSegEvaluator,
COCOEvaluator,
COCOPanopticEvaluator,
DatasetEvaluators,
LVISEvaluator,
PascalVOCDetectionEvaluator,
SemSegEvaluator,
verify_results,
)
from detectron2.modeling import GeneralizedRCNNWithTTA
from detectron2.solver.build import maybe_add_gradient_clipping, get_default_optimizer_params
from swint import add_swint_config
os.environ['CUDA_VISIBLE_DEVICES'] = '2,3'
## =============== Register custom datasets =============== ##
from detectron2.data.datasets import register_coco_instances
register_coco_instances("SSLAD-2D_train", {}, json_file=r"/data/cenzhaojun/dataset/SODA10M/SSLAD-2D/labeled/annotations/instance_train.json",
image_root = r"/data/cenzhaojun/dataset/SODA10M/SSLAD-2D/labeled/train")
register_coco_instances("SSLAD-2D_test", {}, r"/data/cenzhaojun/dataset/SODA10M/SSLAD-2D/labeled/annotations/instance_val.json",
r"/data/cenzhaojun/dataset/SODA10M/SSLAD-2D/labeled/val")
# Set the class names
from detectron2.data import MetadataCatalog
MetadataCatalog.get("SSLAD-2D_train").thing_classes = ['Pedestrian','Cyclist','Car','Truck','Tram','Tricycle']
MetadataCatalog.get("SSLAD-2D_test").thing_classes = ['Pedestrian','Cyclist','Car','Truck','Tram','Tricycle']
# python tools/train_augmentationv2.py --config-file configs/Misc/cascade_rcnn_R_50_FPN_1x.yaml --num-gpus 2 OUTPUT_DIR training_dir/cascade_rcnn_R_50_FPN_1x_augmentation
class Trainer(DefaultTrainer):
"""
We use the "DefaultTrainer" which contains pre-defined default logic for
standard training workflow. They may not work for you, especially if you
are working on a new research project. In that case you can write your
own training loop. You can use "tools/plain_train_net.py" as an example.
"""
# def build_train_loader(cls, cfg):
# return build_detection_train_loader(cfg, mapper=DatasetMapper(cfg, is_train=True, augmentations=[
# T.RandomBrightness(0.3, 2.0),
# T.RandomContrast(0.3, 2.5),
# # T.ColorTransform
# # RandomGaussianNoise(),
# # RandomPepperNoise(),
# # T.RandomRotation([-90,90]),
# # RandomResize(0.5,1.5),
# # T.RandomCrop('relative_range',(0.3,0.3)),
# # T.RandomExtent(scale_range=(0.3, 1), shift_range=(1, 1))
# ]))
@classmethod
def build_evaluator(cls, cfg, dataset_name, output_folder=None):
"""
Create evaluator(s) for a given dataset.
This uses the special metadata "evaluator_type" associated with each builtin dataset.
For your own dataset, you can simply create an evaluator manually in your
script and do not have to worry about the hacky if-else logic here.
"""
if output_folder is None:
output_folder = os.path.join(cfg.OUTPUT_DIR, "inference")
evaluator_list = []
evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type
if evaluator_type in ["sem_seg", "coco_panoptic_seg"]:
evaluator_list.append(
SemSegEvaluator(
dataset_name,
distributed=True,
num_classes=cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES,
ignore_label=cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE,
output_dir=output_folder,
)
)
if evaluator_type in ["coco", "coco_panoptic_seg"]:
evaluator_list.append(COCOEvaluator(dataset_name, cfg, True, output_folder))
if evaluator_type == "coco_panoptic_seg":
evaluator_list.append(COCOPanopticEvaluator(dataset_name, output_folder))
if evaluator_type == "cityscapes_instance":
assert (
torch.cuda.device_count() >= comm.get_rank()
), "CityscapesEvaluator currently do not work with multiple machines."
return CityscapesInstanceEvaluator(dataset_name)
if evaluator_type == "cityscapes_sem_seg":
assert (
torch.cuda.device_count() >= comm.get_rank()
), "CityscapesEvaluator currently do not work with multiple machines."
return CityscapesSemSegEvaluator(dataset_name)
elif evaluator_type == "pascal_voc":
return PascalVOCDetectionEvaluator(dataset_name)
elif evaluator_type == "lvis":
return LVISEvaluator(dataset_name, cfg, True, output_folder)
if len(evaluator_list) == 0:
raise NotImplementedError(
"no Evaluator for the dataset {} with the type {}".format(
dataset_name, evaluator_type
)
)
elif len(evaluator_list) == 1:
return evaluator_list[0]
return DatasetEvaluators(evaluator_list)
@classmethod
def test_with_TTA(cls, cfg, model):
logger = logging.getLogger("detectron2.trainer")
        # At the end of training, run an evaluation with TTA
# Only support some R-CNN models.
logger.info("Running inference with test-time augmentation ...")
model = GeneralizedRCNNWithTTA(cfg, model)
evaluators = [
cls.build_evaluator(
cfg, name, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA")
)
for name in cfg.DATASETS.TEST
]
res = cls.test(cfg, model, evaluators)
res = OrderedDict({k + "_TTA": v for k, v in res.items()})
return res
@classmethod
def build_optimizer(cls, cfg, model):
params = get_default_optimizer_params(
model,
base_lr=cfg.SOLVER.BASE_LR,
weight_decay=cfg.SOLVER.WEIGHT_DECAY,
weight_decay_norm=cfg.SOLVER.WEIGHT_DECAY_NORM,
bias_lr_factor=cfg.SOLVER.BIAS_LR_FACTOR,
weight_decay_bias=cfg.SOLVER.WEIGHT_DECAY_BIAS,
)
def maybe_add_full_model_gradient_clipping(optim): # optim: the optimizer class
# detectron2 doesn't have full model gradient clipping now
clip_norm_val = cfg.SOLVER.CLIP_GRADIENTS.CLIP_VALUE
enable = (
cfg.SOLVER.CLIP_GRADIENTS.ENABLED
and cfg.SOLVER.CLIP_GRADIENTS.CLIP_TYPE == "full_model"
and clip_norm_val > 0.0
)
class FullModelGradientClippingOptimizer(optim):
def step(self, closure=None):
all_params = itertools.chain(*[x["params"] for x in self.param_groups])
torch.nn.utils.clip_grad_norm_(all_params, clip_norm_val)
super().step(closure=closure)
return FullModelGradientClippingOptimizer if enable else optim
optimizer_type = cfg.SOLVER.OPTIMIZER
if optimizer_type == "SGD":
optimizer = maybe_add_gradient_clipping(torch.optim.SGD)(
params, cfg.SOLVER.BASE_LR, momentum=cfg.SOLVER.MOMENTUM,
nesterov=cfg.SOLVER.NESTEROV,
weight_decay=cfg.SOLVER.WEIGHT_DECAY,
)
elif optimizer_type == "AdamW":
optimizer = maybe_add_full_model_gradient_clipping(torch.optim.AdamW)(
params, cfg.SOLVER.BASE_LR, betas=(0.9, 0.999),
weight_decay=cfg.SOLVER.WEIGHT_DECAY,
)
else:
raise NotImplementedError(f"no optimizer type {optimizer_type}")
return optimizer
def setup(args):
"""
Create configs and perform basic setups.
"""
cfg = get_cfg()
args.config_file = '../../configs/SwinT/faster_rcnn_swint_T_FPN_3x.yaml'
add_swint_config(cfg)
cfg.merge_from_file(args.config_file)
cfg.MODEL.WEIGHTS = "/data/cenzhaojun/detectron2/weights/faster_rcnn_swint_T.pth"
    cfg.DATASETS.TRAIN = ("SSLAD-2D_train",)  # training dataset name
cfg.DATASETS.TEST = ("SSLAD-2D_test",)
cfg.OUTPUT_DIR = '/data/cenzhaojun/detectron2/training_dir/faster_rcnn_swint_T_FPN_3x_full_reso'
ITERS_IN_ONE_EPOCH = int(cfg.SOLVER.MAX_ITER / cfg.SOLVER.IMS_PER_BATCH)
cfg.TEST.EVAL_PERIOD = ITERS_IN_ONE_EPOCH
cfg.INPUT.MAX_SIZE_TRAIN = 1920
cfg.INPUT.MAX_SIZE_TEST = 1920
cfg.MODEL.ROI_HEADS.NUM_CLASSES = 6
cfg.SOLVER.IMS_PER_BATCH = 22
cfg.merge_from_list(args.opts)
cfg.freeze()
default_setup(cfg, args)
return cfg
def main(args):
cfg = setup(args)
if args.eval_only:
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.resume
)
res = Trainer.test(cfg, model)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, model))
if comm.is_main_process():
verify_results(cfg, res)
return res
"""
If you'd like to do anything fancier than the standard training logic,
consider writing your own training loop (see plain_train_net.py) or
subclassing the trainer.
"""
trainer = Trainer(cfg)
trainer.resume_or_load(resume=args.resume)
if cfg.TEST.AUG.ENABLED:
trainer.register_hooks(
[hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))]
)
return trainer.train()
if __name__ == "__main__":
args = default_argument_parser().parse_args()
args.num_gpus = 2
args.resume = True
print("Command Line Args:", args)
launch(
main,
args.num_gpus,
num_machines=args.num_machines,
machine_rank=args.machine_rank,
dist_url=args.dist_url,
args=(args,),
)
| 41.817829 | 171 | 0.668644 |
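build_optimizer above wraps the chosen optimizer class so that the gradient norm of the whole model is clipped before every step. A stripped-down sketch of that wrapping pattern with plain PyTorch (it assumes only that torch is installed; the detectron2 config machinery is not needed to illustrate the idea):

import itertools
import torch

def full_model_grad_clipping(optim_cls, clip_norm_val=1.0):
    # return a subclass of the given optimizer that clips the global gradient
    # norm over all parameter groups before delegating to the real step()
    class ClippedOptimizer(optim_cls):
        def step(self, closure=None):
            all_params = itertools.chain(*[g["params"] for g in self.param_groups])
            torch.nn.utils.clip_grad_norm_(all_params, clip_norm_val)
            return super().step(closure=closure)
    return ClippedOptimizer

# usage sketch on a tiny model
model = torch.nn.Linear(4, 2)
optimizer = full_model_grad_clipping(torch.optim.SGD, clip_norm_val=0.5)(
    model.parameters(), lr=0.1
)
loss = model(torch.randn(8, 4)).pow(2).mean()
loss.backward()
optimizer.step()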
79483d18d26a548c0be9d4b3b9470efd1cad068a
| 28,287 |
py
|
Python
|
pysnmp/DELL-NETWORKING-OPENFLOW-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11 |
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/DELL-NETWORKING-OPENFLOW-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75 |
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/DELL-NETWORKING-OPENFLOW-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10 |
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module DELL-NETWORKING-OPENFLOW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DELL-NETWORKING-OPENFLOW-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:22:42 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
dellNetMgmt, = mibBuilder.importSymbols("DELL-NETWORKING-SMI", "dellNetMgmt")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
InetPortNumber, InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetPortNumber", "InetAddressType", "InetAddress")
VlanId, = mibBuilder.importSymbols("Q-BRIDGE-MIB", "VlanId")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
TimeTicks, Integer32, Counter32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, ModuleIdentity, Unsigned32, Bits, Gauge32, iso, NotificationType, Counter64, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "Integer32", "Counter32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "ModuleIdentity", "Unsigned32", "Bits", "Gauge32", "iso", "NotificationType", "Counter64", "IpAddress")
TextualConvention, TruthValue, DisplayString, MacAddress, TimeStamp = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "DisplayString", "MacAddress", "TimeStamp")
dellNetOpenFlow = ModuleIdentity((1, 3, 6, 1, 4, 1, 6027, 3, 20))
if mibBuilder.loadTexts: dellNetOpenFlow.setLastUpdated('201203271200Z')
if mibBuilder.loadTexts: dellNetOpenFlow.setOrganization('Dell Inc')
ofSwitchObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1))
ofSwitchNotification = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 20, 2))
class DataPathIdentifier(TextualConvention, OctetString):
status = 'current'
displayHint = '1x:'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(8, 8)
fixedLength = 8
ofSwitchId = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofSwitchId.setStatus('current')
ofManufacturerDesc = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofManufacturerDesc.setStatus('current')
ofHardwareDesc = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofHardwareDesc.setStatus('current')
ofSoftwareDesc = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofSoftwareDesc.setStatus('current')
ofSwitchSerialNo = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofSwitchSerialNo.setStatus('current')
ofSwitchVersion = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofSwitchVersion.setStatus('current')
ofInstTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7), )
if mibBuilder.loadTexts: ofInstTable.setStatus('current')
ofInstEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1), ).setIndexNames((0, "DELL-NETWORKING-OPENFLOW-MIB", "ofInstId"))
if mibBuilder.loadTexts: ofInstEntry.setStatus('current')
ofInstId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: ofInstId.setStatus('current')
ofInstAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstAdminState.setStatus('current')
ofInstIntfType = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("port", 1), ("vlan", 2), ("any", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstIntfType.setStatus('current')
ofInstDataPathId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 4), DataPathIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstDataPathId.setStatus('current')
ofInstConnectTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstConnectTimeout.setStatus('current')
ofInstEchoReplyTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstEchoReplyTimeout.setStatus('current')
ofInstEchoReqInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstEchoReqInterval.setStatus('current')
ofInstNumFlows = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstNumFlows.setStatus('current')
ofInstSuppCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 9), Bits().clone(namedValues=NamedValues(("port", 0), ("table", 1), ("flow", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstSuppCapabilities.setStatus('current')
ofInstSuppActions = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 7, 1, 10), Bits().clone(namedValues=NamedValues(("output", 0), ("set-vlan", 1), ("set-pcp", 2), ("set-smac", 3), ("set-dmac", 4), ("set-tos", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofInstSuppActions.setStatus('current')
ofCntlrTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8), )
if mibBuilder.loadTexts: ofCntlrTable.setStatus('current')
ofCntlrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8, 1), ).setIndexNames((0, "DELL-NETWORKING-OPENFLOW-MIB", "ofInstId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofCntlrId"))
if mibBuilder.loadTexts: ofCntlrEntry.setStatus('current')
ofCntlrId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: ofCntlrId.setStatus('current')
ofCntlrAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofCntlrAddrType.setStatus('current')
ofCntlrAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8, 1, 3), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(4, 4), ValueSizeConstraint(8, 8), ValueSizeConstraint(16, 16), ValueSizeConstraint(20, 20), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofCntlrAddr.setStatus('current')
ofCntlrPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8, 1, 4), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofCntlrPortNumber.setStatus('current')
ofCntlrProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("tls", 1), ("tcp", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofCntlrProtocol.setStatus('current')
ofCntlrConState = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 8, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("down", 1), ("up", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofCntlrConState.setStatus('current')
ofPortTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 9), )
if mibBuilder.loadTexts: ofPortTable.setStatus('current')
ofPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 9, 1), ).setIndexNames((0, "DELL-NETWORKING-OPENFLOW-MIB", "ofInstId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofPortIfIndex"))
if mibBuilder.loadTexts: ofPortEntry.setStatus('current')
ofPortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 9, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: ofPortIfIndex.setStatus('current')
ofPortAssociationType = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("instancePort", 1), ("instVlanPort", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofPortAssociationType.setStatus('current')
ofVlanTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 10), )
if mibBuilder.loadTexts: ofVlanTable.setStatus('current')
ofVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 10, 1), ).setIndexNames((0, "DELL-NETWORKING-OPENFLOW-MIB", "ofInstId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofVlanIfIndex"))
if mibBuilder.loadTexts: ofVlanEntry.setStatus('current')
ofVlanIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 10, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: ofVlanIfIndex.setStatus('current')
ofVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 10, 1, 2), VlanId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofVlanId.setStatus('current')
ofFlowTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11), )
if mibBuilder.loadTexts: ofFlowTable.setStatus('current')
ofFlowEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1), ).setIndexNames((0, "DELL-NETWORKING-OPENFLOW-MIB", "ofInstId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofFlowId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofFlowTblId"))
if mibBuilder.loadTexts: ofFlowEntry.setStatus('current')
ofFlowId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 1), Unsigned32())
if mibBuilder.loadTexts: ofFlowId.setStatus('current')
ofFlowTblId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 2), Unsigned32())
if mibBuilder.loadTexts: ofFlowTblId.setStatus('current')
ofFlowPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowPriority.setStatus('current')
ofFlowIdleTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowIdleTime.setStatus('current')
ofFlowHardTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowHardTime.setStatus('current')
ofFlowUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowUpTime.setStatus('current')
ofFlowCookie = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowCookie.setStatus('current')
ofFlowPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowPacketCount.setStatus('current')
ofFlowByteCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 11, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowByteCount.setStatus('current')
ofFlowMatchParamsTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12), )
if mibBuilder.loadTexts: ofFlowMatchParamsTable.setStatus('current')
ofFlowMatchParamsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1), )
ofFlowEntry.registerAugmentions(("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchParamsEntry"))
ofFlowMatchParamsEntry.setIndexNames(*ofFlowEntry.getIndexNames())
if mibBuilder.loadTexts: ofFlowMatchParamsEntry.setStatus('current')
ofFlowMatchInPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchInPort.setStatus('current')
ofFlowMatchEtherSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchEtherSrcAddr.setStatus('current')
ofFlowMatchEtherDstAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchEtherDstAddr.setStatus('current')
ofFlowMatchVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchVlanId.setStatus('current')
ofFlowMatchEthType = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchEthType.setStatus('current')
ofFlowMatchVlanPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchVlanPri.setStatus('current')
ofFlowMatchIpTos = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchIpTos.setStatus('current')
ofFlowMatchIpProto = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchIpProto.setStatus('current')
ofFlowMatchIpSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchIpSrcAddr.setStatus('current')
ofFlowMatchIpDestAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchIpDestAddr.setStatus('current')
ofFlowMatchTpSrcPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchTpSrcPort.setStatus('current')
ofFlowMatchTpDstPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 12, 1, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowMatchTpDstPort.setStatus('current')
ofFlowActionTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13), )
if mibBuilder.loadTexts: ofFlowActionTable.setStatus('current')
ofFlowActionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1), ).setIndexNames((0, "DELL-NETWORKING-OPENFLOW-MIB", "ofInstId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofFlowId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofFlowTblId"), (0, "DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionId"))
if mibBuilder.loadTexts: ofFlowActionEntry.setStatus('current')
ofFlowActionId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 1), Unsigned32())
if mibBuilder.loadTexts: ofFlowActionId.setStatus('current')
ofFlowActionType = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 65535))).clone(namedValues=NamedValues(("outToSwitchPort", 1), ("setVlanVid", 2), ("setVlanPcp", 3), ("stripVlan", 4), ("setDlSrc", 5), ("setDlDst", 6), ("setNetworkSrc", 7), ("setNetworkDst", 8), ("setNetworkTos", 9), ("setTpSrc", 10), ("setTpDest", 11), ("outToQueue", 12), ("vendor", 65535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionType.setStatus('current')
ofFlowActionSrcMac = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionSrcMac.setStatus('current')
ofFlowActionDstMac = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionDstMac.setStatus('current')
ofFlowActionPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 5), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionPortIndex.setStatus('current')
ofFlowActionVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 6), VlanId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionVlanId.setStatus('current')
ofFlowActionMaxLen = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionMaxLen.setStatus('current')
ofFlowActionVlanPcp = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionVlanPcp.setStatus('current')
ofFlowActionNWTos = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 13, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ofFlowActionNWTos.setStatus('current')
ofSwitchNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 20, 2, 0))
ofSwitchNotifyVariable = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 20, 2, 1))
ofSwitchFlowTableSrc = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 20, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("ifp", 1), ("vlan", 2), ("dmac", 3), ("route", 4), ("lb", 5)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: ofSwitchFlowTableSrc.setStatus('current')
ofSwitchCntlrSessionStatusChanged = NotificationType((1, 3, 6, 1, 4, 1, 6027, 3, 20, 2, 0, 1)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofCntlrConState"))
if mibBuilder.loadTexts: ofSwitchCntlrSessionStatusChanged.setStatus('current')
ofSwitchFlowTableFull = NotificationType((1, 3, 6, 1, 4, 1, 6027, 3, 20, 2, 0, 2)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchFlowTableSrc"))
if mibBuilder.loadTexts: ofSwitchFlowTableFull.setStatus('current')
ofSwitchMibConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14))
ofSwitchMibCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 1))
ofSwitchMibGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2))
ofSwitchMibCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 1, 1)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchScalarGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstanceGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofControllerGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofPortGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofVlanGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchParamsGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionGroup"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchMibNotificationsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofSwitchMibCompliance = ofSwitchMibCompliance.setStatus('current')
ofSwitchScalarGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 1)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchId"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofManufacturerDesc"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofHardwareDesc"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofSoftwareDesc"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchSerialNo"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchVersion"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofSwitchScalarGroup = ofSwitchScalarGroup.setStatus('current')
ofInstanceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 2)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofInstAdminState"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstIntfType"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstDataPathId"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstConnectTimeout"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstEchoReplyTimeout"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstEchoReqInterval"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstNumFlows"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstSuppCapabilities"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofInstSuppActions"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofInstanceGroup = ofInstanceGroup.setStatus('current')
ofControllerGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 3)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofCntlrAddrType"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofCntlrAddr"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofCntlrPortNumber"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofCntlrProtocol"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofCntlrConState"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofControllerGroup = ofControllerGroup.setStatus('current')
ofPortGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 4)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofPortAssociationType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofPortGroup = ofPortGroup.setStatus('current')
ofVlanGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 5)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofVlanId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofVlanGroup = ofVlanGroup.setStatus('current')
ofFlowGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 6)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowPriority"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowIdleTime"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowHardTime"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowUpTime"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowCookie"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowPacketCount"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowByteCount"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofFlowGroup = ofFlowGroup.setStatus('current')
ofFlowMatchParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 7)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchInPort"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchEtherSrcAddr"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchEtherDstAddr"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchVlanId"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchEthType"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchVlanPri"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchIpTos"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchIpProto"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchIpSrcAddr"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchIpDestAddr"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchTpSrcPort"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowMatchTpDstPort"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofFlowMatchParamsGroup = ofFlowMatchParamsGroup.setStatus('current')
ofFlowActionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 8)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionType"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionSrcMac"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionDstMac"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionPortIndex"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionVlanId"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionMaxLen"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionVlanPcp"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofFlowActionNWTos"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofFlowActionGroup = ofFlowActionGroup.setStatus('current')
ofSwitchMibNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 6027, 3, 20, 1, 14, 2, 9)).setObjects(("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchCntlrSessionStatusChanged"), ("DELL-NETWORKING-OPENFLOW-MIB", "ofSwitchFlowTableFull"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ofSwitchMibNotificationsGroup = ofSwitchMibNotificationsGroup.setStatus('current')
mibBuilder.exportSymbols("DELL-NETWORKING-OPENFLOW-MIB", ofVlanId=ofVlanId, ofSwitchVersion=ofSwitchVersion, ofFlowGroup=ofFlowGroup, ofInstEntry=ofInstEntry, ofInstConnectTimeout=ofInstConnectTimeout, ofVlanEntry=ofVlanEntry, ofSwitchObjects=ofSwitchObjects, ofSwitchFlowTableSrc=ofSwitchFlowTableSrc, ofFlowPacketCount=ofFlowPacketCount, ofSwitchMibNotificationsGroup=ofSwitchMibNotificationsGroup, ofInstIntfType=ofInstIntfType, ofFlowMatchIpTos=ofFlowMatchIpTos, ofSwitchNotifyVariable=ofSwitchNotifyVariable, ofFlowMatchParamsEntry=ofFlowMatchParamsEntry, ofCntlrId=ofCntlrId, ofFlowUpTime=ofFlowUpTime, ofSwitchMibConformance=ofSwitchMibConformance, ofInstAdminState=ofInstAdminState, ofSwitchFlowTableFull=ofSwitchFlowTableFull, ofCntlrEntry=ofCntlrEntry, dellNetOpenFlow=dellNetOpenFlow, ofPortIfIndex=ofPortIfIndex, ofSwitchMibCompliances=ofSwitchMibCompliances, ofFlowId=ofFlowId, ofInstSuppCapabilities=ofInstSuppCapabilities, ofFlowMatchParamsTable=ofFlowMatchParamsTable, ofSwitchNotification=ofSwitchNotification, ofFlowActionSrcMac=ofFlowActionSrcMac, ofFlowMatchVlanId=ofFlowMatchVlanId, ofHardwareDesc=ofHardwareDesc, ofFlowActionGroup=ofFlowActionGroup, ofInstDataPathId=ofInstDataPathId, ofSwitchMibCompliance=ofSwitchMibCompliance, ofInstEchoReqInterval=ofInstEchoReqInterval, ofFlowMatchIpSrcAddr=ofFlowMatchIpSrcAddr, ofCntlrAddrType=ofCntlrAddrType, ofSwitchSerialNo=ofSwitchSerialNo, ofFlowMatchIpDestAddr=ofFlowMatchIpDestAddr, ofFlowTblId=ofFlowTblId, ofControllerGroup=ofControllerGroup, ofSwitchScalarGroup=ofSwitchScalarGroup, ofFlowActionId=ofFlowActionId, ofVlanIfIndex=ofVlanIfIndex, ofCntlrPortNumber=ofCntlrPortNumber, ofFlowMatchParamsGroup=ofFlowMatchParamsGroup, ofFlowMatchEtherDstAddr=ofFlowMatchEtherDstAddr, ofFlowMatchTpSrcPort=ofFlowMatchTpSrcPort, ofFlowCookie=ofFlowCookie, ofFlowActionPortIndex=ofFlowActionPortIndex, ofFlowEntry=ofFlowEntry, ofFlowByteCount=ofFlowByteCount, ofSwitchCntlrSessionStatusChanged=ofSwitchCntlrSessionStatusChanged, ofPortAssociationType=ofPortAssociationType, ofInstanceGroup=ofInstanceGroup, ofCntlrConState=ofCntlrConState, ofFlowActionTable=ofFlowActionTable, ofVlanGroup=ofVlanGroup, ofFlowMatchEtherSrcAddr=ofFlowMatchEtherSrcAddr, ofFlowMatchInPort=ofFlowMatchInPort, ofFlowActionType=ofFlowActionType, ofInstEchoReplyTimeout=ofInstEchoReplyTimeout, ofCntlrProtocol=ofCntlrProtocol, ofFlowActionVlanId=ofFlowActionVlanId, ofManufacturerDesc=ofManufacturerDesc, ofPortTable=ofPortTable, ofFlowMatchEthType=ofFlowMatchEthType, ofInstId=ofInstId, ofCntlrAddr=ofCntlrAddr, ofFlowIdleTime=ofFlowIdleTime, ofCntlrTable=ofCntlrTable, ofPortEntry=ofPortEntry, ofSoftwareDesc=ofSoftwareDesc, ofFlowPriority=ofFlowPriority, PYSNMP_MODULE_ID=dellNetOpenFlow, ofFlowTable=ofFlowTable, ofFlowMatchVlanPri=ofFlowMatchVlanPri, ofFlowMatchIpProto=ofFlowMatchIpProto, ofFlowMatchTpDstPort=ofFlowMatchTpDstPort, ofFlowHardTime=ofFlowHardTime, ofSwitchId=ofSwitchId, ofFlowActionNWTos=ofFlowActionNWTos, ofInstSuppActions=ofInstSuppActions, ofFlowActionVlanPcp=ofFlowActionVlanPcp, ofSwitchMibGroups=ofSwitchMibGroups, ofInstTable=ofInstTable, ofInstNumFlows=ofInstNumFlows, ofSwitchNotifications=ofSwitchNotifications, ofFlowActionMaxLen=ofFlowActionMaxLen, ofVlanTable=ofVlanTable, ofFlowActionEntry=ofFlowActionEntry, ofPortGroup=ofPortGroup, ofFlowActionDstMac=ofFlowActionDstMac, DataPathIdentifier=DataPathIdentifier)
| 132.182243 | 3,386 | 0.752183 |
79483da989ae5d26cd8be4940125ef867182c156
| 831 |
py
|
Python
|
rpcservices.py
|
The-Yak-Collective/iamz1
|
52d8397abcf109cda1bb87955e1179bc6db60e56
|
[
"MIT"
] | null | null | null |
rpcservices.py
|
The-Yak-Collective/iamz1
|
52d8397abcf109cda1bb87955e1179bc6db60e56
|
[
"MIT"
] | 8 |
2021-02-16T19:25:19.000Z
|
2021-05-07T10:59:59.000Z
|
rpcservices.py
|
The-Yak-Collective/iamz1
|
52d8397abcf109cda1bb87955e1179bc6db60e56
|
[
"MIT"
] | 1 |
2021-12-06T14:22:35.000Z
|
2021-12-06T14:22:35.000Z
|
import servo_util
import xmlrpc.client
import json
PORTFORLOGGING=9501 # later we will make this an env variable. for now, use 9500-9550
PORTFORLEGS=9502 #position of each servo
PORTFORRAG=9503 #send rag commands here
logging = xmlrpc.client.ServerProxy('http://localhost:'+str(PORTFORLOGGING))
def log_start(command):
logging.logstart(command)
def log_stop():
logging.logstop()
def log_get():
g=logging.logget()
#print(g)
return json.loads(g)
legs = xmlrpc.client.ServerProxy('http://localhost:'+str(PORTFORLEGS))
def leg_pos():
return legs.legpos()
ragserver = xmlrpc.client.ServerProxy('http://localhost:'+str(PORTFORRAG))
def rag(name, reps=1,speedratio=1.0,modu=False,toldtowait=True):#dropping "savedata", as we have logger now
return ragserver.rag(name,reps,speedratio,modu,toldtowait)
| 29.678571 | 107 | 0.747292 |
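rpcservices.py only defines the client side; the servers listening on ports 9501-9503 live elsewhere in the project. A hypothetical stand-in for the logging server, built with the standard library's SimpleXMLRPCServer, shows the method names (logstart, logstop, logget) that the client above calls — the real server implementation may differ:

# Hypothetical logging server; method names and the JSON-encoded return value of
# logget() are inferred from the client code above, not taken from the project.
import json
from xmlrpc.server import SimpleXMLRPCServer

PORTFORLOGGING = 9501
_entries = []

def logstart(command):
    _entries.append({"command": command})
    return True

def logstop():
    return True

def logget():
    # the client runs json.loads() on the result, so return JSON text
    return json.dumps(_entries)

if __name__ == "__main__":
    server = SimpleXMLRPCServer(("localhost", PORTFORLOGGING), allow_none=True)
    for fn in (logstart, logstop, logget):
        server.register_function(fn)
    server.serve_forever()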
79483ebc1b92e758fe1437f325b3ab927cdcd0c2
| 6,267 |
py
|
Python
|
netket/hilbert/homogeneous.py
|
pesvut/netket
|
7f19574ddc567748344bb75a4ddd507578d94b0d
|
[
"Apache-2.0"
] | 352 |
2018-04-24T16:45:10.000Z
|
2022-03-31T01:15:34.000Z
|
netket/hilbert/homogeneous.py
|
pesvut/netket
|
7f19574ddc567748344bb75a4ddd507578d94b0d
|
[
"Apache-2.0"
] | 947 |
2018-04-24T20:16:17.000Z
|
2022-03-31T17:33:52.000Z
|
netket/hilbert/homogeneous.py
|
pesvut/netket
|
7f19574ddc567748344bb75a4ddd507578d94b0d
|
[
"Apache-2.0"
] | 148 |
2018-04-25T02:44:20.000Z
|
2022-03-11T11:42:34.000Z
|
# Copyright 2021 The NetKet Authors - All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional, List, Callable
from numbers import Real
import numpy as np
from numba import jit
from .discrete_hilbert import DiscreteHilbert
from .hilbert_index import HilbertIndex
@jit(nopython=True)
def _gen_to_bare_numbers(conditions):
return np.nonzero(conditions)[0]
@jit(nopython=True)
def _to_constrained_numbers_kernel(bare_numbers, numbers):
found = np.searchsorted(bare_numbers, numbers)
if np.max(found) >= bare_numbers.shape[0]:
raise RuntimeError("The required state does not satisfy the given constraints.")
return found
class HomogeneousHilbert(DiscreteHilbert):
r"""The Abstract base class for homogeneous hilbert spaces.
This class should only be subclassed and should not be instantiated directly.
"""
def __init__(
self,
local_states: Optional[List[Real]],
N: int = 1,
constraint_fn: Optional[Callable] = None,
):
r"""
Constructs a new ``HomogeneousHilbert`` given a list of eigenvalues of the
states and a number of sites, or modes, within this hilbert space.
This method should only be called from the subclasses `__init__` method.
Args:
local_states (list or None): Eigenvalues of the states. If the allowed
states are an infinite number, None should be passed as an argument.
N: Number of modes in this hilbert space (default 1).
constraint_fn: A function specifying constraints on the quantum numbers.
Given a batch of quantum numbers it should return a vector of bools
specifying whether those states are valid or not.
"""
assert isinstance(N, int)
self._size = N
self._is_finite = local_states is not None
if self._is_finite:
self._local_states = np.asarray(local_states)
assert self._local_states.ndim == 1
self._local_size = self._local_states.shape[0]
self._local_states = self._local_states.tolist()
self._local_states_frozen = frozenset(self._local_states)
else:
self._local_states = None
self._local_states_frozen = None
self._local_size = np.iinfo(np.intp).max
self._has_constraint = constraint_fn is not None
self._constraint_fn = constraint_fn
self._hilbert_index = None
shape = tuple(self._local_size for _ in range(self.size))
super().__init__(shape=shape)
@property
    def size(self) -> int:
        r"""The total number of degrees of freedom."""
return self._size
@property
def local_size(self) -> int:
r"""Size of the local degrees of freedom that make the total hilbert space."""
return self._local_size
def size_at_index(self, i: int) -> int:
return self.local_size
@property
    def local_states(self) -> Optional[List[float]]:
        r"""A list of discrete local quantum numbers.
If the local states are infinitely many, None is returned."""
return self._local_states
def states_at_index(self, i: int):
return self.local_states
@property
def n_states(self) -> int:
r"""The total dimension of the many-body Hilbert space.
Throws an exception iff the space is not indexable."""
hind = self._get_hilbert_index()
if not self._has_constraint:
return hind.n_states
else:
return self._bare_numbers.shape[0]
@property
def is_finite(self) -> bool:
r"""Whether the local hilbert space is finite."""
return self._is_finite
@property
def constrained(self) -> bool:
r"""Returns True if the hilbert space is constrained."""
return self._has_constraint
def _numbers_to_states(self, numbers: np.ndarray, out: np.ndarray) -> np.ndarray:
hind = self._get_hilbert_index()
return hind.numbers_to_states(self._to_bare_numbers(numbers), out)
def _states_to_numbers(self, states, out):
hind = self._get_hilbert_index()
hind.states_to_numbers(states, out)
if self._has_constraint:
out[:] = _to_constrained_numbers_kernel(
self._bare_numbers,
out,
)
return out
def _get_hilbert_index(self):
if self._hilbert_index is None:
if not self.is_indexable:
raise RuntimeError("The hilbert space is too large to be indexed.")
self._hilbert_index = HilbertIndex(
np.asarray(self.local_states, dtype=np.float64), self.size
)
if self._has_constraint:
self._bare_numbers = _gen_to_bare_numbers(
self._constraint_fn(self._hilbert_index.all_states())
)
else:
self._bare_numbers = np.empty(0, dtype=np.intp)
return self._hilbert_index
def _to_bare_numbers(self, numbers):
if self._constraint_fn is None:
return numbers
else:
return self._bare_numbers[numbers]
def __repr__(self):
constr = (
", has_constraint={}".format(self._has_constraint)
if self._has_constraint
else ""
)
clsname = type(self).__name__
return f"{clsname}(local_size={self._local_size}, N={self.size}{constr})"
@property
def _attrs(self):
return (
self.size,
self.local_size,
self._local_states_frozen,
self._has_constraint,
self._constraint_fn,
)
| 32.304124 | 88 | 0.643849 |
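The constrained-numbering machinery above reduces to two NumPy operations: nonzero collects the bare indices of basis states that satisfy the constraint, and searchsorted maps bare indices back to constrained ones. A small sketch of that mapping with plain NumPy (the numba decoration in the original is only a speed-up; the toy constraint below keeps the even-numbered states):

import numpy as np

# pretend the unconstrained space has 8 basis states and the constraint keeps
# only the even-numbered ones
conditions = np.array([1, 0, 1, 0, 1, 0, 1, 0], dtype=bool)

# analogue of _gen_to_bare_numbers: constrained index -> bare index
bare_numbers = np.nonzero(conditions)[0]           # [0, 2, 4, 6]

# analogue of _to_constrained_numbers_kernel: bare index -> constrained index
bare = np.array([4, 0, 6])
constrained = np.searchsorted(bare_numbers, bare)  # [2, 0, 3]

assert np.array_equal(bare_numbers[constrained], bare)
print(bare_numbers, constrained)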
79483efb5e8bdc54697b393be04055b339e883ec
| 2,500 |
py
|
Python
|
machine_learning/logistic_regression.py
|
devanshpratapsingh/Python
|
5327f54512a35fa8c55a83499d56875abbde633c
|
[
"MIT"
] | null | null | null |
machine_learning/logistic_regression.py
|
devanshpratapsingh/Python
|
5327f54512a35fa8c55a83499d56875abbde633c
|
[
"MIT"
] | null | null | null |
machine_learning/logistic_regression.py
|
devanshpratapsingh/Python
|
5327f54512a35fa8c55a83499d56875abbde633c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
## Logistic Regression from scratch
# In[62]:
# In[63]:
# importing all the required libraries
"""Implementing logistic regression for a classification problem
Helpful resources:
1.Coursera ML course
2.https://medium.com/@martinpella/logistic-regression-from-scratch-in-python-124c5636b8ac"""
import numpy as np
import matplotlib.pyplot as plt
# get_ipython().run_line_magic('matplotlib', 'inline')
from sklearn import datasets
# In[67]:
# sigmoid function or logistic function is used as a hypothesis function in classification problems
def sigmoid_function(z):
return 1 / (1 + np.exp(-z))
def cost_function(h, y):
return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean()
def log_likelihood(X, Y, weights):
scores = np.dot(X, weights)
return np.sum(Y * scores - np.log(1 + np.exp(scores)))
# here alpha is the learning rate, X is the feature matrix, y is the target matrix
def logistic_reg(alpha, X, y, max_iterations=70000):
theta = np.zeros(X.shape[1])
for iterations in range(max_iterations):
z = np.dot(X, theta)
h = sigmoid_function(z)
gradient = np.dot(X.T, h - y) / y.size
theta = theta - alpha * gradient # updating the weights
z = np.dot(X, theta)
h = sigmoid_function(z)
J = cost_function(h, y)
if iterations % 100 == 0:
print(f"loss: {J} \t") # printing the loss after every 100 iterations
return theta
# In[68]:
if __name__ == "__main__":
iris = datasets.load_iris()
X = iris.data[:, :2]
y = (iris.target != 0) * 1
alpha = 0.1
theta = logistic_reg(alpha, X, y, max_iterations=70000)
print("theta: ", theta) # printing the theta i.e our weights vector
def predict_prob(X):
return sigmoid_function(
np.dot(X, theta)
) # predicting the value of probability from the logistic regression algorithm
plt.figure(figsize=(10, 6))
plt.scatter(X[y == 0][:, 0], X[y == 0][:, 1], color="b", label="0")
plt.scatter(X[y == 1][:, 0], X[y == 1][:, 1], color="r", label="1")
(x1_min, x1_max) = (X[:, 0].min(), X[:, 0].max())
(x2_min, x2_max) = (X[:, 1].min(), X[:, 1].max())
(xx1, xx2) = np.meshgrid(np.linspace(x1_min, x1_max), np.linspace(x2_min, x2_max))
grid = np.c_[xx1.ravel(), xx2.ravel()]
probs = predict_prob(grid).reshape(xx1.shape)
plt.contour(xx1, xx2, probs, [0.5], linewidths=1, colors="black")
plt.legend()
plt.show()
| 28.735632 | 99 | 0.6212 |
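Once theta has been fit, predictions follow from thresholding the sigmoid at 0.5. A short follow-up sketch computing a training accuracy on the same iris subset (the weight vector below is made up for illustration; in the script above it comes from logistic_reg):

import numpy as np
from sklearn import datasets

iris = datasets.load_iris()
X = iris.data[:, :2]
y = (iris.target != 0) * 1

theta = np.array([4.0, -7.0])             # illustrative weights only

probs = 1 / (1 + np.exp(-X.dot(theta)))   # sigmoid of the linear scores
preds = (probs >= 0.5).astype(int)        # threshold at 0.5
print("training accuracy:", (preds == y).mean())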
79483ff64bb972c4ad5eec4c110a901d23a421b6
| 1,338 |
py
|
Python
|
graph4nlp/pytorch/modules/config/graph_embedding/__init__.py
|
stjordanis/graph4nlp
|
c6ebde32bc77d3a7b78f86a93f19b1c057963ffa
|
[
"Apache-2.0"
] | 1 |
2021-06-06T15:23:11.000Z
|
2021-06-06T15:23:11.000Z
|
graph4nlp/pytorch/modules/config/graph_embedding/__init__.py
|
stjordanis/graph4nlp
|
c6ebde32bc77d3a7b78f86a93f19b1c057963ffa
|
[
"Apache-2.0"
] | null | null | null |
graph4nlp/pytorch/modules/config/graph_embedding/__init__.py
|
stjordanis/graph4nlp
|
c6ebde32bc77d3a7b78f86a93f19b1c057963ffa
|
[
"Apache-2.0"
] | 1 |
2021-11-01T08:41:26.000Z
|
2021-11-01T08:41:26.000Z
|
from ....modules.utils.config_utils import get_yaml_config
import os
str2yaml = {"gat": "gat.yaml", "gcn": "gcn.yaml", "ggnn": "ggnn.yaml", "graphsage": "graphsage.yaml"}
dir_path = os.path.dirname(os.path.realpath(__file__))
def get_graph_embedding_args(graph_embedding_name):
"""
It will build the template for ``GNNBase`` model.
Parameters
----------
graph_embedding_name: str
The graph embedding name. Expected in ["gcn", "gat", "graphsage", "ggnn"].
If it can't find the ``graph_embedding_name``, it will return ``{}``.
Returns
-------
template_dict: dict
The template dict.
The structure is shown as follows:
{
graph_embedding_share: {num_layers: 1, input_size: 300, ...},
graph_embedding_private: {heads: [1], attn_drop: 0.0}
}
The ``graph_embedding_share`` contains the parameters shared by all ``GNNBase`` models.
The ``graph_embedding_private`` contains the parameters specifically in each graph_embedding methods.
"""
if graph_embedding_name in str2yaml.keys():
yaml_name = str2yaml[graph_embedding_name]
path = os.path.join(dir_path, yaml_name)
config = get_yaml_config(path)
return config
else:
return {}
__all__ = ["get_graph_embedding_args"]
| 35.210526 | 109 | 0.647235 |
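A usage sketch for the helper above (it assumes the graph4nlp package and its bundled YAML templates are installed; the key names shown are the ones documented in the docstring):

# hypothetical usage; requires graph4nlp to be installed
from graph4nlp.pytorch.modules.config.graph_embedding import get_graph_embedding_args

args = get_graph_embedding_args("gcn")
print(sorted(args.keys()))            # expected: graph_embedding_private, graph_embedding_share

print(get_graph_embedding_args("no-such-method"))   # {} for unknown names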
7948411da0222f4375779780f0f63f498d8c6e8e
| 32,242 |
py
|
Python
|
739. Daily Temperatures.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
739. Daily Temperatures.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
739. Daily Temperatures.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Author: zengjq
# @Date: 2020-03-04 14:04:58
# @Last Modified by: zengjq
# @Last Modified time: 2020-03-04 15:04:19
class Solution:
    # Time Limit Exceeded
    # most likely because of the pop operation below
    # how can it be optimized?
# def dailyTemperatures(self, T: List[int]) -> List[int]:
def dailyTemperatures1(self, T):
stack_p = []
        # "modified" marks whether each position has already been filled in
modified = [0] * len(T)
for i in range(len(T)):
pop_list = []
for index, x in enumerate(stack_p):
if T[x] < T[i]:
T[x] = i - x
pop_list.append(index)
modified[x] = 1
# print(pop_list)
for index in pop_list[::-1]:
stack_p.pop(index)
stack_p.append(i)
for x in range(len(T)):
if modified[x] == 0:
T[x] = 0
return T
    # Time Limit Exceeded
    # a small optimization: use the marker list directly as the result list
def dailyTemperatures2(self, T):
stack_p = []
res = [0] * len(T)
for i in range(len(T)):
pop_list = []
for index, x in enumerate(stack_p):
if T[x] < T[i]:
pop_list.append(index)
res[x] = i - x
# print(pop_list)
for index in pop_list[::-1]:
stack_p.pop(index)
stack_p.append(i)
return res
    # optimize the stack_p.pop(index) operation
    # stack_p keeps only indices whose values are greater than the current element's
    # traverse stack_p from the tail so that each pop removes the last element
# Runtime: 476 ms, faster than 94.64% of Python3 online submissions for Daily Temperatures.
# Memory Usage: 16.9 MB, less than 34.21% of Python3 online submissions for Daily Temperatures.
def dailyTemperatures(self, T):
stack_p = []
res = [0] * len(T)
for i in range(len(T)):
while stack_p and T[stack_p[-1]] < T[i]:
res[stack_p[-1]] = i - stack_p[-1]
stack_p.pop()
stack_p.append(i)
return res
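    # Worked trace of the monotonic stack on [73, 74, 75, 71, 69, 72, 76, 73]
    # (indices stay on the stack until a warmer day appears and pops them):
    #   i=1: 74>73 -> res[0]=1      i=2: 75>74 -> res[1]=1
    #   i=5: 72>69, 72>71 -> res[4]=1, res[3]=2
    #   i=6: 76>72, 76>75 -> res[5]=1, res[2]=4
    #   76 and the trailing 73 are never popped, so they keep 0
    # giving [1, 1, 4, 2, 1, 1, 0, 0], the expected answer used below.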
if __name__ == '__main__':
s = Solution()
print(s.dailyTemperatures([73, 74, 75, 71, 69, 72, 76, 73]))
# [1, 1, 4, 2, 1, 1, 0, 0]
# print(s.dailyTemperatures([55,38,53,81,61,93,97,32,43,78]))
T = [47,34,47,34,47,47,34,34,47,47,34,47,47,47,47,34,47,34,34,47,34,34,34,47,34,47,34,47,34,47,34,34,34,34,34,47,34,34,47,47,47,47,34,34,47,47,34,47,47,34,47,47,47,34,47,34,34,34,34,47,47,34,34,34,47,47,34,34,34,47,34,34,34,34,47,34,34,34,34,47,47,47,34,47,34,34,47,47,34,47,34,34,47,34,34,34,47,34,34,47,34,47,34,47,47,47,34,47,34,47,47,34,47,47,47,47,47,47,47,34,47,47,47,47,34,34,34,34,47,34,34,34,34,34,47,34,47,47,47,47,34,34,47,47,34,47,34,47,47,47,34,47,34,34,34,47,47,47,34,34,34,34,34,47,47,47,47,34,47,47,34,47,34,34,47,34,47,34,34,34,47,34,34,34,47,47,47,47,47,47,47,47,34,47,47,47,47,34,34,34,47,34,34,34,34,47,47,47,34,34,34,34,47,34,47,34,34,47,47,47,34,34,47,34,47,34,34,47,47,34,34,47,34,47,47,47,47,34,47,34,47,34,47,34,34,34,47,47,47,34,34,47,34,47,34,47,47,34,34,47,34,34,47,34,34,47,47,34,34,47,47,47,34,34,47,47,47,34,47,47,47,47,47,34,34,34,34,34,34,34,47,47,34,47,34,47,47,47,47,47,47,47,47,47,47,34,47,47,47,34,47,34,34,34,47,47,47,47,34,47,34,47,34,47,47,34,34,34,47,34,34,47,34,47,34,47,34,34,34,34,47,47,34,47,47,47,34,47,34,34,47,34,47,34,47,34,47,34,47,34,47,47,34,34,47,34,47,47,47,47,47,34,34,34,47,34,47,34,34,47,47,47,34,47,47,47,47,34,34,47,47,34,34,34,47,47,47,34,47,47,47,34,47,34,34,47,47,34,47,34,34,47,34,34,34,34,47,47,47,34,47,47,34,47,47,34,34,34,34,47,34,34,47,34,47,47,47,34,47,34,34,47,34,47,34,34,34,34,34,34,47,47,47,47,47,47,47,34,34,34,47,34,34,47,47,47,34,47,34,47,34,47,34,47,47,47,34,34,34,34,34,34,34,34,47,47,47,34,47,34,34,34,47,34,47,47,34,47,47,47,34,34,34,34,34,47,34,47,34,34,47,34,47,47,34,34,34,34,47,47,34,34,47,47,34,34,34,34,34,34,34,47,34,34,47,34,34,47,34,34,47,34,34,34,47,47,34,34,47,47,47,34,34,47,47,47,34,47,47,47,47,47,47,47,34,47,47,47,34,47,47,34,47,34,34,47,34,34,34,34,34,34,47,47,47,34,47,47,34,47,34,34,47,34,34,34,47,47,34,47,47,34,34,34,47,47,47,47,34,47,34,34,47,47,47,47,47,34,47,34,34,34,47,47,47,34,34,47,47,34,34,34,34,34,47,34,47,34,47,34,47,47,47,47,47,34,47,34,34,34,34,34,47,34,47,47,34,34,47,34,47,34,47,34,47,34,47,34,47,47,34,47,47,47,47,34,34,47,34,34,34,34,47,34,34,47,34,34,34,34,34,47,34,47,34,47,34,47,47,47,34,47,34,34,47,47,34,34,34,34,34,34,47,47,47,47,47,47,34,34,34,34,34,47,47,34,47,47,34,47,34,34,47,34,34,47,47,47,34,47,34,47,47,47,47,34,47,47,34,34,34,34,34,34,47,47,47,47,47,47,47,47,47,34,34,34,47,34,34,47,34,47,34,47,34,47,34,47,47,47,34,34,34,34,47,34,47,47,47,34,34,34,47,47,34,34,47,47,47,34,47,34,47,47,47,47,34,34,34,34,47,34,47,47,47,34,47,47,34,47,34,47,47,47,34,47,34,47,34,47,47,47,47,47,34,34,34,47,47,47,34,47,34,34,34,34,34,34,34,47,47,47,47,47,47,34,47,47,47,34,34,34,47,47,47,47,47,34,34,47,47,34,34,34,34,47,34,34,34,47,34,47,34,47,47,47,47,47,47,34,47,34,47,34,47,34,34,34,34,34,34,47,47,47,34,34,34,34,34,47,34,34,34,47,47,47,34,47,34,47,34,34,47,47,34,34,47,34,47,47,34,47,34,47,34,47,47,34,34,47,47,34,34,34,47,34,34,47,47,34,47,47,34,34,34,47,34,34,34,47,34,34,34,47,47,34,47,34,47,34,47,34,34,34,47,34,34,47,34,47,34,47,47,47,47,47,47,47,34,47,34,34,47,47,34,34,47,34,34,47,47,47,34,47,34,34,47,47,34,34,34,34,34,47,47,34,47,47,34,47,34,34,34,47,34,34,34,47,47,34,34,34,34,34,47,47,47,34,47,47,47,47,34,47,47,47,47,47,34,34,47,47,47,47,34,34,34,47,34,47,47,34,47,47,34,34,47,47,47,47,47,47,34,47,47,34,34,34,47,47,47,47,34,47,47,47,34,47,47,34,34,34,47,34,34,34,47,34,47,34,47,34,47,47,47,47,34,47,34,47,34,47,34,34,47,34,34,47,34,47,47,47,47,34,47,34,47,34,34,34,34,47,34,34,34,47,34,47,47,34,47,34,47,34,47,47,47,47,47,47,34,34,47,47,34,34,47,47,47,47,47,34,47,34,34,47,47,34,34,47,47,47,34,47,34,47,34,47,47,34,47,34,34,4
7,34,34,47,47,47,34,34,47,47,34,47,47,47,34,47,34,34,47,34,34,47,47,34,34,47,47,34,34,47,47,34,47,34,34,47,34,34,47,47,47,47,47,34,47,47,34,34,34,34,34,47,47,34,47,47,34,47,47,34,47,47,47,47,34,34,34,34,34,34,34,47,34,47,47,34,34,34,47,34,34,47,34,47,47,34,34,47,47,34,47,47,47,47,47,47,47,47,34,34,47,47,47,34,34,34,47,47,47,47,47,47,34,47,47,47,34,34,34,34,34,34,47,47,34,47,34,47,47,47,34,34,47,34,34,34,34,47,34,34,47,34,47,47,34,34,47,34,34,34,47,34,47,47,34,47,47,34,47,47,34,34,47,47,34,34,34,47,47,47,47,47,34,34,34,34,34,34,47,47,34,34,34,34,34,47,34,47,34,47,47,34,47,47,47,47,34,47,34,47,47,34,34,47,47,34,34,47,47,47,47,34,47,47,47,34,47,47,34,34,47,47,34,34,34,47,47,34,34,47,34,34,34,47,47,34,34,34,47,47,34,47,47,47,47,47,47,34,47,47,47,47,34,34,47,47,47,34,34,47,47,47,34,34,47,47,47,47,34,34,47,47,34,47,47,34,47,47,47,47,47,47,47,47,47,47,47,47,34,34,47,34,47,47,34,34,47,34,34,34,34,34,47,34,47,47,47,34,47,47,34,34,34,47,47,47,47,47,34,47,47,47,34,47,34,34,34,47,34,34,34,47,47,47,47,34,34,34,34,34,47,47,34,47,47,34,47,47,47,34,34,34,34,47,34,47,47,47,47,47,47,47,34,34,47,47,47,34,34,34,34,34,34,34,47,34,47,34,34,47,34,34,34,47,34,34,47,47,34,47,34,47,47,34,34,34,34,47,47,34,34,34,47,47,34,47,47,34,34,47,34,47,34,34,34,47,34,34,34,34,47,47,34,47,47,34,34,47,34,34,34,47,34,34,34,34,34,47,34,47,34,34,34,34,34,34,47,34,34,47,34,47,34,34,34,47,47,47,34,34,47,47,47,34,47,34,47,34,34,34,34,34,34,47,34,34,34,47,34,34,47,34,47,34,47,47,47,34,34,47,34,47,47,47,47,34,47,47,34,34,47,34,34,34,34,34,34,34,47,34,47,47,47,34,34,47,34,34,34,34,34,34,34,34,47,34,47,34,34,47,34,47,47,34,34,47,47,47,47,34,34,34,34,34,47,34,47,34,47,34,47,47,34,34,47,47,47,34,47,34,34,47,34,47,47,47,34,47,34,34,34,34,47,34,34,34,34,47,34,34,47,47,47,34,34,47,47,47,47,47,34,34,34,47,47,47,34,47,47,34,47,34,34,34,34,47,47,47,47,34,47,47,34,34,47,34,47,47,34,47,47,34,34,47,47,47,47,34,47,47,34,34,34,34,47,34,47,47,34,34,47,47,34,47,34,34,47,34,47,34,47,47,47,34,47,34,47,47,47,34,34,47,34,47,34,34,34,47,47,34,47,47,34,34,47,47,47,47,47,34,47,47,47,47,47,47,47,34,47,34,34,34,34,34,34,47,47,34,47,34,47,47,47,34,47,47,34,47,47,47,47,47,34,47,47,47,34,34,34,34,47,47,34,47,47,34,47,34,34,47,34,47,47,34,47,34,34,34,47,47,47,47,34,34,34,34,34,34,34,34,34,34,47,34,47,47,47,34,34,47,47,47,47,47,47,47,34,47,34,47,34,34,34,34,34,47,47,47,47,47,47,34,47,47,34,47,47,34,34,34,47,47,34,34,47,34,34,34,47,47,34,34,47,47,34,34,34,47,34,47,34,47,34,34,47,47,47,47,47,47,34,47,47,47,47,47,47,34,47,34,34,47,34,34,34,34,47,47,47,34,47,34,34,47,47,47,47,34,47,34,34,47,47,34,47,47,34,47,34,47,34,47,47,34,34,34,34,34,47,47,34,47,34,47,34,34,47,34,34,47,34,34,47,47,47,47,34,47,34,47,47,47,34,34,47,34,47,34,34,34,47,47,34,34,34,34,34,34,47,47,47,47,47,47,47,34,47,47,47,34,34,47,34,34,47,47,34,47,47,34,47,47,34,34,34,47,34,47,34,34,47,47,34,47,47,47,34,34,47,34,34,34,47,47,34,34,34,34,34,47,34,47,47,34,34,47,34,47,47,34,47,47,34,47,47,34,47,47,47,34,34,47,47,34,34,34,34,47,47,47,34,34,47,47,34,34,47,47,34,47,34,47,34,34,34,34,34,34,34,34,34,47,47,47,47,34,34,34,47,47,47,47,47,34,47,47,34,47,34,47,47,34,47,34,34,47,34,34,47,47,47,47,47,34,34,34,47,34,47,47,47,47,47,47,34,34,47,34,47,47,34,47,47,34,34,34,47,47,47,34,47,47,47,47,47,34,47,34,47,34,47,34,34,34,47,47,47,34,47,47,47,34,47,47,47,47,47,47,34,47,47,47,47,34,47,34,34,47,47,34,47,34,47,47,34,47,34,34,34,47,47,34,47,34,47,34,47,34,34,47,34,47,47,47,34,47,34,47,47,34,34,47,34,47,34,34,34,34,34,34,47,34,34,34,34,34,34,34,47,47,47,47,47,47,34,47,47,47,34,47,34,47,47,34,34,34,34,34,47,47,47,47,47,47,34,4
7,47,47,47,34,47,47,47,47,34,34,47,47,47,47,34,47,34,47,47,47,34,47,34,47,34,47,34,47,47,34,47,47,34,34,47,47,34,47,47,47,47,47,34,47,47,47,34,47,47,47,47,47,47,47,47,47,34,34,47,34,34,34,47,47,34,47,47,47,34,34,47,34,47,47,34,34,34,47,47,34,47,47,47,47,47,34,34,34,34,47,47,34,47,34,34,34,34,34,47,47,34,47,47,34,47,34,34,47,34,34,34,47,47,34,34,34,47,34,34,34,47,34,47,34,34,47,47,34,34,34,47,34,34,47,34,47,34,34,47,47,47,47,34,47,34,34,47,34,34,34,47,47,34,47,34,47,47,34,47,47,47,47,47,34,34,47,34,34,47,47,47,47,47,47,47,47,34,47,34,47,47,34,34,34,34,34,47,47,47,47,34,47,34,47,47,34,47,34,47,34,47,34,47,34,34,47,34,47,47,34,47,34,47,47,34,34,47,34,34,47,34,47,47,47,47,34,34,47,34,34,47,34,47,47,47,47,34,47,47,34,34,47,47,47,47,34,47,34,47,34,47,47,34,47,34,47,34,47,47,34,34,47,47,47,47,34,34,34,47,47,47,47,34,34,47,34,47,47,47,34,47,47,34,47,34,47,47,34,34,34,47,34,47,47,34,47,47,34,47,34,34,34,47,47,34,34,47,47,47,34,47,47,34,47,47,47,34,34,47,34,47,47,47,47,34,47,34,34,34,47,47,47,47,34,47,47,47,34,34,47,34,47,34,47,34,47,47,34,34,34,34,34,34,47,47,34,34,34,47,34,47,34,34,34,47,34,47,34,34,34,47,34,34,47,47,47,34,34,34,34,34,34,34,47,34,47,34,34,47,34,47,47,34,47,47,34,47,47,34,47,34,34,34,47,47,34,47,34,47,34,47,47,34,34,47,47,34,34,47,47,34,34,34,47,34,47,47,34,47,34,34,34,47,47,47,34,34,34,34,34,34,47,34,47,34,34,34,47,47,47,34,34,47,34,34,47,47,47,47,34,34,47,47,47,47,47,34,34,34,34,47,34,34,47,47,47,34,47,34,34,34,34,47,34,34,34,47,47,34,34,47,47,47,34,34,47,34,34,47,47,47,47,47,34,34,34,47,47,47,34,47,34,34,34,47,34,34,34,34,47,47,47,34,34,47,47,47,47,34,47,34,47,34,34,34,47,47,47,47,47,47,34,34,47,47,34,34,34,47,34,47,34,34,34,34,34,47,34,47,47,34,47,47,34,34,34,47,34,47,47,47,47,47,34,47,47,34,34,47,34,47,47,34,47,34,34,47,34,47,34,47,34,34,47,47,47,47,47,47,34,34,34,34,47,34,34,47,34,34,47,34,47,47,34,34,47,47,47,34,47,34,34,34,47,47,34,34,47,47,34,47,47,34,34,34,34,34,34,34,47,34,34,47,47,47,34,34,47,47,47,34,34,47,34,34,34,34,34,34,34,34,47,34,34,34,47,47,34,47,34,34,47,34,34,34,47,34,34,47,34,34,34,47,34,34,47,34,47,34,47,34,34,47,34,47,47,34,47,34,34,47,34,47,47,47,34,34,47,47,47,47,34,34,34,34,34,47,47,47,34,47,34,34,34,47,47,34,34,47,34,47,47,47,47,47,47,47,34,34,34,47,34,47,34,47,34,34,47,47,34,47,47,47,47,47,47,47,47,34,34,34,34,47,47,47,47,47,34,34,47,34,34,34,47,34,47,47,34,34,34,34,34,34,34,34,34,47,34,34,47,47,47,34,34,34,47,47,47,47,47,47,47,47,47,47,34,47,47,47,47,34,47,34,34,34,47,47,34,47,47,47,47,34,47,34,34,34,47,47,47,47,34,47,34,47,34,34,47,47,34,34,34,34,47,47,47,47,47,34,47,34,34,34,34,47,47,34,47,34,34,34,47,47,34,34,47,47,47,47,47,34,47,47,47,34,47,47,47,47,47,34,34,47,34,34,34,34,47,47,47,34,47,47,34,47,47,47,34,47,47,47,47,47,34,47,47,47,47,34,47,47,47,47,47,34,47,34,47,47,34,47,34,34,47,47,34,47,47,47,47,47,34,34,47,34,47,47,34,34,47,34,47,47,47,34,34,34,34,34,34,47,34,34,47,34,47,47,47,47,34,34,34,34,47,34,47,34,47,34,34,34,34,34,47,34,34,34,47,47,34,34,47,34,34,47,34,34,47,47,47,34,47,47,47,34,47,34,34,47,47,47,34,47,34,47,34,34,47,47,34,47,34,47,47,34,34,34,47,34,47,47,47,34,47,34,34,34,47,34,47,47,34,47,47,47,34,47,34,47,47,34,34,47,34,34,34,34,47,47,34,34,34,34,34,34,47,34,34,47,34,47,47,47,47,34,34,47,34,47,34,34,34,34,34,47,34,47,47,34,47,47,34,47,34,47,34,34,47,34,47,47,34,34,34,34,34,34,34,47,47,47,34,34,34,34,34,47,34,34,47,47,34,34,34,47,47,34,34,47,47,34,34,47,47,34,47,47,47,47,47,47,47,34,47,34,34,47,34,34,34,34,47,34,47,34,47,34,34,34,34,47,34,47,47,34,34,47,47,34,34,34,34,47,47,47,47,47,47,34,34,47,34,47,47,34,47,34,34,34,47,47,47,47,47,3
4,34,34,47,47,47,47,34,34,34,34,47,47,47,34,34,34,34,34,47,34,34,47,47,47,47,34,47,34,47,47,47,34,34,47,34,47,47,34,47,47,34,34,47,47,47,47,47,34,34,34,34,47,34,47,47,47,47,47,34,34,47,47,34,47,47,34,34,34,34,47,47,34,47,34,47,34,47,34,34,47,34,47,34,47,47,47,47,47,34,34,34,47,47,34,34,34,47,34,47,34,34,34,34,34,34,34,34,47,34,34,34,47,34,47,47,34,47,34,47,34,47,47,47,34,34,47,34,47,47,34,47,47,34,47,34,47,34,34,47,34,47,34,47,47,47,34,34,47,47,47,47,47,47,47,34,34,34,47,47,34,47,34,47,34,34,34,47,34,47,34,47,34,47,47,47,34,47,47,47,34,47,47,47,47,34,47,47,34,34,47,47,47,34,47,47,47,34,47,47,34,34,47,47,47,47,34,47,34,47,34,47,34,34,34,47,47,34,47,34,34,34,34,34,47,34,34,34,47,47,47,47,47,34,47,34,47,34,47,34,47,34,47,34,47,34,47,34,47,34,47,34,34,34,34,47,34,34,47,47,47,47,47,47,34,34,47,47,34,34,34,34,34,34,47,47,47,47,34,47,34,47,47,47,34,34,47,47,34,34,34,34,34,34,47,34,34,47,34,47,34,47,34,34,47,34,34,34,47,34,47,34,34,47,47,34,47,47,47,34,47,34,34,34,47,34,34,34,34,47,47,34,34,34,34,34,47,34,47,34,47,34,47,34,47,34,34,47,34,47,47,34,47,47,47,34,47,34,47,47,47,34,47,47,47,47,34,47,47,47,47,34,47,47,47,34,47,47,47,34,47,34,34,47,47,47,47,47,34,34,34,47,34,34,47,47,34,34,34,47,47,47,34,34,34,34,34,47,34,47,34,34,47,34,47,47,47,34,34,47,34,34,34,34,34,34,47,47,47,34,34,47,34,47,47,47,47,34,47,34,34,34,47,34,47,34,34,47,47,47,47,47,47,47,34,34,47,47,47,47,47,47,47,34,34,34,47,47,47,47,47,34,34,34,34,34,34,47,34,34,47,34,34,47,47,34,47,34,47,34,47,47,47,34,34,34,34,34,34,34,47,34,47,34,47,47,34,34,34,34,47,34,47,47,34,34,47,34,47,34,34,47,47,47,34,47,47,47,34,34,47,34,47,34,47,34,34,47,34,47,47,47,47,47,34,34,47,47,47,47,47,47,34,34,47,47,34,34,34,34,47,47,47,34,34,34,47,34,34,47,47,47,47,47,34,47,34,47,47,34,47,47,34,47,47,34,34,47,47,34,47,34,47,47,34,34,34,47,34,47,34,34,47,47,34,34,34,34,34,34,34,47,34,47,47,47,34,34,47,34,47,47,47,34,34,47,34,47,47,34,34,34,34,47,47,47,34,34,47,34,34,47,47,47,47,47,47,34,47,34,47,47,34,47,34,34,34,34,47,34,47,34,34,34,47,34,47,47,47,47,34,34,47,34,47,34,47,47,47,34,34,34,47,47,34,34,34,34,47,34,47,47,47,34,47,34,34,47,34,47,47,47,47,47,47,47,47,47,34,47,47,34,47,34,47,47,47,47,47,47,47,34,34,47,47,34,34,34,34,34,47,34,34,34,47,47,47,34,34,47,34,47,47,34,34,47,34,34,47,47,47,47,47,34,34,47,47,34,47,34,47,34,34,47,47,34,34,47,47,34,34,47,47,34,47,47,47,47,34,34,34,34,34,34,34,34,47,47,34,34,47,47,47,47,47,34,47,47,34,47,34,47,34,34,34,47,34,34,47,34,34,34,34,34,47,47,47,34,47,34,47,34,34,34,34,47,47,34,47,34,34,47,34,34,34,34,34,34,34,47,47,34,34,47,34,47,34,47,47,34,47,34,47,47,47,34,47,34,47,34,34,47,47,47,47,47,34,34,34,34,47,34,34,47,34,47,34,34,34,34,34,47,47,34,47,47,47,47,47,34,34,34,47,47,34,47,47,47,34,47,47,34,47,34,47,34,34,34,34,34,47,47,34,34,47,34,47,34,34,34,34,34,47,47,47,47,47,47,47,34,47,47,47,47,34,34,34,47,47,34,34,34,47,47,47,34,34,34,47,47,34,34,47,34,34,47,34,47,34,34,34,34,34,34,47,34,47,34,34,34,34,47,47,34,47,34,47,34,47,34,47,47,34,47,47,47,47,34,47,34,34,34,47,47,47,47,47,34,47,34,34,34,34,34,47,34,34,34,47,34,47,47,34,47,34,47,47,47,47,34,34,47,34,34,34,47,34,34,34,34,34,34,47,34,47,47,47,47,47,47,34,47,47,34,34,47,47,34,47,47,47,34,34,34,34,34,47,47,34,47,34,34,34,47,34,34,34,34,47,47,34,47,47,47,34,47,47,47,47,34,34,34,47,34,47,47,47,34,47,47,34,47,34,47,34,34,47,47,47,34,34,47,47,47,34,47,47,34,34,47,47,47,34,34,34,34,47,47,47,34,47,47,34,34,47,47,47,47,34,34,34,47,47,34,34,34,47,47,34,47,34,34,47,47,47,34,34,34,34,34,34,34,34,34,34,34,34,34,34,47,34,47,34,47,34,47,34,47,47,47,34,47,34,47,34,47,34,34,34,47,47,34,47,34,34,4
7,47,47,34,47,34,47,47,47,34,34,47,34,34,34,34,47,47,47,47,34,47,34,47,47,47,47,34,34,47,47,34,47,34,47,47,34,47,34,34,34,47,34,34,47,47,47,47,47,47,34,34,34,34,47,34,47,47,47,47,47,34,34,34,47,34,47,34,47,47,34,34,47,47,34,47,34,47,34,34,34,34,47,34,34,34,47,34,34,34,47,34,34,34,47,47,47,47,47,34,34,34,34,47,34,34,47,47,47,34,34,47,34,34,47,47,34,34,47,47,47,47,47,34,47,47,34,47,34,47,34,47,47,47,34,34,34,34,47,34,34,47,34,47,47,47,47,34,47,34,47,34,34,34,47,47,34,34,47,47,47,34,34,47,34,47,34,47,34,47,34,47,47,47,34,34,47,47,47,47,34,47,47,47,47,34,47,47,47,34,34,34,47,47,34,47,47,34,34,47,34,34,47,47,47,47,47,47,47,47,47,34,34,34,47,47,34,34,47,47,47,47,47,47,47,47,47,34,34,34,47,34,34,34,47,34,47,47,47,47,47,34,34,34,34,34,34,34,34,47,47,47,47,34,47,34,47,47,47,34,47,34,34,34,47,47,47,47,34,47,47,34,47,47,34,34,34,47,34,34,34,47,34,34,34,34,47,47,47,47,34,34,47,47,47,47,34,47,47,34,34,34,47,47,34,34,34,47,47,47,34,47,47,34,47,34,47,47,47,34,34,47,34,34,34,34,47,47,47,47,34,34,47,34,47,47,47,47,34,47,34,34,47,47,47,34,47,34,34,47,34,34,47,34,34,47,34,47,34,47,34,34,47,47,47,34,47,47,34,47,34,47,34,47,34,34,34,47,47,47,47,47,47,47,34,47,34,34,34,47,34,47,47,34,47,34,47,34,34,47,34,47,34,47,34,47,34,47,47,34,34,34,47,47,34,47,47,47,47,34,34,47,47,34,34,34,34,34,47,34,47,34,34,47,47,34,47,47,34,34,47,34,34,34,34,47,47,34,34,47,34,47,34,47,34,34,34,34,34,47,34,47,47,47,47,47,47,47,34,47,34,47,47,47,34,34,34,34,47,34,47,47,34,47,34,34,34,47,34,34,47,34,47,47,47,47,34,34,34,47,34,34,34,47,47,34,47,34,34,47,34,47,34,34,47,34,34,34,47,34,47,34,47,34,47,47,34,47,47,34,47,47,34,47,34,47,34,34,34,34,47,47,47,34,34,34,47,47,47,47,34,34,34,47,47,34,34,34,34,34,34,34,34,47,34,47,47,47,47,47,34,34,34,34,47,47,47,34,47,47,47,47,47,47,47,34,34,47,34,47,34,47,47,34,47,34,47,47,47,47,34,34,34,34,47,34,34,34,34,34,47,47,34,47,34,47,34,34,34,47,34,34,47,47,34,47,47,34,47,47,34,34,34,47,34,34,47,47,47,34,34,34,47,34,34,47,47,47,34,34,34,34,47,34,47,34,47,34,47,47,34,47,34,34,34,34,47,47,47,47,47,34,47,47,47,47,34,34,34,47,47,47,34,47,47,34,47,47,34,47,47,47,34,34,47,34,47,34,47,34,47,47,34,47,34,47,47,34,34,34,34,34,47,47,47,47,47,34,47,47,34,34,47,47,34,34,34,47,47,47,34,47,34,34,47,34,47,47,47,47,47,34,47,34,47,34,47,34,47,34,34,34,47,34,47,34,47,34,34,47,47,47,34,34,34,47,34,34,34,47,47,47,47,34,47,47,34,34,47,34,34,34,47,34,34,47,34,34,47,47,47,47,34,34,34,34,47,47,34,47,34,47,34,34,34,47,47,34,34,47,34,47,47,34,34,47,34,34,34,34,34,47,34,47,47,47,34,47,47,47,34,47,47,47,47,34,47,34,47,47,47,34,34,34,47,47,34,34,34,47,47,34,47,34,34,34,47,34,34,47,34,47,47,47,47,34,47,34,34,47,34,34,47,47,34,47,34,34,47,47,47,34,34,47,47,34,47,34,34,34,34,34,34,47,47,47,47,34,34,47,47,47,47,34,34,34,34,34,47,47,47,47,47,34,47,34,47,47,34,34,47,34,34,47,47,47,34,47,34,47,34,34,47,47,34,47,34,47,47,47,47,47,47,34,34,47,47,47,34,47,34,34,34,34,47,47,34,34,47,34,34,34,34,47,34,34,47,34,47,34,34,47,34,34,47,47,34,34,47,47,47,47,47,34,34,34,34,47,34,47,34,34,47,34,34,34,34,47,47,47,47,47,47,34,34,34,47,34,34,47,34,47,47,34,47,34,47,47,47,47,34,47,34,47,47,34,47,34,34,47,34,34,34,47,34,47,34,47,47,34,34,47,34,47,47,47,47,34,34,34,47,47,34,47,34,34,47,47,47,47,34,34,47,34,34,34,47,47,47,34,47,47,47,34,34,34,34,34,34,47,47,47,34,47,47,47,34,34,34,47,34,34,34,47,47,34,34,47,47,47,47,47,47,47,34,34,47,34,34,47,34,34,47,47,34,47,47,47,47,34,34,47,34,47,47,34,34,47,34,47,34,34,34,34,47,34,47,34,47,47,47,34,47,34,34,34,47,47,34,47,47,47,34,34,47,47,34,47,34,47,47,34,47,34,47,34,34,34,47,47,34,47,47,34,34,47,34,47,47,47,47,34,34,47,47,3
4,47,47,34,47,34,34,34,47,47,34,47,34,34,34,47,47,34,34,34,34,34,34,34,34,47,47,47,34,34,34,34,47,47,47,34,47,47,34,34,47,34,34,34,47,34,47,34,47,47,47,34,34,47,47,47,34,47,47,47,34,34,47,34,47,47,47,47,34,34,34,47,34,34,47,47,34,34,47,34,47,34,34,34,34,47,47,34,34,34,34,34,34,47,34,47,47,47,34,47,34,47,47,34,34,34,34,47,34,34,47,47,34,34,47,34,47,34,47,47,34,47,47,47,34,47,34,47,34,47,34,34,47,47,34,34,34,34,47,34,47,47,34,47,34,47,34,34,34,47,47,47,34,47,34,34,47,47,34,47,47,47,47,47,47,47,47,34,47,34,47,34,34,34,34,47,47,34,47,47,47,47,47,47,47,47,34,34,34,34,34,47,47,34,47,47,47,47,34,34,47,47,34,34,34,34,47,34,34,34,47,47,34,34,34,47,34,34,47,47,34,34,47,34,47,47,47,47,47,34,34,47,47,47,47,47,47,34,47,47,47,34,47,47,34,34,47,47,34,34,34,34,47,47,34,47,47,34,34,34,47,47,47,34,47,47,34,34,47,34,47,47,34,34,47,47,47,47,47,47,47,34,34,34,34,34,34,34,34,47,34,47,47,34,47,34,47,47,47,34,34,34,47,34,47,34,34,47,34,47,34,47,34,47,34,34,47,47,34,47,47,47,34,47,47,47,47,47,47,34,47,34,34,47,34,34,34,34,47,47,34,47,34,34,34,34,47,34,34,47,34,34,34,47,34,47,47,34,34,34,47,47,47,34,34,34,34,34,47,47,34,47,47,34,47,47,34,34,34,34,34,34,34,34,47,34,47,47,47,34,34,34,47,47,47,34,47,47,47,47,47,47,47,34,34,47,47,34,34,34,34,34,34,47,47,34,34,34,47,34,47,34,34,34,47,47,47,47,47,47,34,34,47,34,47,34,47,34,34,47,34,47,34,34,47,47,47,47,34,34,47,34,47,47,47,47,34,34,47,34,34,34,34,47,47,34,34,47,47,47,47,34,47,34,47,47,34,34,34,34,47,34,34,34,47,34,34,47,47,34,34,47,34,34,34,47,47,34,47,34,47,47,34,47,34,34,34,34,47,47,47,34,47,47,34,47,34,34,34,47,34,34,34,34,34,47,47,47,47,47,34,47,34,47,47,47,47,34,34,34,47,47,34,34,34,47,47,47,34,34,34,47,34,34,47,47,47,47,34,34,47,47,34,47,47,47,47,34,47,34,34,34,47,47,47,34,47,47,47,47,34,47,47,47,34,34,34,47,34,34,34,47,34,34,47,34,47,47,34,47,47,34,34,34,47,34,47,47,34,34,34,47,34,47,47,34,34,47,47,34,47,34,47,47,47,47,47,34,47,34,34,47,47,34,34,47,47,47,47,34,47,34,34,34,47,34,47,34,47,34,47,47,47,34,34,34,47,47,34,47,34,34,47,47,47,47,34,47,47,47,34,47,47,47,47,47,34,34,47,34,34,47,34,47,34,34,47,47,34,34,47,47,34,34,34,47,47,47,34,34,34,34,34,47,34,47,34,47,47,47,34,47,47,47,34,47,34,34,47,34,34,47,34,47,47,34,47,47,47,47,34,34,47,34,34,47,34,34,34,34,47,47,47,34,47,34,47,47,47,34,34,34,47,34,47,47,47,47,47,47,47,47,34,47,47,47,34,34,47,47,47,34,34,47,34,34,47,47,34,47,34,34,47,47,47,34,47,47,47,47,34,34,47,34,34,34,34,47,34,47,34,47,34,34,34,34,47,34,34,47,34,34,34,47,34,47,47,34,47,34,34,34,34,47,47,47,47,47,47,47,47,47,47,34,34,47,47,47,34,47,47,34,34,34,47,34,34,47,34,34,34,34,34,47,47,47,34,47,47,34,34,34,34,34,47,34,47,47,34,34,34,47,34,34,47,47,47,47,34,34,34,34,47,47,47,34,47,34,34,34,47,47,34,34,47,34,47,47,34,47,47,34,34,34,34,47,34,34,34,34,47,47,34,34,34,34,34,34,34,34,34,34,34,34,34,47,34,47,34,34,34,47,47,47,34,47,34,47,47,47,47,34,34,47,47,47,34,47,34,34,34,47,34,47,47,47,34,47,47,34,47,47,34,47,34,34,47,34,34,47,47,47,34,34,47,34,47,34,47,47,47,34,47,47,34,47,47,34,47,47,47,34,34,47,47,34,34,47,47,34,47,34,47,34,34,34,34,47,34,47,47,47,47,47,34,47,47,34,34,47,34,34,47,47,47,47,47,47,34,47,34,34,47,34,34,34,47,47,34,47,47,47,47,47,34,47,34,47,47,34,34,34,47,34,34,47,34,34,47,34,34,34,47,34,34,34,34,47,34,47,34,47,47,47,47,47,34,34,34,47,47,34,34,34,47,34,34,34,47,34,34,34,34,34,47,34,34,34,47,47,47,34,47,34,47,34,34,47,47,34,34,34,47,34,47,47,47,47,47,34,47,47,34,47,47,34,47,47,47,34,47,47,47,47,47,34,47,34,34,34,47,34,34,47,34,47,34,47,34,47,47,34,47,47,47,34,34,34,34,34,34,34,47,34,34,34,34,34,47,47,47,34,34,47,34,34,47,34,34,47,47,47,34,34,47,4
7,47,47,34,47,47,47,34,34,47,47,34,34,47,47,34,34,47,47,47,47,47,34,34,47,47,34,47,47,47,34,34,34,47,34,47,34,47,34,34,34,47,34,47,47,47,34,34,34,47,34,47,47,34,34,34,47,34,34,47,47,34,47,47,47,47,34,47,47,34,34,34,47,34,47,34,47,47,47,34,34,47,34,47,47,47,47,34,47,47,47,47,34,34,34,34,47,47,47,47,34,47,34,47,34,34,34,47,47,34,47,34,34,34,34,34,34,34,47,47,47,47,47,34,34,34,47,34,34,34,34,34,34,34,34,34,47,34,34,34,47,34,34,47,47,34,34,47,34,47,34,47,47,47,34,47,47,47,34,47,34,47,47,47,34,34,34,34,47,34,34,34,34,34,34,47,47,34,34,47,34,47,47,34,34,34,47,47,47,47,34,34,47,47,34,34,34,34,34,34,34,34,47,34,47,47,34,47,34,47,47,47,47,34,47,47,47,34,47,34,34,47,47,34,34,47,47,34,34,34,34,47,47,34,47,34,47,47,47,34,34,34,47,47,34,34,34,47,47,47,47,47,47,47,47,34,47,34,47,47,47,34,34,47,34,34,47,34,34,34,47,47,47,34,34,34,47,34,47,34,47,47,34,47,47,47,34,34,34,47,34,34,47,34,47,34,34,34,34,34,34,47,47,34,34,34,34,34,34,47,47,47,47,34,47,47,34,47,47,47,47,34,34,34,34,34,34,47,34,34,47,47,34,47,47,34,47,47,47,34,34,34,34,47,47,34,34,47,47,47,34,34,47,47,34,34,47,47,34,47,34,47,47,47,47,34,47,47,34,47,34,34,47,47,47,47,47,34,34,47,47,47,34,34,47,34,47,47,47,34,34,47,47,47,34,47,47,47,34,34,47,47,47,47,47,34,47,47,34,47,34,34,34,47,34,34,47,34,47,47,47,34,34,47,47,47,34,47,34,47,34,47,47,47,34,47,34,47,47,34,47,47,34,47,47,47,34,47,34,47,47,47,47,47,47,47,34,47,47,47,47,47,34,34,34,47,34,47,34,47,34,34,34,47,47,34,47,47,47,47,34,34,34,34,34,34,47,34,34,34,47,47,34,34,34,34,47,47,47,47,34,34,47,47,34,47,47,34,34,47,47,34,47,34,47,47,47,47,47,34,34,34,34,34,47,47,34,34,34,34,47,34,47,34,34,47,47,47,34,34,34,34,47,47,34,47,34,34,34,34,34,47,47,34,47,34,47,47,47,34,47,34,47,47,34,47,47,34,34,47,47,34,34,47,34,34,34,34,34,34,47,47,34,34,34,34,34,47,47,34,47,47,47,47,47,34,34,34,47,47,34,47,34,47,47,34,34,47,34,34,34,47,34,47,34,47,34,34,47,34,47,47,34,47,34,47,34,34,34,47,34,47,47,34,47,34,34,34,34,34,47,34,34,47,34,34,34,47,47,34,47,34,34,47,47,47,47,47,47,34,34,47,47,47,34,47,47,47,34,47,47,47,34,34,47,34,47,34,47,34,47,47,34,34,47,47,47,47,47,34,47,34,47,34,47,47,47,47,47,34,47,47,47,34,47,47,47,34,34,47,34,34,47,47,34,34,47,34,34,34,34,34,34,34,34,47,47,47,47,47,34,34,47,47,34,34,47,34,47,34,47,34,34,34,34,34,34,34,47,47,47,34,47,34,47,34,34,34,34,47,47,34,34,34,47,34,47,34,34,47,34,34,47,47,34,34,47,34,34,34,34,34,34,47,47,47,34,47,47,34,47,47,34,47,47,47,47,34,47,47,34,34,47,34,34,34,47,47,34,34,34,34,34,34,47,34,47,47,47,34,47,47,34,47,47,47,47,34,47,47,34,47,47,47,47,34,47,34,34,34,34,47,47,34,47,34,47,34,34,47,47,47,34,47,47,34,34,47,47,34,34,47,47,34,47,47,47,34,34,34,34,34,47,34,47,47,34,47,34,47,47,34,34,47,34,47,34,47,47,47,34,34,47,47,47,47,34,47,47,47,34,34,47,34,34,34,34,47,34,34,34,47,34,34,47,34,34,34,47,47,34,34,34,34,34,34,34,34,34,34,47,47,47,34,47,34,47,47,34,47,34,47,34,34,47,47,34,47,34,47,34,34,47,34,47,34,34,34,34,34,34,47,47,47,47,47,47,47,34,47,34,34,34,47,34,34,34,34,34,47,34,47,34,34,47,34,34,47,47,47,34,34,47,34,47,34,34,34,34,34,47,47,34,47,47,34,34,34,47,47,47,47,47,34,34,34,34,47,47,47,34,47,34,47,34,34,34,34,34,34,47,47,47,34,34,34,34,47,47,47,34,47,34,34,34,47,47,47,47,34,34,34,34,47,34,47,47,34,47,34,47,34,47,34,34,47,47,34,47,34,47,47,47,47,34,34,34,47,34,47,34,47,34,47,47,47,34,47,47,34,47,47,34,34,47,34,47,34,47,47,47,34,47,47,47,47,34,47,34,34,47,47,34,47,34,47,47,47,47,34,34,47,34,34,47,47,47,34,34,47,34,34,34,34,34,34,47,47,34,34,47,47,47,47,47,34,34,34,34,34,34,47,34,47,47,47,47,47,34,34,34,34,34,34,34,34,47,34,34,34,47,47,34,34,47,34,34,34,47,34,47,47,34,34,47,3
4,34,47,34,47,47,47,34,47,47,47,47,34,34,34,47,34,47,34,47,34,47,34,34,34,47,34,34,47,34,47,47,34,34,47,47,47,34,47,47,34,34,47,34,34,47,34,47,47,34,34,47,34,47,34,47,34,34,34,34,47,34,34,34,34,34,34,34,47,34,34,47,47,47,47,47,47,34,47,47,34,47,47,47,47,34,34,47,47,47,34,34,47,34,47,47,34,34,34,47,47,34,47,34,47,34,34,34,34,34,34,34,47,47,47,47,34,47,47,34,34,47,47,47,34,34,47,34,47,47,47,47,34,47,34,34,34,47,47,34,34,34,47,34,34,34,34,47,47,34,34,47,47,47,34,47,34,34,34,47,34,34,34,34,34,34,34,34,34,34,34,47,47,47,47,34,47,47,34,47,34,47,47,34,47,34,47,34,47,34,34,34,34,34,34,34,47,34,34,34,47,34,34,34,47,47,47,47,34,34,34,34,47,34,34,47,34,34,47,34,34,47,47,47,34,34,34,47,47,34,34,34,47,34,34,47,34,34,47,34,47,47,47,34,34,34,47,34,47,34,47,47,47,47,34,47,34,34,34,47,47,34,47,34,47,34,34,47,47,47,47,34,47,34,47,34,47,47,34,47,34,47,34,47,47,47,47,34,47,47,47,34,47,34,34,47,34,47,47,47,47,34,34,34,47,34,34,47,47,47,47,34,47,34,47,34,34,47,34,34,47,34,34,47,34,34,47,47,47,47,47,47,34,47,47,47,47,47,47,34,34,34,34,47,34,34,34,34,34,34,34,34,47,34,34,47,34,34,47,47,47,34,47,34,47,47,47,34,34,34,34,47,47,47,47,34,47,47,47,47,34,47,47,34,47,34,34,47,47,47,34,47,34,47,47,47,34,47,47,34,34,34,34,47,47,47,34,34,47,47,47,34,47,34,47,47,47,47,47,34,34,47,34,34,34,34,47,34,34,34,34,34,47,47,47,34,34,34,34,47,47,34,34,47,47,47,34,34,34,47,47,47,34,47,47,34,34,34,47,47,47,47,47,47,34,34,47,47,47,47,34,47,34,34,34,47,47,47,47,34,34,34,34,47,34,47,47,47,47,34,47,47,34,34,47,34,34,47,47,34,34,47,34,47,34,34,34,34,34,34,47,47,47,47,47,34,47,47,34,34,34,34,34,47,47,34,47,47,34,47,47,34,47,34,47,47,47,47,47,47,34,47,34,34,34,34,47,47,34,47,34,34,47,34,34,47,47,47,34,34,34,47,34,34,47,47,34,34,34,47,34,34,34,34,34,34,34,34,34,47,47,34,47,34,34,34,34,34,34,47,47,47,47,47,47,47,34,34,34,47,47,34,34,47,34,47,34,34,47,47,47,47,47,34,47,34,34,34,34,34,34,47,47,47,47,47,34,47,34,34,34,47,34,47,34,47,34,34,47,47,34,34,47,34,47,34,34,47,34,47,47,47,34,34,47,47,34,34,34,34,34,34,47,34,34,47,47,47,47,34,34,34,47,34,34,47,34,34,34,47,34,34,34,47,34,34,34,47,34,47,47,34,47,34,47,47,47,34,47,47,34,47,47,34,34,47,47,34,47,34,47,34,34,34,34,47,47,34,34,34,47,34,34,47,47,34,34,34,34,34,47,47,47,34,47,47,34,34,47,34,34,47,34,34,34,34,34,47,47,34,34,47,34,34,47,34,34,34,47,34,34,47,34,34,47,34,47,47,34,47,34,47,34,47,47,34,34,47,47,47,47,47,34,47,47,34,47,34,47,47,34,34,34,34,47,47,34,47,47,34,47,34,47,47,47,47,47,34,34,34,34,34,47,34,47,34,34,47,34,34,34,47,47,47,47,34,47,47,34,47,47,47,34,47,47,47,34,34,47,34,47,34,34,34,34,34,47,34,47,47,47,34,47,47,47,34,47,47,34,47,47,47,47,34,34,34,47,47,47,34,47,47,34,47,34,34,34,47,47,47,34,34,34,47,47,34,34,34,47,34,34,34,47,47,34,47,34,34,47,47,34,47,47,47,34,47,34,34,47,34,34,34,47,34,34,34,34,34,34,47,34,34,34,47,47,47,34,47,47,34,47,47,34,47,34,47,47,47,34,34,34,34,47,34,34,47,47,34,34,47,47,47,47,47,34,47,34,47,47,34,34,47,34,47,47,34,34,34,47,34,47,34,34,47,34,47,47,34,34,34,47,47,47,47,34,47,47,47,34,34,47,34,34,47,47,47,47,47,34,34,34,34,34,47,34,47,34,34,34,34,34,47,34,47,34,47,47,47,34,47,47,34,34,47,34,47,34,34,34,34,34,47,34,47,47,47,34,47,34,34,34,34,34,34,47,47,34,34,34,47,47,34,34,34,47,47,47,47,34,34,34,47,47,34,47,34,34,47,47,34,47,34,47,34,47,47,47,34,47,47,47,47,34,47,47,47,34,47,47,34,34,47,34,34,47,47,47,47,47,47,47,34,34,34,47,34,47,34,47,47,47,47,34,34,47,34,34,47,47,47,34,34,47,47,47,47,47,34,47,47,47,47,47,47,34,34,47,34,34,34,47,47,47,34,34,47,34,47,47,47,47,34,47,34,47,34,34,34,47,47,47,47,34,34,34,34,47,47,47,47,47,47,47,34,34,47,34,34,34,47,47,34,47,47,47,3
4,47,34,34,47,47,34,47,34,34,34,47,47,47,47,34,34,34,47,47,47,47,34,34,47,34,47,47,34,34,47,47,34,34,34,34,47,34,34,34,34,47,47,34,34,47,47,47,34,34,47,47,34,34,47,47,34,34,34,47,34,34,47,47,47,47,34,47,34,34,34,34,47,34,47,34,34,47,47,47,34,47,47,34,47,34,47,34,47,47,47,34,47,47,47,47,34,34,34,34,34,34,47,34,47,47,47,47,47,34,47,47,47,34,47,34,47,47,47,34,47,34,47,47,34,47,47,34,34,47,34,47,47,34,34,34,47,47,34,34,34,34,47,34,47,47,47,34,47,34,34,34,34,47,47,34,47,47,47,34,34,47,47,47,34,47,34,47,34,47,34,34,34,47,34,47,34,34,47,34,34,34,47,34,34,47,34,47,47,34,47,34,34,47,34,34,47,47,34,47,34,47,47,47,47,47,47,34,47,34,47,34,47,34,34,34,34,47,34,34,47,34,34,47,34,34,47,47,34,47,34,34,47,47,34,34,34,34,47,47,47,47,34,47,34,34,47,34,34,47,47,34,47,34,34,47,34,47,47,34,47,47,34,47,47,34,34,47,47,34,34,47,47,34,47,34,47,34,34,34,34,47,47,47,34,47,47,34,34,47,34,47,47,47,47,34,47,34,34,34,47,47,47,34,47,34,34,47,34,47,34,34,47,34,47,47,34,47,34,34,47,34,34,34,47,47,34,34,47,47,34,34,47,34,47,34,34,47,34,34,47,47,47,34,34,34,34,34,47,34,34,34,34,47,47,34,34,34,34,34,47,47,47,34,47,34,47,34,47,34,34,47,34,47,47,34,34,47,34,34,34,34,34,34,34,34,34,34,47,47,34,47,34,34,34,34,34,47,34,34,47,34,34,47,34,34,47,47,34,47,47,34,34,34,34,34,34,34,34,34,34,34,34,47,34,47,47,47,34,47,34,34,47,34,34,34,34,34,34,47,47,47,34,47,47,34,34,34,47,34,34,34,47,47,47,34,47,47,34,34,34,47,34,34,34,34,47,34,47,47,34,47,34,34,47,47,47,47,47,34,34,34,34,34,47,47,47,34,47,34,34,47,34,47,47,34,34,34,47,47,34,47,47,34,47,47,47,47,47,47,47,34,34,47,34,34,34,47,47,34,47,47]
print(s.dailyTemperatures(T))
| 418.727273 | 30,009 | 0.655449 |
79484234e7a1e3ea0121b4917ce180993792d5e6
| 844 |
py
|
Python
|
codewars/7 kyu/disemvowel-trolls.py
|
sirken/coding-practice
|
9c5e23b2c24f525a89a5e1d15ce3aec3ad1a01ab
|
[
"MIT"
] | null | null | null |
codewars/7 kyu/disemvowel-trolls.py
|
sirken/coding-practice
|
9c5e23b2c24f525a89a5e1d15ce3aec3ad1a01ab
|
[
"MIT"
] | null | null | null |
codewars/7 kyu/disemvowel-trolls.py
|
sirken/coding-practice
|
9c5e23b2c24f525a89a5e1d15ce3aec3ad1a01ab
|
[
"MIT"
] | null | null | null |
from Test import Test, Test as test
'''
Trolls are attacking your comment section!
A common way to deal with this situation is to remove all of the vowels from the trolls' comments, neutralizing the threat.
Your task is to write a function that takes a string and return a new string with all vowels removed.
For example, the string "This website is for losers LOL!" would become "Ths wbst s fr lsrs LL!".
Note: for this kata y isn't considered a vowel.
'''
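# Approach 1: generator expression, testing each lowercased character against a tuple of vowels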
def disemvowel(string):
return ''.join(x for x in string if x.lower() not in ('a','e','i','o','u'))
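# Approach 2: same filter, but the membership test is against the string 'aeiou' (the parentheses are redundant)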
def disemvowel(string):
return ''.join(x for x in string if x.lower() not in ('aeiou'))
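# Approach 3: str.maketrans/str.translate deletes all mapped vowels in a single pass;
# each redefinition shadows the previous one, so only this version is exercised by the test below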
def disemvowel(string):
return string.translate(string.maketrans('','','aeiouAEIOU'))
test.assert_equals(disemvowel("This website is for losers LOL!"), "Ths wbst s fr lsrs LL!")
| 35.166667 | 123 | 0.71564 |
794842602178d2661146025f3f1ec6da1f1c1dba
| 12,738 |
py
|
Python
|
neurokit2/bio/bio_analyze.py
|
BelleJohn/neuropsychology-NeuroKit
|
d01111b9b82364d28da01c002e6cbfc45d9493d9
|
[
"MIT"
] | 1 |
2022-03-05T06:15:02.000Z
|
2022-03-05T06:15:02.000Z
|
neurokit2/bio/bio_analyze.py
|
BelleJohn/neuropsychology-NeuroKit
|
d01111b9b82364d28da01c002e6cbfc45d9493d9
|
[
"MIT"
] | null | null | null |
neurokit2/bio/bio_analyze.py
|
BelleJohn/neuropsychology-NeuroKit
|
d01111b9b82364d28da01c002e6cbfc45d9493d9
|
[
"MIT"
] | 2 |
2021-12-25T15:39:49.000Z
|
2021-12-25T15:44:16.000Z
|
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
from ..ecg import ecg_analyze
from ..eda import eda_analyze
from ..emg import emg_analyze
from ..eog import eog_analyze
from ..hrv import hrv_rsa
from ..ppg import ppg_analyze
from ..rsp import rsp_analyze
def bio_analyze(data, sampling_rate=1000, method="auto", window_lengths='constant', subepoch_rate=[None, None]):
"""Automated analysis of bio signals.
Wrapper for other bio analyze functions of
electrocardiography signals (ECG), respiration signals (RSP), electrodermal activity (EDA),
electromyography signals (EMG) and electrooculography signals (EOG).
Parameters
----------
data : DataFrame
The DataFrame containing all the processed signals, typically
produced by `bio_process()`, `ecg_process()`, `rsp_process()`,
`eda_process()`, `emg_process()` or `eog_process()`.
sampling_rate : int
The sampling frequency of the signals (in Hz, i.e., samples/second).
Defaults to 1000.
method : str
Can be one of 'event-related' for event-related analysis on epochs,
or 'interval-related' for analysis on longer periods of data. Defaults
to 'auto' where the right method will be chosen based on the
mean duration of the data ('event-related' for duration under 10s).
window_lengths : dict
Defaults to 'constant'. Add a dictionary of epoch start and end times for different
types of signals e.g., window_lengths = {'ECG': [0.5, 1.5], 'EDA': [0.5, 3.5]}
subepoch_rate : list, dict
        For event-related analysis, a smaller "sub-epoch" within the epoch of an event can be specified.
        The ECG and RSP rate-related features of this "sub-epoch" (e.g., ECG_Rate, ECG_Rate_Max),
        relative to the baseline (where applicable), will be computed, e.g., subepoch_rate = [1, 3]
        or subepoch_rate = {'ECG_Rate': [1, 2], 'RSP_Rate': [1.5, None]} if different sub-epoch lengths
        are desired for different signals. Defaults to [None, None]. The first value of the list specifies
        the start of the sub-epoch and the second specifies the end of the sub-epoch (in seconds).
Returns
----------
DataFrame
DataFrame of the analyzed bio features. See docstrings of `ecg_analyze()`,
`rsp_analyze()`, `eda_analyze()`, `emg_analyze()` and `eog_analyze()` for more details.
Also returns Respiratory Sinus Arrhythmia features produced by
`hrv_rsa()` if interval-related analysis is carried out.
See Also
----------
ecg_analyze, rsp_analyze, eda_analyze, emg_analyze, eog_analyze
Examples
----------
>>> import neurokit2 as nk
>>>
>>> # Example 1: Event-related analysis
>>> # Download data
>>> data = nk.data("bio_eventrelated_100hz")
>>>
>>> # Process the data
>>> df, info = nk.bio_process(ecg=data["ECG"], rsp=data["RSP"], eda=data["EDA"],
... keep=data["Photosensor"], sampling_rate=100)
>>>
>>> # Build epochs
>>> events = nk.events_find(data["Photosensor"], threshold_keep='below',
... event_conditions=["Negative", "Neutral",
... "Neutral", "Negative"])
>>> epochs = nk.epochs_create(df, events, sampling_rate=100, epochs_start=-0.1,
... epochs_end=1.9)
>>>
>>> # Analyze
>>> nk.bio_analyze(epochs, sampling_rate=100) #doctest: +ELLIPSIS
Label Condition Event_Onset ... RSA_Gates
1 1 Negative ... ... ...
2 2 Neutral ... ... ...
3 3 Neutral ... ... ...
4 4 Negative ... ... ...
...
>>>
>>> # Example 2: Interval-related analysis
>>> # Download data
>>> data = nk.data("bio_resting_5min_100hz")
>>>
>>> # Process the data
>>> df, info = nk.bio_process(ecg=data["ECG"], rsp=data["RSP"], ppg=data["PPG"], sampling_rate=100)
>>>
>>> # Analyze
>>> nk.bio_analyze(df, sampling_rate=100) #doctest: +ELLIPSIS
ECG_Rate_Mean HRV_MeanNN ... RSA_Gates_Mean_log RSA_Gates_SD
0 ... ... ... ... ...
[1 rows x 184 columns]
"""
features = pd.DataFrame()
method = method.lower()
# Sanitize input
if isinstance(data, pd.DataFrame):
ecg_cols = [col for col in data.columns if "ECG" in col]
rsp_cols = [col for col in data.columns if "RSP" in col]
eda_cols = [col for col in data.columns if "EDA" in col]
emg_cols = [col for col in data.columns if "EMG" in col]
ppg_cols = [col for col in data.columns if "PPG" in col]
eog_cols = [col for col in data.columns if "EOG" in col]
ecg_rate_col = [col for col in data.columns if "ECG_Rate" in col]
rsp_phase_col = [col for col in data.columns if "RSP_Phase" in col]
elif isinstance(data, dict):
for i in data:
ecg_cols = [col for col in data[i].columns if "ECG" in col]
rsp_cols = [col for col in data[i].columns if "RSP" in col]
eda_cols = [col for col in data[i].columns if "EDA" in col]
emg_cols = [col for col in data[i].columns if "EMG" in col]
ppg_cols = [col for col in data[i].columns if "PPG" in col]
eog_cols = [col for col in data[i].columns if "EOG" in col]
ecg_rate_col = [col for col in data[i].columns if "ECG_Rate" in col]
rsp_phase_col = [col for col in data[i].columns if "RSP_Phase" in col]
else:
raise ValueError(
"NeuroKit error: bio_analyze(): Wrong input, please make sure you enter a DataFrame or a dictionary. "
)
# ECG
if len(ecg_cols) != 0:
ecg_data = data.copy()
if window_lengths != 'constant':
if 'ECG' in window_lengths.keys(): # only for epochs
ecg_data = _bio_analyze_slicewindow(ecg_data, window_lengths, signal='ECG')
ecg_analyzed = ecg_analyze(ecg_data, sampling_rate=sampling_rate, method=method, subepoch_rate=subepoch_rate)
features = pd.concat([features, ecg_analyzed], axis=1, sort=False)
# RSP
if len(rsp_cols) != 0:
rsp_data = data.copy()
if window_lengths != 'constant':
if 'RSP' in window_lengths.keys(): # only for epochs
rsp_data = _bio_analyze_slicewindow(rsp_data, window_lengths, signal='RSP')
rsp_analyzed = rsp_analyze(rsp_data, sampling_rate=sampling_rate, method=method, subepoch_rate=subepoch_rate)
features = pd.concat([features, rsp_analyzed], axis=1, sort=False)
# EDA
if len(eda_cols) != 0:
eda_data = data.copy()
if window_lengths != 'constant':
if 'EDA' in window_lengths.keys(): # only for epochs
eda_data = _bio_analyze_slicewindow(eda_data, window_lengths, signal='EDA')
eda_analyzed = eda_analyze(eda_data, sampling_rate=sampling_rate, method=method)
features = pd.concat([features, eda_analyzed], axis=1, sort=False)
# EMG
if len(emg_cols) != 0:
emg_data = data.copy()
if window_lengths != 'constant':
if 'EMG' in window_lengths.keys(): # only for epochs
emg_data = _bio_analyze_slicewindow(emg_data, window_lengths, signal='EMG')
emg_analyzed = emg_analyze(emg_data, sampling_rate=sampling_rate, method=method)
features = pd.concat([features, emg_analyzed], axis=1, sort=False)
    # PPG
if len(ppg_cols) != 0:
ppg_data = data.copy()
if window_lengths != 'constant':
if 'PPG' in window_lengths.keys(): # only for epochs
ppg_data = _bio_analyze_slicewindow(ppg_data, window_lengths, signal='PPG')
ppg_analyzed = ppg_analyze(ppg_data, sampling_rate=sampling_rate, method=method)
features = pd.concat([features, ppg_analyzed], axis=1, sort=False)
# EOG
if len(eog_cols) != 0:
eog_data = data.copy()
if window_lengths != 'constant':
if 'EOG' in window_lengths.keys(): # only for epochs
eog_data = _bio_analyze_slicewindow(eog_data, window_lengths, signal='EOG')
eog_analyzed = eog_analyze(eog_data, sampling_rate=sampling_rate, method=method)
features = pd.concat([features, eog_analyzed], axis=1, sort=False)
# RSA
if len(ecg_rate_col + rsp_phase_col) >= 3:
# Event-related
if method in ["event-related", "event", "epoch"]:
rsa = _bio_analyze_rsa_event(data)
# Interval-related
elif method in ["interval-related", "interval", "resting-state"]:
rsa = _bio_analyze_rsa_interval(data, sampling_rate=sampling_rate)
# Auto
else:
duration = _bio_analyze_findduration(data, sampling_rate=sampling_rate)
if duration >= 10:
rsa = _bio_analyze_rsa_interval(data, sampling_rate=sampling_rate)
else:
rsa = _bio_analyze_rsa_event(data)
features = pd.concat([features, rsa], axis=1, sort=False)
# Remove duplicate columns of Label and Condition
if "Label" in features.columns.values:
features = features.loc[:, ~features.columns.duplicated()]
return features
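# Hypothetical usage sketch for the window_lengths / subepoch_rate options documented above
# (not part of the library; `epochs` and the 100 Hz sampling rate are assumed from the docstring example):
#
#   analyzed = bio_analyze(
#       epochs,
#       sampling_rate=100,
#       window_lengths={"ECG": [0.5, 1.5], "EDA": [0.5, 3.5]},  # per-signal epoch windows, in seconds
#       subepoch_rate=[1, 3],  # rate features computed over the 1-3 s sub-epoch
#   )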
# =============================================================================
# Internals
# =============================================================================
def _bio_analyze_slicewindow(data, window_lengths, signal='ECG'):
if signal in window_lengths.keys():
start = window_lengths[signal][0]
end = window_lengths[signal][1]
epochs = {}
for i, label in enumerate(data):
# Slice window
epoch = data[label].loc[(data[label].index > start) & (data[label].index < end)]
epochs[label] = epoch
    return epochs
def _bio_analyze_findduration(data, sampling_rate=1000):
# If DataFrame
if isinstance(data, pd.DataFrame):
if "Label" in data.columns:
labels = data["Label"].unique()
durations = [len(data[data["Label"] == label]) / sampling_rate for label in labels]
else:
durations = [len(data) / sampling_rate]
# If dictionary
if isinstance(data, dict):
durations = [len(data[i]) / sampling_rate for i in data]
return np.nanmean(durations)
def _bio_analyze_rsa_interval(data, sampling_rate=1000):
# RSA features for interval-related analysis
if isinstance(data, pd.DataFrame):
rsa = hrv_rsa(data, sampling_rate=sampling_rate, continuous=False)
rsa = pd.DataFrame.from_dict(rsa, orient="index").T
    elif isinstance(data, dict):
        rsa = {}  # container for per-recording RSA features
        for index in data:
            rsa[index] = {}  # Initialize empty container
data[index] = data[index].set_index("Index").drop(["Label"], axis=1)
rsa[index] = hrv_rsa(data[index], sampling_rate=sampling_rate, continuous=False)
rsa = pd.DataFrame.from_dict(rsa, orient="index")
return rsa
def _bio_analyze_rsa_event(data, rsa=None):
    # RSA features for event-related analysis
    if rsa is None:  # a fresh dict per call avoids a shared mutable default
        rsa = {}
    if isinstance(data, dict):
for i in data:
rsa[i] = {}
rsa[i] = _bio_analyze_rsa_epoch(data[i], rsa[i])
rsa = pd.DataFrame.from_dict(rsa, orient="index")
elif isinstance(data, pd.DataFrame):
# Convert back to dict
for label, df in data.groupby('Label'):
rsa[label] = {}
epoch = df.set_index('Time')
rsa[label] = _bio_analyze_rsa_epoch(epoch, rsa[label])
rsa = pd.DataFrame.from_dict(rsa, orient="index")
# Fix index sorting to combine later with features dataframe
rsa.index = rsa.index.astype(int)
rsa = rsa.sort_index().rename_axis(None)
rsa.index = rsa.index.astype(str)
return rsa
def _bio_analyze_rsa_epoch(epoch, output={}):
# RSA features for event-related analysis: epoching
# To remove baseline
if np.min(epoch.index.values) <= 0:
baseline = epoch["RSA_P2T"][epoch.index <= 0].values
signal = epoch["RSA_P2T"][epoch.index > 0].values
output["RSA_P2T"] = np.mean(signal) - np.mean(baseline)
baseline = epoch["RSA_Gates"][epoch.index <= 0].values
signal = epoch["RSA_Gates"][epoch.index > 0].values
output["RSA_Gates"] = np.nanmean(signal) - np.nanmean(baseline)
else:
signal = epoch["RSA_P2T"].values
output["RSA_P2T"] = np.mean(signal)
signal = epoch["RSA_Gates"].values
output["RSA_Gates"] = np.nanmean(signal)
return output
| 40.310127 | 117 | 0.608808 |
794843c51557f5c5d149976c1af4105afdb96f53
| 1,682 |
py
|
Python
|
typings/bpy/ops/brush.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | 2 |
2021-12-12T18:51:52.000Z
|
2022-02-23T09:49:16.000Z
|
typings/bpy/ops/brush.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | 2 |
2021-11-08T12:09:02.000Z
|
2021-12-12T23:01:12.000Z
|
typings/bpy/ops/brush.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | null | null | null |
import sys
import typing
def add():
''' Add brush by mode type
'''
pass
def add_gpencil():
''' Add brush for Grease Pencil
'''
pass
def curve_preset(shape: typing.Union[str, int] = 'SMOOTH'):
''' Set brush shape
:param shape: Mode
:type shape: typing.Union[str, int]
'''
pass
def reset():
''' Return brush to defaults based on current tool
'''
pass
def scale_size(scalar: float = 1.0):
''' Change brush size by a scalar
:param scalar: Scalar, Factor to scale brush size by
:type scalar: float
'''
pass
def stencil_control(mode: typing.Union[str, int] = 'TRANSLATION',
texmode: typing.Union[str, int] = 'PRIMARY'):
''' Control the stencil brush
:param mode: Tool
:type mode: typing.Union[str, int]
:param texmode: Tool
:type texmode: typing.Union[str, int]
'''
pass
def stencil_fit_image_aspect(use_repeat: bool = True,
use_scale: bool = True,
mask: bool = False):
''' When using an image texture, adjust the stencil size to fit the image aspect ratio
:param use_repeat: Use Repeat, Use repeat mapping values
:type use_repeat: bool
:param use_scale: Use Scale, Use texture scale values
:type use_scale: bool
:param mask: Modify Mask Stencil, Modify either the primary or mask stencil
:type mask: bool
'''
pass
def stencil_reset_transform(mask: bool = False):
''' Reset the stencil transformation to the default
:param mask: Modify Mask Stencil, Modify either the primary or mask stencil
:type mask: bool
'''
pass
| 19.55814 | 90 | 0.618312 |
79484555fd9df5b1c0a7cbcd38f1c440d8ee1daa
| 9,997 |
py
|
Python
|
src/index.py
|
Aquila-Network/AquilaX-CE
|
9aa6b076e83892ececaa23970609ae7045aa6699
|
[
"MIT"
] | null | null | null |
src/index.py
|
Aquila-Network/AquilaX-CE
|
9aa6b076e83892ececaa23970609ae7045aa6699
|
[
"MIT"
] | 1 |
2021-05-13T09:28:09.000Z
|
2021-05-13T09:28:09.000Z
|
src/index.py
|
Aquila-Network/AquilaX-CE
|
9aa6b076e83892ececaa23970609ae7045aa6699
|
[
"MIT"
] | null | null | null |
import logging
from flask import Flask, request
from flask_cors import CORS
from functools import wraps
import html_cleanup as chtml
from services import logging as slog
slogging_session = slog.create_session()
from aquilapy import Wallet, DB, Hub
app = Flask(__name__, instance_relative_config=True)
# Create a wallet instance from private key
wallet = Wallet("/ossl/private_unencrypted.pem")
# Connect to Aquila DB instance
db = DB("http://aquiladb", "5001", wallet)
# Connect to Aquila Hub instance
hub = Hub("http://aquilahub", "5002", wallet)
# default database name
default_database_name = None
def create_database (user_id):
# Schema definition to be used
schema_def = {
"description": "AquilaX-CE default user index",
"unique": user_id,
"encoder": "strn:msmarco-distilbert-base-tas-b",
"codelen": 768,
"metadata": {
"url": "string",
"text": "string"
}
}
    # Create a database in Aquila DB with the schema definition provided
    db_name = db.create_database(schema_def)
    # Create the matching database in Aquila Hub with the same schema definition
    db_name_ = hub.create_database(schema_def)
return db_name, True
# Compress data
def compress_strings (db_name, strings_in):
return hub.compress_documents(db_name, strings_in)
# Insert docs
def index_website (db_name, paragraphs, title, url):
# add title as well to the index
if title != "":
paragraphs.append(title)
compressed = compress_strings(db_name, paragraphs)
docs = []
for idx_, para in enumerate(paragraphs):
v = compressed[idx_]
docs.append({
"metadata": {
"url": url,
"text": para
},
"code": v
})
try:
dids = db.insert_documents(db_name, docs)
return True
except Exception as e:
logging.debug(e)
return False
# Search docs
def search_docs(db_name, query):
compressed = compress_strings(db_name, [query])
docs, dists = db.search_k_documents(db_name, compressed, 100)
index = {}
score = {}
for idx_, doc in enumerate(docs[0]):
metadata = doc["metadata"]
        # ----- keep only the first score returned for each URL (simple de-duplication) -----
if index.get(metadata["url"]):
pass
else:
index[metadata["url"]] = 1
score[metadata["url"]] = dists[0][idx_]
results_d = {}
for key in index:
results_d[key] = score[key]
results_d = {k: v for k, v in sorted(results_d.items(), key=lambda item: item[1], reverse=True)}
return results_d
# Add authentication
def authenticate ():
def decorator (f):
@wraps(f)
def wrapper (*args, **kwargs):
# skip
return f(*args, **kwargs)
return wrapper
return decorator
def extract_request_params (request):
if not request.is_json:
logging.error("Cannot parse request parameters")
# request is invalid
return {}
# Extract JSON data
data_ = request.get_json()
return data_
@app.route("/", methods=['GET'])
def info ():
"""
Check server status
"""
# Build response
return {
"success": True,
"message": "Aquila X is running healthy"
}, 200
@app.route("/create", methods=['POST'])
@authenticate()
def create_db ():
"""
Create a database on demand given a random unique seed
"""
# get parameters
user_id = None
if extract_request_params(request).get("seed"):
user_id = extract_request_params(request)["seed"]
if not user_id:
# Build error response
return {
"success": False,
"message": "Invalid parameters"
}, 400
db_name, status = create_database(user_id)
# Build response
if status:
return {
"success": True,
"databaseName": db_name
}, 200
else:
return {
"success": False,
"message": "Invalid schema definition"
}, 400
@app.route("/index", methods=['POST'])
@authenticate()
def index_page ():
"""
Index html page
"""
# get parameters
html_data = None
url = None
db_name = default_database_name
if extract_request_params(request).get("database"):
db_name = extract_request_params(request)["database"]
if extract_request_params(request).get("html") and extract_request_params(request).get("url"):
html_data = extract_request_params(request)["html"]
url = extract_request_params(request)["url"]
if not html_data or not url or not db_name:
# Build error response
return {
"success": False,
"message": "Invalid parameters"
}, 400
# cleanup html
chtml_data = chtml.process_html(html_data, url)
thtml_data = chtml.trim_content(chtml_data["data"]["content"])["result"]
# index html
status = index_website(db_name, thtml_data, chtml_data["data"]["title"], url)
# Build response
if status:
# logging
if slogging_session != None:
# index activity logging
slog.put_log_index(slogging_session, db_name, url, html_data, 0)
# metadata logging
slog.put_url_summary(slogging_session, db_name, url, chtml_data["data"]["title"], chtml_data["data"]["author"], chtml_data["data"]["lead_image_url"], chtml_data["data"]["next_page_url"], "...".join(thtml_data))
return {
"success": True,
"databaseName": db_name
}, 200
else:
return {
"success": False,
"message": "Invalid schema definition"
}, 400
@app.route("/search", methods=['POST'])
def search ():
"""
Search database for matches
"""
# get parameters
query = None
db_name = default_database_name
if extract_request_params(request).get("database"):
db_name = extract_request_params(request)["database"]
if extract_request_params(request).get("query"):
query = extract_request_params(request)["query"]
if not query or not db_name:
# Build error response
return {
"success": False,
"message": "Invalid parameters"
}, 400
urls = search_docs(db_name, query)
# logging
if slogging_session != None:
if len(urls) > 0:
slog.put_log_search(slogging_session, db_name, query, list(urls.keys())[0])
else:
slog.put_log_search(slogging_session, db_name, query, "")
# Build response
return {
"success": True,
"result": urls
}, 200
@app.route("/correct", methods=['POST'])
def correct ():
"""
Correct matches
"""
# get parameters
query = None
db_name = default_database_name
url = None
if extract_request_params(request).get("database"):
db_name = extract_request_params(request)["database"]
if extract_request_params(request).get("query") and extract_request_params(request).get("url"):
query = extract_request_params(request)["query"]
url = extract_request_params(request)["url"]
if not query and not db_name and not url:
# Build error response
return {
"success": False,
"message": "Invalid parameters"
}, 400
# logging
if slogging_session != None:
slog.put_log_correct(slogging_session, db_name, query, url)
# index correction
status = index_website(db_name, [], query, url)
# Build response
return {
"success": True
}, 200
@app.route("/list", methods=['POST'])
def listall ():
"""
List indexed urls
"""
# get parameters
page = None
db_name = default_database_name
limit = None
if extract_request_params(request).get("database"):
db_name = extract_request_params(request)["database"]
if extract_request_params(request).get("page") and extract_request_params(request).get("limit"):
page = extract_request_params(request)["page"]
limit = extract_request_params(request)["limit"]
if not page and not db_name and not limit:
# Build error response
return {
"success": False,
"message": "Invalid parameters"
}, 400
# get links
if slogging_session != None:
links = slog.get_all_url(slogging_session, db_name, page, limit)
# Build response
return {
"success": True,
"result": {
"links": links
}
}, 200
@app.route("/urlsummary", methods=['POST'])
def summary ():
"""
URL summary
"""
# get parameters
urls = None
db_name = default_database_name
if extract_request_params(request).get("database"):
db_name = extract_request_params(request)["database"]
if extract_request_params(request).get("urls"):
urls = extract_request_params(request)["urls"]
if not urls:
# Build error response
return {
"success": False,
"message": "Invalid parameters"
}, 400
summary_r = slog.get_url_summary(slogging_session, db_name, urls)
# Build response
return {
"success": True,
"result": {
"summary": summary_r
}
}, 200
# Server starter
def flaskserver ():
"""
start server
"""
app.run(host='0.0.0.0', port=5003, debug=False)
# Enable CORS
CORS(app)
if __name__ == "__main__":
# create default database
db_name, status = create_database("default")
if status:
default_database_name = db_name
logging.debug("Default DB name: " + default_database_name)
# start server
flaskserver()
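# Hypothetical client-side sketch (not part of this service) for the routes defined above,
# assuming the server is reachable on localhost:5003 and the `requests` package is available:
#
#   import requests
#   base = "http://localhost:5003"
#   db = requests.post(base + "/create", json={"seed": "my-unique-seed"}).json()["databaseName"]
#   requests.post(base + "/index", json={"database": db, "url": "https://example.com", "html": "<html>...</html>"})
#   requests.post(base + "/search", json={"database": db, "query": "example query"}).json()["result"]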
| 26.101828 | 222 | 0.591778 |
7948467f6893a503dcefd354cc6ca4e0f9e7dbef
| 1,475 |
py
|
Python
|
python/example_code/cloudwatch/delete_alarms.py
|
gabehollombe-aws/aws-doc-sdk-examples
|
dfc0e06ebe1762ab127f3ef5f425507644c6a99c
|
[
"Apache-2.0"
] | 12 |
2020-07-28T01:20:15.000Z
|
2021-12-10T10:52:49.000Z
|
python/example_code/cloudwatch/delete_alarms.py
|
gabehollombe-aws/aws-doc-sdk-examples
|
dfc0e06ebe1762ab127f3ef5f425507644c6a99c
|
[
"Apache-2.0"
] | 5 |
2021-12-10T01:52:47.000Z
|
2022-01-04T16:47:45.000Z
|
python/example_code/cloudwatch/delete_alarms.py
|
gabehollombe-aws/aws-doc-sdk-examples
|
dfc0e06ebe1762ab127f3ef5f425507644c6a99c
|
[
"Apache-2.0"
] | 5 |
2020-08-29T14:01:38.000Z
|
2021-11-18T07:11:49.000Z
|
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# snippet-start:[cloudwatch.python.delete_alarms.complete]
import boto3
# Create CloudWatch client
cloudwatch = boto3.client('cloudwatch')
# Delete alarm
cloudwatch.delete_alarms(
AlarmNames=['Web_Server_CPU_Utilization'],
)
# snippet-end:[cloudwatch.python.delete_alarms.complete]
# snippet-comment:[These are tags for the AWS doc team's sample catalog. Do not remove.]
# snippet-sourcedescription:[delete_alarm.py demonstrates how to delete an array of Amazon CloudWatch alarms given the alarm names.]
# snippet-keyword:[Python]
# snippet-sourcesyntax:[python]
# snippet-keyword:[AWS SDK for Python (Boto3)]
# snippet-keyword:[Code Sample]
# snippet-keyword:[Amazon Cloudwatch]
# snippet-service:[cloudwatch]
# snippet-sourcetype:[full-example]
# snippet-sourcedate:[2018-12-26]
# snippet-sourceauthor:[jschwarzwalder (AWS)]
| 35.97561 | 133 | 0.750508 |