Dataset schema (35 columns; "string (length)" rows report min/max string length, numeric rows report min/max value):

| Column | Dtype | Min | Max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string (length) | 7 | 55 |
| file_path | string (length) | 9 | 332 |
| class_name | string (length) | 3 | 290 |
| human_written_code | string (length) | 12 | 4.36M |
| class_skeleton | string (length) | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |

Each record below gives id, repository_name, file_path, class_name, human_written_code, and class_skeleton as labeled fields, followed by a single pipe-delimited row containing the remaining 29 metric columns (total_program_units through SumCyclomatic) in the order listed above.
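For programmatic use, a minimal sketch of querying a table shaped like this one follows. It assumes the rows have already been loaded into a pandas DataFrame named `df` with exactly the columns in the schema; the loading step, the variable name, and the complexity threshold are assumptions for illustration, not part of this excerpt.

```python
# Minimal sketch, assuming the table is available as a pandas DataFrame `df`
# with the schema above (loading is not shown in this excerpt).
import pandas as pd


def flag_complex_classes(df: pd.DataFrame, max_cyclomatic: int = 50) -> pd.DataFrame:
    """Return identifying columns plus a few size/complexity metrics,
    sorted by total cyclomatic complexity."""
    cols = [
        "id", "repository_name", "class_name",
        "CountLineCode", "CountDeclMethod", "SumCyclomatic", "CommentToCodeRatio",
    ]
    out = df[cols].copy()
    # Flag classes whose most complex method exceeds the (illustrative) threshold.
    out["high_complexity"] = df["MaxCyclomatic"] > max_cyclomatic
    return out.sort_values("SumCyclomatic", ascending=False)
```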
id: 146,448 | repository_name: LogicalDash/LiSE | class_name: lisien.util.FinalRule
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/util.py
human_written_code:
class FinalRule:
"""A singleton sentinel for the rule iterator"""
__slots__ = []
def __hash__(self):
# completely random integer
return 6448962173793096248
class_skeleton:
class FinalRule:
'''A singleton sentinel for the rule iterator'''
def __hash__(self):
pass
| 2 | 1 | 3 | 0 | 2 | 1 | 1 | 0.5 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 8 | 2 | 4 | 3 | 2 | 2 | 4 | 3 | 2 | 1 | 0 | 0 | 1 |
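As this first record shows, class_skeleton mirrors human_written_code with docstrings kept, method bodies replaced by `pass`, and non-definition statements (such as `__slots__`) dropped. The sketch below is one way such a skeleton could be derived with Python's `ast` module; the dataset's actual extraction tooling is not shown in this excerpt, so `make_skeleton` is illustrative only (it needs Python 3.9+ for `ast.unparse`).

```python
# Illustrative sketch: derive a pass-bodied skeleton from class source, keeping docstrings.
# Not the dataset's real pipeline; requires Python 3.9+ for ast.unparse.
import ast


def make_skeleton(source: str) -> str:
    tree = ast.parse(source)
    for node in ast.walk(tree):
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            doc = ast.get_docstring(node)
            body = []
            if doc is not None:
                body.append(ast.Expr(value=ast.Constant(value=doc, kind=None)))
            body.append(ast.Pass())  # replace the implementation with `pass`
            node.body = body
        elif isinstance(node, ast.ClassDef):
            doc = ast.get_docstring(node)
            kept = []
            if doc is not None:
                kept.append(ast.Expr(value=ast.Constant(value=doc, kind=None)))
            # keep nested classes/methods, drop plain statements such as __slots__
            kept.extend(
                child for child in node.body
                if isinstance(child, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef))
            )
            node.body = kept or [ast.Pass()]
    return ast.unparse(tree)
```

Applied to the FinalRule source above, this yields a skeleton equivalent to the class_skeleton cell (up to docstring quoting style).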
id: 146,449 | repository_name: LogicalDash/LiSE | class_name: lisien.util.SignalDict
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/util.py
human_written_code:
class SignalDict(Signal, dict):
def __setitem__(self, __key, __value):
super().__setitem__(__key, __value)
self.send(self, key=__key, value=__value)
def __delitem__(self, __key):
super().__delitem__(__key)
self.send(self, key=__key, value=None)
class_skeleton:
class SignalDict(Signal, dict):
def __setitem__(self, __key, __value):
pass
def __delitem__(self, __key):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 2 | 1 | 0 | 0 | 2 | 0 | 2 | 29 | 8 | 1 | 7 | 3 | 4 | 0 | 7 | 3 | 4 | 1 | 2 | 0 | 2 |
id: 146,450 | repository_name: LogicalDash/LiSE | class_name: lisien.util.SpecialMappingDescriptor
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/util.py
human_written_code:
class SpecialMappingDescriptor:
def __init__(self, mapclsname):
self.mapps = {}
self.mapclsname = mapclsname
def __get__(self, instance, owner):
attname = "_" + self.mapclsname
if not hasattr(instance, attname):
mappcls = getattr(instance, self.mapclsname)
setattr(instance, attname, mappcls(instance))
return getattr(instance, attname)
def __set__(self, instance, value):
attname = "_" + self.mapclsname
if not hasattr(instance, attname):
mappcls = getattr(instance, self.mapclsname)
setattr(instance, attname, mappcls(instance))
it = getattr(instance, attname)
it.clear()
it.update(value)
class_skeleton:
class SpecialMappingDescriptor:
def __init__(self, mapclsname):
pass
def __get__(self, instance, owner):
pass
def __set__(self, instance, value):
pass
| 4 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 3 | 2 | 3 | 3 | 20 | 2 | 18 | 11 | 14 | 0 | 18 | 11 | 14 | 2 | 0 | 1 | 5 |
id: 146,451 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.tests.test_all.DictStorageTest
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/tests/test_all.py
human_written_code:
class DictStorageTest(AllegedTest):
"""Make sure the dict wrapper works"""
def runTest(self):
for i, graphmaker in enumerate(self.graphmakers):
self.engine.turn = i
g = graphmaker("testgraph")
g.add_node(0)
g.add_node(1)
g.add_edge(0, 1)
n = g.node[0]
e = g.edge[0][1]
for entity in g.graph, n, e:
entity[0] = {
"spam": "eggs",
"ham": {"baked beans": "delicious"},
"qux": ["quux", "quuux"],
"clothes": {"hats", "shirts", "pants"},
"dicts": {"foo": {"bar": "bas"}, "qux": {"quux": "quuux"}},
}
self.engine.turn = i + 1
for entity in g.graph, n, e:
self.assertEqual(entity[0]["spam"], "eggs")
entity[0]["spam"] = "ham"
self.assertEqual(entity[0]["spam"], "ham")
self.assertEqual(
entity[0]["ham"], {"baked beans": "delicious"}
)
entity[0]["ham"]["baked beans"] = "disgusting"
self.assertEqual(
entity[0]["ham"], {"baked beans": "disgusting"}
)
self.assertEqual(entity[0]["qux"], ["quux", "quuux"])
entity[0]["qux"] = ["quuux", "quux"]
self.assertEqual(entity[0]["qux"], ["quuux", "quux"])
self.assertEqual(
entity[0]["clothes"], {"hats", "shirts", "pants"}
)
entity[0]["clothes"].remove("hats")
self.assertEqual(entity[0]["clothes"], {"shirts", "pants"})
self.assertEqual(
entity[0]["dicts"],
{"foo": {"bar": "bas"}, "qux": {"quux": "quuux"}},
)
del entity[0]["dicts"]["foo"]
entity[0]["dicts"]["qux"]["foo"] = {"bar": "bas"}
self.assertEqual(
entity[0]["dicts"],
{"qux": {"foo": {"bar": "bas"}, "quux": "quuux"}},
)
self.engine.turn = i
for entity in g.graph, n, e:
self.assertEqual(entity[0]["spam"], "eggs")
self.assertEqual(
entity[0]["ham"], {"baked beans": "delicious"}
)
self.assertEqual(entity[0]["qux"], ["quux", "quuux"])
self.assertEqual(
entity[0]["clothes"], {"hats", "shirts", "pants"}
)
self.assertEqual(
entity[0]["dicts"],
{"foo": {"bar": "bas"}, "qux": {"quux": "quuux"}},
)
class_skeleton:
class DictStorageTest(AllegedTest):
'''Make sure the dict wrapper works'''
def runTest(self):
pass
| 2 | 1 | 61 | 0 | 61 | 0 | 5 | 0.02 | 1 | 1 | 0 | 2 | 1 | 0 | 1 | 74 | 64 | 1 | 62 | 7 | 60 | 1 | 37 | 7 | 35 | 5 | 3 | 2 | 5 |
id: 146,452 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.tests.test_all.AllegedTest
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/tests/test_all.py
human_written_code:
class AllegedTest(unittest.TestCase):
def setUp(self):
self.engine = ORM("sqlite:///:memory:")
self.graphmakers = (self.engine.new_digraph,)
class_skeleton:
class AllegedTest(unittest.TestCase):
def setUp(self):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 1 | 7 | 1 | 2 | 1 | 73 | 4 | 0 | 4 | 4 | 2 | 0 | 4 | 4 | 2 | 1 | 2 | 0 | 1 |
id: 146,453 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.tests.test_all.AbstractGraphTest
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/tests/test_all.py
human_written_code:
class AbstractGraphTest:
def test_graph_objects_create_delete(self):
for graphmaker in self.graphmakers:
self.engine.time = graphmaker.__name__, 0
g = graphmaker(graphmaker.__name__)
self.assertFalse(self.engine._node_exists(graphmaker.__name__, 0))
g.add_node(0)
self.assertTrue(self.engine._node_exists(graphmaker.__name__, 0))
self.assertIn(0, g)
g.add_node(1)
self.assertIn(1, g)
g.add_edge(0, 1)
self.assertIn(1, g.adj[0])
self.assertIn(1, list(g.adj[0]))
g.add_edge(2, 3)
self.assertIn(2, g.node)
self.assertIn(3, g.node)
self.assertIn(2, g.adj)
self.assertIn(3, g.adj[2])
self.assertIn(3, list(g.adj[2]))
if hasattr(g, "pred_cls"):
self.assertIn(2, g.pred[3])
g.add_edge(2, 4)
self.assertIn(2, g.pred[4])
self.assertIn(2, list(g.pred[4]))
self.assertIn(4, g.adj[2])
self.assertIn(4, list(g.adj[2]))
del g.pred[4]
self.assertNotIn(2, g.pred[4])
self.assertNotIn(2, list(g.pred[4]))
self.assertNotIn(4, g.adj[2])
self.assertNotIn(4, list(g.adj[2]))
self.assertIn(4, g.node)
self.assertNotIn(1, g.adj)
else:
self.assertIn(0, g.adj[1])
self.assertIn(0, list(g.adj[1]))
self.engine.turn = 1
self.assertIn(0, g)
self.assertIn(1, g)
self.engine.branch = graphmaker.__name__ + "_no_edge"
self.assertIn(3, g.node)
self.assertIn(0, g)
self.assertTrue(self.engine._node_exists(graphmaker.__name__, 0))
self.assertIn(1, g)
self.assertIn(1, g.adj[0])
self.assertIn(1, list(g.adj[0]))
if hasattr(g, "pred_cls"):
self.assertNotIn(1, g.adj)
self.assertNotIn(1, list(g.adj))
else:
self.assertIn(0, g.adj[1])
self.assertIn(0, list(g.adj[1]))
g.remove_edge(0, 1)
self.assertIn(0, g)
self.assertIn(1, g)
self.assertNotIn(1, g.adj)
self.assertNotIn(1, list(g.adj))
self.assertNotIn(1, g.adj[0])
self.assertNotIn(1, list(g.adj[0]))
self.engine.branch = graphmaker.__name__ + "_triangle"
self.assertIn(3, g.node)
self.assertIn(2, g)
g.add_edge(0, 1)
self.assertIn(1, g.adj[0])
self.assertIn(1, list(g.adj[0]))
if g.is_directed():
g.add_edge(1, 0)
self.assertIn(0, g.adj[1])
self.assertIn(0, list(g.adj[1]))
g.add_edge(1, 2)
g.add_edge(2, 1)
g.add_edge(2, 0)
g.add_edge(0, 2)
self.assertIn(2, g.adj[0])
self.assertIn(2, list(g.adj[0]))
self.engine.branch = graphmaker.__name__ + "_square"
self.assertTrue(self.engine._node_exists(graphmaker.__name__, 0))
self.assertIn(3, g.node)
self.assertIn(2, list(g.adj[0]))
self.engine.turn = 2
self.assertIn(2, g)
self.assertIn(2, list(g.node.keys()))
self.assertIn(2, g.adj[0])
self.assertIn(2, list(g.adj[0]))
self.assertTrue(self.engine._node_exists(graphmaker.__name__, 0))
g.remove_edge(2, 0)
self.assertNotIn(0, g.adj[2])
self.assertNotIn(0, list(g.adj[2]))
self.assertIn(0, g.node)
self.assertTrue(self.engine._node_exists(graphmaker.__name__, 0))
self.assertNotIn(3, g.adj)
g.add_edge(3, 0)
self.assertIn(3, g.adj)
self.assertEqual(self.engine.turn, 2)
self.assertIn(0, g.adj[3])
self.assertIn(0, list(g.adj[3]))
self.assertIn(0, g.node)
self.assertTrue(self.engine._node_exists(graphmaker.__name__, 0))
if g.is_directed():
self.assertIn(2, g.pred[3])
self.assertIn(3, g.pred[0])
self.engine.branch = graphmaker.__name__ + "_de_edge"
self.assertIn(3, g.node)
self.assertIn(0, g.node)
self.assertTrue(self.engine._node_exists(graphmaker.__name__, 0))
g.remove_node(3)
self.assertNotIn(3, g.node)
self.assertNotIn(3, g.adj)
self.assertNotIn(3, g.adj[2])
if g.is_directed():
self.assertNotIn(3, g.pred)
self.assertNotIn(3, g.pred[0])
self.engine.branch = graphmaker.__name__ + "_square"
self.assertEqual(self.engine.turn, 2)
self.assertNotIn(0, g.adj[2])
self.assertNotIn(0, list(g.adj[2]))
self.assertIn(0, g.adj[3])
self.assertIn(0, list(g.adj[3]))
self.assertIn(3, g.node)
self.engine.branch = graphmaker.__name__ + "_nothing"
self.assertNotIn(0, g.adj[2])
self.assertNotIn(0, list(g.adj[2]))
self.assertIn(0, g.adj[3])
self.assertIn(0, list(g.adj[3]))
self.assertIn(3, g.node)
g.remove_nodes_from((0, 1, 2, 3))
for n in (0, 1, 2, 3):
self.assertNotIn(n, g.node)
self.assertNotIn(n, g.adj)
self.engine.time = "trunk", 0
class_skeleton:
class AbstractGraphTest:
def test_graph_objects_create_delete(self):
pass
| 2 | 0 | 130 | 0 | 130 | 0 | 8 | 0 | 0 | 1 | 0 | 2 | 1 | 0 | 1 | 1 | 131 | 0 | 131 | 5 | 129 | 0 | 129 | 5 | 127 | 8 | 0 | 2 | 8 |
id: 146,454 | repository_name: LogicalDash/LiSE | class_name: lisien.xcollections.LanguageDescriptor
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
human_written_code:
class LanguageDescriptor(AbstractLanguageDescriptor):
def _get_language(self, inst):
return inst._language
def _set_language(self, inst, val):
inst._load_language(val)
inst.query.global_set("language", val)
class_skeleton:
class LanguageDescriptor(AbstractLanguageDescriptor):
def _get_language(self, inst):
pass
def _set_language(self, inst, val):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 5 | 7 | 1 | 6 | 3 | 3 | 0 | 6 | 3 | 3 | 1 | 2 | 0 | 2 |
id: 146,455 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.tests.test_all.AbstractBranchLineageTest
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/tests/test_all.py
human_written_code:
class AbstractBranchLineageTest(AbstractGraphTest):
# TODO: an analogue of this test for when you're looking up keyframes
# in parent branches
def runTest(self):
"""Create some branches of history and check that allegedb remembers where
each came from and what happened in each.
"""
for graphmaker in self.graphmakers:
gmn = graphmaker.__name__
self.assertTrue(self.engine.is_ancestor_of("trunk", gmn))
self.assertTrue(self.engine.is_ancestor_of(gmn, gmn + "_no_edge"))
self.assertTrue(self.engine.is_ancestor_of(gmn, gmn + "_triangle"))
self.assertTrue(self.engine.is_ancestor_of(gmn, gmn + "_nothing"))
self.assertTrue(
self.engine.is_ancestor_of(gmn + "_no_edge", gmn + "_triangle")
)
self.assertTrue(
self.engine.is_ancestor_of(gmn + "_square", gmn + "_nothing")
)
self.assertFalse(
self.engine.is_ancestor_of(gmn + "_nothing", "trunk")
)
self.assertFalse(
self.engine.is_ancestor_of(gmn + "_triangle", gmn + "_no_edge")
)
g = self.engine.graph[gmn]
self.engine.branch = gmn
self.assertIn(0, g.node)
self.assertIn(1, g.node)
self.assertIn(0, g.edge)
self.assertIn(1, g.edge[0])
self.engine.turn = 0
def badjump():
self.engine.branch = gmn + "_no_edge"
self.assertRaises(ValueError, badjump)
self.engine.turn = 2
self.engine.branch = gmn + "_no_edge"
self.assertIn(0, g)
self.assertIn(0, list(g.node.keys()))
self.assertNotIn(1, g.edge[0])
if g.is_multigraph():
self.assertRaises(KeyError, lambda: g.edge[0][1][0])
else:
self.assertRaises(KeyError, lambda: g.edge[0][1])
self.engine.branch = gmn + "_triangle"
self.assertIn(2, g.node)
for orig in (0, 1, 2):
for dest in (0, 1, 2):
if orig == dest:
continue
self.assertIn(orig, g.edge)
self.assertIn(dest, g.edge[orig])
self.engine.branch = gmn + "_square"
self.assertNotIn(0, g.edge[2])
if g.is_multigraph():
self.assertRaises(KeyError, lambda: g.edge[2][0][0])
else:
self.assertRaises(KeyError, lambda: g.edge[2][0])
self.engine.turn = 2
self.assertIn(3, g.node)
self.assertIn(1, g.edge[0])
self.assertIn(2, g.edge[1])
self.assertIn(3, g.edge[2])
self.assertIn(0, g.edge[3])
self.engine.branch = gmn + "_nothing"
for node in (0, 1, 2):
self.assertNotIn(node, g.node)
self.assertNotIn(node, g.edge)
self.engine.branch = gmn
self.engine.turn = 0
self.assertIn(0, g.node)
self.assertIn(1, g.node)
self.assertIn(0, g.edge)
self.assertIn(1, g.edge[0])
class_skeleton:
class AbstractBranchLineageTest(AbstractGraphTest):
def runTest(self):
'''Create some branches of history and check that allegedb remembers where
each came from and what happened in each.
'''
pass
def badjump():
pass
| 3 | 1 | 38 | 2 | 35 | 2 | 5 | 0.07 | 1 | 3 | 0 | 2 | 1 | 0 | 1 | 2 | 77 | 3 | 69 | 9 | 66 | 5 | 59 | 9 | 56 | 8 | 1 | 4 | 9 |
id: 146,456 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.query.GlobalKeyValueStore
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/query.py
human_written_code:
class GlobalKeyValueStore(MutableMapping):
"""A dict-like object that keeps its contents in a table.
Mostly this is for holding the current branch and revision.
"""
def __init__(self, qe):
self.qe = qe
self._cache = dict(qe.global_items())
def __iter__(self):
yield from self._cache
def __len__(self):
return len(self._cache)
def __getitem__(self, k):
ret = self._cache[k]
if isinstance(ret, dict):
return DictWrapper(
lambda: self._cache[k],
lambda v: self.__setitem__(k, v),
self,
k,
)
elif isinstance(ret, list):
return ListWrapper(
lambda: self._cache[k],
lambda v: self.__setitem__(k, v),
self,
k,
)
elif isinstance(ret, set):
return SetWrapper(
lambda: self._cache[k],
lambda v: self.__setitem__(k, v),
self,
k,
)
return ret
def __setitem__(self, k, v):
if hasattr(v, "unwrap"):
v = v.unwrap()
self.qe.global_set(k, v)
self._cache[k] = v
def __delitem__(self, k):
del self._cache[k]
self.qe.global_del(k)
class_skeleton:
class GlobalKeyValueStore(MutableMapping):
'''A dict-like object that keeps its contents in a table.
Mostly this is for holding the current branch and revision.
'''
def __init__(self, qe):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __getitem__(self, k):
pass
def __setitem__(self, k, v):
pass
def __delitem__(self, k):
pass
| 7 | 1 | 7 | 0 | 7 | 0 | 2 | 0.07 | 1 | 6 | 3 | 0 | 6 | 2 | 6 | 47 | 51 | 8 | 40 | 10 | 33 | 3 | 23 | 10 | 16 | 4 | 7 | 1 | 10 |
id: 146,457 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.query.ConnectionHolder
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/query.py
human_written_code:
class ConnectionHolder:
strings: dict
def __init__(
self, dbstring, connect_args, inq, outq, fn, tables, gather=None
):
self.lock = Lock()
self.existence_lock = Lock()
self.existence_lock.acquire()
self._dbstring = dbstring
self._connect_args = connect_args
self._fn = fn
self.inq = inq
self.outq = outq
self.tables = tables
if gather is not None:
self.gather = gather
def commit(self):
self.transaction.commit()
self.transaction = self.connection.begin()
def init_table(self, tbl):
return self.call_one("create_{}".format(tbl))
def run(self):
dbstring = self._dbstring
connect_args = self._connect_args
if hasattr(self, "gather"):
gather_sql = self.gather
else:
from .alchemy import gather_sql
if isinstance(dbstring, Engine):
self.engine = dbstring
else:
self.engine = create_engine(
dbstring, connect_args=connect_args, poolclass=NullPool
)
self.meta = MetaData()
self.sql = gather_sql(self.meta)
self.connection = self.engine.connect()
self.transaction = self.connection.begin()
while True:
inst = self.inq.get()
if inst == "shutdown":
self.transaction.close()
self.connection.close()
self.engine.dispose()
self.existence_lock.release()
return
if inst == "commit":
self.commit()
continue
if inst == "initdb":
self.outq.put(self.initdb())
continue
if isinstance(inst, Select):
res = self.connection.execute(inst).fetchall()
self.outq.put(res)
continue
silent = False
if inst[0] == "silent":
inst = inst[1:]
silent = True
if inst[0] == "echo":
self.outq.put(inst[1])
elif inst[0] == "one":
try:
res = self.call_one(inst[1], *inst[2], **inst[3])
if not silent:
if hasattr(res, "returns_rows"):
if res.returns_rows:
o = list(res)
self.outq.put(o)
else:
self.outq.put(None)
else:
o = list(res)
self.outq.put(o)
except Exception as ex:
if not silent:
self.outq.put(ex)
elif inst[0] != "many":
raise ValueError(f"Invalid instruction: {inst[0]}")
else:
try:
res = self.call_many(inst[1], inst[2])
if not silent:
if hasattr(res, "returns_rows"):
if res.returns_rows:
self.outq.put(list(res))
else:
self.outq.put(None)
else:
rez = list(res.fetchall())
self.outq.put(rez or None)
except Exception as ex:
if not silent:
self.outq.put(ex)
def call_one(self, k, *largs, **kwargs):
statement = self.sql[k].compile(dialect=self.engine.dialect)
if hasattr(statement, "positiontup"):
kwargs.update(dict(zip(statement.positiontup, largs)))
return self.connection.execute(statement, kwargs)
elif largs:
raise TypeError("{} is a DDL query, I think".format(k))
return self.connection.execute(self.sql[k], kwargs)
def call_many(self, k, largs):
statement = self.sql[k].compile(dialect=self.engine.dialect)
return self.connection.execute(
statement,
[dict(zip(statement.positiontup, larg)) for larg in largs],
)
def initdb(self):
"""Create tables and indices as needed."""
for table in (
"branches",
"turns",
"graphs",
"graph_val",
"nodes",
"node_val",
"edges",
"edge_val",
"plans",
"plan_ticks",
"keyframes",
"keyframes_graphs",
"global",
):
try:
ret = self.init_table(table)
except OperationalError:
pass
except Exception as ex:
return ex
self.commit()
class_skeleton:
class ConnectionHolder:
def __init__(
self, dbstring, connect_args, inq, outq, fn, tables, gather=None
):
pass
def commit(self):
pass
def init_table(self, tbl):
pass
def run(self):
pass
def call_one(self, k, *largs, **kwargs):
pass
def call_many(self, k, largs):
pass
def initdb(self):
'''Create tables and indices as needed.'''
pass
| 8 | 1 | 19 | 0 | 19 | 0 | 5 | 0.01 | 0 | 6 | 0 | 2 | 7 | 14 | 7 | 7 | 140 | 7 | 132 | 39 | 121 | 1 | 101 | 35 | 92 | 22 | 0 | 6 | 34 |
id: 146,458 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.graph.Node
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
human_written_code:
class Node(AbstractEntityMapping):
"""Mapping for node attributes"""
__slots__ = (
"graph",
"node",
"db",
"__weakref__",
"_iter_stuff",
"_cache_contains_stuff",
"_len_stuff",
"_get_cache_stuff",
"_set_db_stuff",
"_set_cache_stuff",
)
def _validate_node_type(self):
return True
def __init__(self, graph, node):
"""Store name and graph"""
super().__init__()
self.graph = graph
self.node = node
self.db = db = graph.db
node_val_cache = db._node_val_cache
graphn = graph.name
btt = db._btt
self._iter_stuff = (node_val_cache.iter_entity_keys, graphn, node, btt)
self._cache_contains_stuff = (
node_val_cache.contains_key,
graphn,
node,
)
self._len_stuff = (node_val_cache.count_entity_keys, graphn, node, btt)
self._get_cache_stuff = (node_val_cache.retrieve, graphn, node)
self._set_db_stuff = (db.query.node_val_set, graphn, node)
self._set_cache_stuff = (db._node_val_cache.store, graphn, node)
def __repr__(self):
return "{}(graph={}, node={})".format(
self.__class__.__name__, self.graph, self.node
)
def __str__(self):
return "{}(graph={}, node={}, data={})".format(
self.__class__.__name__, self.graph, self.node, repr(dict(self))
)
def __iter__(self):
iter_entity_keys, graphn, node, btt = self._iter_stuff
return iter_entity_keys(graphn, node, *btt())
def _cache_contains(self, key, branch, turn, tick):
contains_key, graphn, node = self._cache_contains_stuff
return contains_key(graphn, node, key, branch, turn, tick)
def __len__(self):
count_entity_keys, graphn, node, btt = self._len_stuff
return count_entity_keys(graphn, node, *btt())
def _get_cache(self, key, branch, turn, tick):
retrieve, graphn, node = self._get_cache_stuff
return retrieve(graphn, node, key, branch, turn, tick)
def _set_db(self, key, branch, turn, tick, value):
node_val_set, graphn, node = self._set_db_stuff
node_val_set(graphn, node, key, branch, turn, tick, value)
def _set_cache(self, key, branch, turn, tick, value):
store, graphn, node = self._set_cache_stuff
store(graphn, node, key, branch, turn, tick, value)
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
return dict(self) == dict(other)
class_skeleton:
class Node(AbstractEntityMapping):
'''Mapping for node attributes'''
def _validate_node_type(self):
pass
def __init__(self, graph, node):
'''Store name and graph'''
pass
def __repr__(self):
pass
def __str__(self):
pass
def __iter__(self):
pass
def _cache_contains(self, key, branch, turn, tick):
pass
def __len__(self):
pass
def _get_cache(self, key, branch, turn, tick):
pass
def _set_db(self, key, branch, turn, tick, value):
pass
def _set_cache(self, key, branch, turn, tick, value):
pass
def __eq__(self, other):
pass
| 12 | 2 | 5 | 0 | 5 | 0 | 1 | 0.03 | 1 | 2 | 0 | 1 | 11 | 9 | 11 | 67 | 77 | 12 | 63 | 31 | 51 | 2 | 44 | 31 | 32 | 2 | 10 | 1 | 12 |
id: 146,459 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.graph.GraphsMapping
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
human_written_code:
class GraphsMapping(MutableMapping):
def __init__(self, orm):
self.orm = orm
def __iter__(self):
"""Iterate over every character name."""
for name in self.orm._graph_objs:
if name in self:
yield name
def __contains__(self, name):
"""Has this character been created?"""
try:
return (
self.orm._graph_cache.retrieve(name, *self.orm._btt())
!= "Deleted"
)
except KeyError:
return False
def __len__(self):
"""How many characters have been created?"""
n = 0
for name in self.orm._graph_objs:
if name in self:
n += 1
return n
def __getitem__(self, item):
if not self.orm._has_graph(item):
raise KeyError(f"No such graph: {item}", item)
return self.orm._graph_objs[item]
def __setitem__(self, key, value):
if isinstance(value, networkx.MultiDiGraph):
self.orm.new_multidigraph(key, data=value)
elif isinstance(value, networkx.DiGraph):
self.orm.new_digraph(key, data=value)
elif isinstance(value, networkx.MultiGraph):
self.orm.new_multigraph(key, data=value)
else:
self.orm.new_graph(key, data=value)
def __delitem__(self, key):
if key not in self:
raise KeyError("No such graph")
self.orm.query.del_graph(key, *self.orm._btt())
if key in self.orm._graph_objs:
del self.orm._graph_objs[key]
class_skeleton:
class GraphsMapping(MutableMapping):
def __init__(self, orm):
pass
def __iter__(self):
'''Iterate over every character name.'''
pass
def __contains__(self, name):
'''Has this character been created?'''
pass
def __len__(self):
'''How many characters have been created?'''
pass
def __getitem__(self, item):
pass
def __setitem__(self, key, value):
pass
def __delitem__(self, key):
pass
| 8 | 3 | 6 | 0 | 6 | 0 | 3 | 0.07 | 1 | 1 | 0 | 1 | 7 | 1 | 7 | 48 | 49 | 6 | 40 | 12 | 32 | 3 | 34 | 12 | 26 | 4 | 7 | 2 | 18 |
id: 146,460 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.graph.GraphSuccessorsMapping.Successors
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
human_written_code:
class Successors(AbstractSuccessors):
__slots__ = ("graph", "container", "orig", "_cache")
def _order_nodes(self, dest):
if dest < self.orig:
return (dest, self.orig)
else:
return (self.orig, dest)
class_skeleton:
class Successors(AbstractSuccessors):
def _order_nodes(self, dest):
pass
| 2 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 59 | 8 | 1 | 7 | 3 | 5 | 0 | 6 | 3 | 4 | 2 | 11 | 1 | 2 |
id: 146,461 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.graph.GraphSuccessorsMapping
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
human_written_code:
class GraphSuccessorsMapping(GraphEdgeMapping):
"""Mapping for Successors (itself a MutableMapping)"""
__slots__ = ("graph",)
class Successors(AbstractSuccessors):
__slots__ = ("graph", "container", "orig", "_cache")
def _order_nodes(self, dest):
if dest < self.orig:
return (dest, self.orig)
else:
return (self.orig, dest)
def __getitem__(self, orig):
if orig not in self._cache:
self._cache[orig] = self.Successors(self, orig)
return self._cache[orig]
def __setitem__(self, key, val):
"""Wipe out any edges presently emanating from orig and replace them
with those described by val
"""
if key in self:
sucs = self[key]
sucs.clear()
else:
sucs = self._cache[key] = self.Successors(self, key)
if val:
sucs.update(val)
def __delitem__(self, key):
"""Wipe out edges emanating from orig"""
self[key].clear()
del self._cache[key]
def __iter__(self):
for node in self.graph.node:
if node in self:
yield node
def __len__(self):
n = 0
for node in self.graph.node:
if node in self:
n += 1
return n
def __contains__(self, key):
return (
self.db._edges_cache.count_successors(
self.graph.name, key, *self.db._btt()
)
> 0
)
def __repr__(self):
cls = self.__class__
return "<{}.{} object containing {}>".format(
cls.__module__,
cls.__name__,
{
k: {k2: dict(v2) for (k2, v2) in v.items()}
for (k, v) in self.items()
},
)
class_skeleton:
class GraphSuccessorsMapping(GraphEdgeMapping):
'''Mapping for Successors (itself a MutableMapping)'''
class Successors(AbstractSuccessors):
def _order_nodes(self, dest):
pass
def __getitem__(self, orig):
pass
def __setitem__(self, key, val):
'''Wipe out any edges presently emanating from orig and replace them
with those described by val
'''
pass
def __delitem__(self, key):
'''Wipe out edges emanating from orig'''
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, key):
pass
def __repr__(self):
pass
| 10 | 3 | 7 | 0 | 6 | 1 | 2 | 0.1 | 1 | 2 | 1 | 1 | 7 | 0 | 7 | 54 | 67 | 11 | 51 | 17 | 41 | 5 | 37 | 17 | 27 | 3 | 10 | 2 | 16 |
id: 146,462 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.graph.GraphNodeMapping
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
human_written_code:
class GraphNodeMapping(AllegedMapping):
"""Mapping for nodes in a graph"""
__slots__ = ("graph",)
db = getatt("graph.db")
"""Alias to ``self.graph.db``"""
def __init__(self, graph):
super().__init__()
self.graph = graph
def __iter__(self):
"""Iterate over the names of the nodes"""
return self.db._nodes_cache.iter_entities(
self.graph.name, *self.db._btt()
)
def __eq__(self, other):
from collections.abc import Mapping
if not isinstance(other, Mapping):
return NotImplemented
if self.keys() != other.keys():
return False
for k in self.keys():
me = self[k]
you = other[k]
if hasattr(me, "unwrap") and not hasattr(me, "no_unwrap"):
me = me.unwrap()
if hasattr(you, "unwrap") and not hasattr(you, "no_unwrap"):
you = you.unwrap()
if me != you:
return False
else:
return True
def __contains__(self, node):
"""Return whether the node exists presently"""
return self.db._nodes_cache.contains_entity(
self.graph.name, node, *self.db._btt()
)
def __len__(self):
"""How many nodes exist right now?"""
return self.db._nodes_cache.count_entities(
self.graph.name, *self.db._btt()
)
def __getitem__(self, node):
"""If the node exists at present, return it, else throw KeyError"""
if node not in self:
raise KeyError
return self.db._get_node(self.graph, node)
def __setitem__(self, node, dikt):
"""Only accept dict-like values for assignment. These are taken to be
dicts of node attributes, and so, a new GraphNodeMapping.Node
is made with them, perhaps clearing out the one already there.
"""
created = False
db = self.db
graph = self.graph
gname = graph.name
if not db._node_exists(gname, node):
created = True
db._exist_node(gname, node, True)
n = db._get_node(graph, node)
n.clear()
n.update(dikt)
def __delitem__(self, node):
"""Indicate that the given node no longer exists"""
if node not in self:
raise KeyError("No such node")
for succ in self.graph.adj[node]:
del self.graph.adj[node][succ]
for pred in self.graph.pred[node]:
del self.graph.pred[node][pred]
branch, turn, tick = self.db._nbtt()
self.db.query.exist_node(
self.graph.name, node, branch, turn, tick, False
)
self.db._nodes_cache.store(
self.graph.name, node, branch, turn, tick, False
)
key = (self.graph.name, node)
if node in self.db._node_objs:
del self.db._node_objs[key]
def update(self, m, /, **kwargs):
for node, value in chain(m.items(), kwargs.items()):
if value is None:
del self[node]
elif node not in self:
self[node] = value
else:
self[node].update(value)
class_skeleton:
class GraphNodeMapping(AllegedMapping):
'''Mapping for nodes in a graph'''
def __init__(self, graph):
pass
def __iter__(self):
'''Iterate over the names of the nodes'''
pass
def __eq__(self, other):
pass
def __contains__(self, node):
'''Return whether the node exists presently'''
pass
def __len__(self):
'''How many nodes exist right now?'''
pass
def __getitem__(self, node):
'''If the node exists at present, return it, else throw KeyError'''
pass
def __setitem__(self, node, dikt):
'''Only accept dict-like values for assignment. These are taken to be
dicts of node attributes, and so, a new GraphNodeMapping.Node
is made with them, perhaps clearing out the one already there.
'''
pass
def __delitem__(self, node):
'''Indicate that the given node no longer exists'''
pass
def update(self, m, /, **kwargs):
pass
| 10 | 7 | 9 | 0 | 8 | 1 | 3 | 0.15 | 1 | 3 | 0 | 1 | 9 | 1 | 9 | 53 | 99 | 13 | 75 | 27 | 64 | 11 | 63 | 27 | 52 | 7 | 9 | 2 | 24 |
id: 146,463 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.graph.GraphMapping
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
human_written_code:
class GraphMapping(AbstractEntityMapping):
"""Mapping for graph attributes"""
__slots__ = (
"graph",
"db",
"_iter_stuff",
"_cache_contains_stuff",
"_len_stuff",
"_get_stuff",
"_set_db_stuff",
"_set_cache_stuff",
"_del_db_stuff",
"_get_cache_stuff",
)
def __init__(self, graph):
super().__init__()
self.graph = graph
self.db = db = graph.db
btt = db._btt
graph_val_cache = db._graph_val_cache
graphn = graph.name
self._iter_stuff = (graph_val_cache.iter_entity_keys, graphn, btt)
self._cache_contains_stuff = (graph_val_cache.contains_key, graphn)
self._len_stuff = (graph_val_cache.count_entities, graphn, btt)
self._get_stuff = (self._get_cache, btt)
graph_val_set = db.query.graph_val_set
self._set_db_stuff = (graph_val_set, graphn)
self._set_cache_stuff = (graph_val_cache.store, graphn)
self._del_db_stuff = (graph_val_set, graphn)
self._get_cache_stuff = (graph_val_cache.retrieve, graphn)
def __iter__(self):
iter_entity_keys, graphn, btt = self._iter_stuff
yield "name"
yield from iter_entity_keys(graphn, *btt())
def _cache_contains(self, key, branch, turn, tick):
contains_key, graphn = self._cache_contains_stuff
return contains_key(graphn, key, branch, turn, tick)
def __len__(self):
count_entities, graphn, btt = self._len_stuff
return 1 + count_entities(graphn, *btt())
def __getitem__(self, item):
if item == "name":
return self.graph.name
return super().__getitem__(item)
def __setitem__(self, key, value):
if key == "name":
raise KeyError("name cannot be changed after creation")
super().__setitem__(key, value)
def _get_cache(self, key, branch, turn, tick):
retrieve, graphn = self._get_cache_stuff
return retrieve(graphn, key, branch, turn, tick)
def _get(self, key):
get_cache, btt = self._get_stuff
return get_cache(key, *btt())
def _set_db(self, key, branch, turn, tick, value):
graph_val_set, graphn = self._set_db_stuff
graph_val_set(graphn, key, branch, turn, tick, value)
def _set_cache(self, key, branch, turn, tick, value):
store, graphn = self._set_cache_stuff
store(graphn, key, branch, turn, tick, value)
def _del_db(self, key, branch, turn, tick):
graph_val_set, graphn = self._del_db_stuff
graph_val_set(graphn, key, branch, turn, tick, None)
def clear(self):
keys = set(self.keys())
keys.remove("name")
for k in keys:
del self[k]
def unwrap(self):
return {
k: v.unwrap()
if hasattr(v, "unwrap") and not hasattr(v, "no_unwrap")
else v
for (k, v) in self.items()
}
def __eq__(self, other):
if hasattr(other, "unwrap"):
other = other.unwrap()
other = other.copy()
me = self.unwrap().copy()
if "name" not in other:
del me["name"]
return me == other
class_skeleton:
class GraphMapping(AbstractEntityMapping):
'''Mapping for graph attributes'''
def __init__(self, graph):
pass
def __iter__(self):
pass
def _cache_contains(self, key, branch, turn, tick):
pass
def __len__(self):
pass
def __getitem__(self, item):
pass
def __setitem__(self, key, value):
pass
def _get_cache(self, key, branch, turn, tick):
pass
    def _get(self, key):
        pass
def _set_db(self, key, branch, turn, tick, value):
pass
def _set_cache(self, key, branch, turn, tick, value):
pass
def _del_db(self, key, branch, turn, tick):
pass
def clear(self):
pass
def unwrap(self):
pass
def __eq__(self, other):
pass
| 15 | 1 | 5 | 0 | 5 | 0 | 1 | 0.01 | 1 | 3 | 0 | 0 | 14 | 10 | 14 | 70 | 98 | 15 | 82 | 41 | 67 | 1 | 66 | 41 | 51 | 3 | 10 | 1 | 20 |
id: 146,464 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.graph.GraphEdgeMapping
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
human_written_code:
class GraphEdgeMapping(AllegedMapping):
"""Provides an adjacency mapping and possibly a predecessor mapping
for a graph.
"""
__slots__ = ("graph", "_cache")
db = getatt("graph.db")
"""Alias to ``self.graph.db``"""
def __init__(self, graph):
super().__init__()
self.graph = graph
self._cache = {}
def __eq__(self, other):
"""Compare dictified versions of the edge mappings within me.
As I serve custom Predecessor or Successor classes, which
themselves serve the custom Edge class, I wouldn't normally be
comparable to a networkx adjacency dictionary. Converting
myself and the other argument to dicts allows the comparison
to work anyway.
"""
if not hasattr(other, "keys"):
return False
if self.keys() != other.keys():
return False
for k in self.keys():
if dict(self[k]) != dict(other[k]):
return False
return True
def __iter__(self):
return iter(self.graph.node)
class_skeleton:
class GraphEdgeMapping(AllegedMapping):
'''Provides an adjacency mapping and possibly a predecessor mapping
for a graph.
'''
def __init__(self, graph):
pass
def __eq__(self, other):
'''Compare dictified versions of the edge mappings within me.
As I serve custom Predecessor or Successor classes, which
themselves serve the custom Edge class, I wouldn't normally be
comparable to a networkx adjacency dictionary. Converting
myself and the other argument to dicts allows the comparison
to work anyway.
'''
pass
def __iter__(self):
pass
| 4 | 2 | 8 | 1 | 5 | 2 | 2 | 0.61 | 1 | 2 | 0 | 4 | 3 | 2 | 3 | 47 | 37 | 8 | 18 | 9 | 14 | 11 | 18 | 9 | 14 | 5 | 9 | 2 | 7 |
id: 146,465 | repository_name: LogicalDash/LiSE | class_name: lisien.allegedb.query.QueryEngine
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/query.py
human_written_code:
class QueryEngine(AbstractQueryEngine):
flush_edges_t = 0
holder_cls = ConnectionHolder
tables = (
"global",
"branches",
"turns",
"graphs",
"keyframes",
"graph_val",
"nodes",
"node_val",
"edges",
"edge_val",
"plans",
"plan_ticks",
"universals",
)
def __init__(
self, dbstring, connect_args, pack=None, unpack=None, gather=None
):
dbstring = dbstring or "sqlite:///:memory:"
self._inq = Queue()
self._outq = Queue()
self._holder = self.holder_cls(
dbstring, connect_args, self._inq, self._outq, self.tables, gather
)
if pack is None:
def pack(s: str) -> bytes:
return repr(s).encode()
if unpack is None:
from ast import literal_eval
def unpack(b: bytes) -> Any:
return literal_eval(b.decode())
self.pack = pack
self.unpack = unpack
self._branches = {}
self._nodevals2set = []
self._edgevals2set = []
self._graphvals2set = []
self._nodes2set = []
self._edges2set = []
self._new_keyframes = []
self._new_keyframe_times = set()
self._btts = set()
self._t = Thread(target=self._holder.run, daemon=True)
self._t.start()
def echo(self, string):
self._inq.put(("echo", string))
return self._outq.get()
def call_one(self, string, *args, **kwargs):
__doc__ = ConnectionHolder.call_one.__doc__
with self._holder.lock:
self._inq.put(("one", string, args, kwargs))
ret = self._outq.get()
if isinstance(ret, Exception):
raise ret
return ret
def call_many(self, string, args):
__doc__ = ConnectionHolder.call_many.__doc__
with self._holder.lock:
self._inq.put(("many", string, args))
ret = self._outq.get()
if isinstance(ret, Exception):
raise ret
return ret
def execute(self, stmt):
if not isinstance(stmt, Select):
raise TypeError("Only select statements should be executed")
self.flush()
with self._holder.lock:
self._inq.put(stmt)
return self._outq.get()
def new_graph(self, graph, branch, turn, tick, typ):
"""Declare a new graph by this name of this type."""
graph = self.pack(graph)
return self.call_one("graphs_insert", graph, branch, turn, tick, typ)
def keyframe_graph_insert(
self, graph, branch, turn, tick, nodes, edges, graph_val
):
self._new_keyframes.append(
(graph, branch, turn, tick, nodes, edges, graph_val)
)
self._new_keyframe_times.add((branch, turn, tick))
def keyframe_insert(self, branch: str, turn: int, tick: int):
self._new_keyframe_times.add((branch, turn, tick))
def keyframes_dump(self):
yield from self.call_one("keyframes_dump")
def keyframes_graphs(self):
unpack = self.unpack
for graph, branch, turn, tick in self.call_one(
"keyframes_graphs_list"
):
yield unpack(graph), branch, turn, tick
def get_keyframe_graph(
self, graph: Key, branch: str, turn: int, tick: int
):
unpack = self.unpack
stuff = self.call_one(
"get_keyframe_graph", self.pack(graph), branch, turn, tick
)
if not stuff:
raise KeyError(f"No keyframe for {graph} at {branch, turn, tick}")
nodes, edges, graph_val = stuff[0]
return unpack(nodes), unpack(edges), unpack(graph_val)
def get_all_keyframe_graphs(self, branch, turn, tick):
unpack = self.unpack
for graph, nodes, edges, graph_val in self.call_one(
"all_graphs_in_keyframe", branch, turn, tick
):
yield (
unpack(graph),
unpack(nodes),
unpack(edges),
unpack(graph_val),
)
def graph_type(self, graph):
"""What type of graph is this?"""
graph = self.pack(graph)
return self.call_one("graph_type", graph)[0][0]
def have_branch(self, branch):
"""Return whether the branch thus named exists in the database."""
return bool(self.call_one("ctbranch", branch)[0][0])
def all_branches(self):
"""Return all the branch data in tuples of (branch, parent,
parent_turn).
"""
return self.call_one("branches_dump")
def global_get(self, key):
"""Return the value for the given key in the ``globals`` table."""
key = self.pack(key)
r = self.call_one("global_get", key)[0]
if r is None:
raise KeyError("Not set")
return self.unpack(r[0])
def global_items(self):
"""Iterate over (key, value) pairs in the ``globals`` table."""
unpack = self.unpack
dumped = self.call_one("global_dump")
for k, v in dumped:
yield (unpack(k), unpack(v))
def get_branch(self):
v = self.call_one("global_get", self.pack("branch"))[0]
if v is None:
return self.globl["main_branch"]
return self.unpack(v[0])
def get_turn(self):
v = self.call_one("global_get", self.pack("turn"))[0]
if v is None:
return 0
return self.unpack(v[0])
def get_tick(self):
v = self.call_one("global_get", self.pack("tick"))[0]
if v is None:
return 0
return self.unpack(v[0])
def global_set(self, key, value):
"""Set ``key`` to ``value`` globally (not at any particular branch or
revision)
"""
(key, value) = map(self.pack, (key, value))
try:
return self.call_one("global_insert", key, value)
except IntegrityError:
try:
return self.call_one("global_update", value, key)
except IntegrityError:
self.commit()
return self.call_one("global_update", value, key)
def global_del(self, key):
"""Delete the global record for the key."""
key = self.pack(key)
return self.call_one("global_del", key)
def new_branch(self, branch, parent, parent_turn, parent_tick):
"""Declare that the ``branch`` is descended from ``parent`` at
``parent_turn``, ``parent_tick``
"""
return self.call_one(
"branches_insert",
branch,
parent,
parent_turn,
parent_tick,
parent_turn,
parent_tick,
)
def update_branch(
self, branch, parent, parent_turn, parent_tick, end_turn, end_tick
):
return self.call_one(
"update_branches",
parent,
parent_turn,
parent_tick,
end_turn,
end_tick,
branch,
)
def set_branch(
self, branch, parent, parent_turn, parent_tick, end_turn, end_tick
):
try:
self.call_one(
"branches_insert",
branch,
parent,
parent_turn,
parent_tick,
end_turn,
end_tick,
)
except IntegrityError:
try:
self.update_branch(
branch,
parent,
parent_turn,
parent_tick,
end_turn,
end_tick,
)
except IntegrityError:
self.commit()
self.update_branch(
branch,
parent,
parent_turn,
parent_tick,
end_turn,
end_tick,
)
def new_turn(self, branch, turn, end_tick=0, plan_end_tick=0):
return self.call_one(
"turns_insert", branch, turn, end_tick, plan_end_tick
)
def update_turn(self, branch, turn, end_tick, plan_end_tick):
return self.call_one(
"update_turns", end_tick, plan_end_tick, branch, turn
)
def set_turn(self, branch, turn, end_tick, plan_end_tick):
try:
return self.call_one(
"turns_insert", branch, turn, end_tick, plan_end_tick
)
except IntegrityError:
return self.call_one(
"update_turns", end_tick, plan_end_tick, branch, turn
)
def set_turn_completed(self, branch, turn):
try:
return self.call_one("turns_completed_insert", branch, turn)
except IntegrityError:
try:
return self.call_one("turns_completed_update", turn, branch)
except IntegrityError:
self.commit()
return self.call_one("turns_completed_update", turn, branch)
def turns_dump(self):
return self.call_one("turns_dump")
def graph_val_dump(self) -> Iterator[GraphValRowType]:
"""Yield the entire contents of the graph_val table."""
self._flush_graph_val()
unpack = self.unpack
for graph, key, branch, turn, tick, value in self.call_one(
"graph_val_dump"
):
yield (
unpack(graph),
unpack(key),
branch,
turn,
tick,
unpack(value),
)
def load_graph_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[GraphValRowType]:
if (turn_to is None) ^ (tick_to is None):
raise ValueError("I need both or neither of turn_to and tick_to")
self._flush_graph_val()
pack = self.pack
unpack = self.unpack
if turn_to is None:
it = self.call_one(
"load_graph_val_tick_to_end",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
)
else:
it = self.call_one(
"load_graph_val_tick_to_tick",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
turn_to,
turn_to,
tick_to,
)
for key, turn, tick, value in it:
yield graph, unpack(key), branch, turn, tick, unpack(value)
def _flush_graph_val(self):
"""Send all new and changed graph values to the database."""
if not self._graphvals2set:
return
pack = self.pack
self.call_many(
"graph_val_insert",
(
(pack(graph), pack(key), branch, turn, tick, pack(value))
for (
graph,
key,
branch,
turn,
tick,
value,
) in self._graphvals2set
),
)
self._graphvals2set = []
def graph_val_set(self, graph, key, branch, turn, tick, value):
if (branch, turn, tick) in self._btts:
raise TimeError
self._btts.add((branch, turn, tick))
self._graphvals2set.append((graph, key, branch, turn, tick, value))
def graph_val_del_time(self, branch, turn, tick):
self._flush_graph_val()
self.call_one("graph_val_del_time", branch, turn, tick)
self._btts.discard((branch, turn, tick))
def graphs_types(
self,
branch: str,
turn_from: int,
tick_from: int,
turn_to: int = None,
tick_to: int = None,
):
unpack = self.unpack
if turn_to is None:
if tick_to is not None:
raise ValueError("Need both or neither of turn_to and tick_to")
for graph, turn, tick, typ in self.call_one(
"graphs_after", branch, turn_from, turn_from, tick_from
):
yield unpack(graph), branch, turn, tick, typ
return
else:
if tick_to is None:
raise ValueError("Need both or neither of turn_to and tick_to")
for graph, turn, tick, typ in self.call_one(
"graphs_between",
branch,
turn_from,
turn_from,
tick_from,
turn_to,
turn_to,
tick_to,
):
yield unpack(graph), branch, turn, tick, typ
def graphs_dump(self):
unpack = self.unpack
for graph, branch, turn, tick, typ in self.call_one("graphs_dump"):
yield unpack(graph), branch, turn, tick, typ
def graphs_insert(self, graph, branch, turn, tick, typ):
self.call_one(
"graphs_insert", self.pack(graph), branch, turn, tick, typ
)
def _flush_nodes(self):
if not self._nodes2set:
return
pack = self.pack
self.call_many(
"nodes_insert",
(
(pack(graph), pack(node), branch, turn, tick, bool(extant))
for (
graph,
node,
branch,
turn,
tick,
extant,
) in self._nodes2set
),
)
self._nodes2set = []
def exist_node(self, graph, node, branch, turn, tick, extant):
"""Declare that the node exists or doesn't.
Inserts a new record or updates an old one, as needed.
"""
if (branch, turn, tick) in self._btts:
raise TimeError
self._btts.add((branch, turn, tick))
self._nodes2set.append((graph, node, branch, turn, tick, extant))
def nodes_del_time(self, branch, turn, tick):
self._flush_nodes()
self.call_one("nodes_del_time", branch, turn, tick)
self._btts.discard((branch, turn, tick))
def nodes_dump(self) -> Iterator[NodeRowType]:
"""Dump the entire contents of the nodes table."""
self._flush_nodes()
unpack = self.unpack
for graph, node, branch, turn, tick, extant in self.call_one(
"nodes_dump"
):
yield (
unpack(graph),
unpack(node),
branch,
turn,
tick,
bool(extant),
)
def iter_nodes(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[NodeRowType]:
if (turn_to is None) ^ (tick_to is None):
raise TypeError("I need both or neither of turn_to and tick_to")
self._flush_nodes()
pack = self.pack
unpack = self.unpack
if turn_to is None:
it = self.call_one(
"load_nodes_tick_to_end",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
)
else:
it = self.call_one(
"load_nodes_tick_to_tick",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
turn_to,
turn_to,
tick_to,
)
for node, turn, tick, extant in it:
yield graph, unpack(node), branch, turn, tick, extant
def load_nodes(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> list[NodeRowType]:
return list(
self.iter_nodes(
graph, branch, turn_from, tick_from, turn_to, tick_to
)
)
def node_val_dump(self) -> Iterator[NodeValRowType]:
"""Yield the entire contents of the node_val table."""
self._flush_node_val()
unpack = self.unpack
for graph, node, key, branch, turn, tick, value in self.call_one(
"node_val_dump"
):
yield (
unpack(graph),
unpack(node),
unpack(key),
branch,
turn,
tick,
unpack(value),
)
def iter_node_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[NodeValRowType]:
if (turn_to is None) ^ (tick_to is None):
raise TypeError("I need both or neither of turn_to and tick_to")
self._flush_node_val()
pack = self.pack
unpack = self.unpack
if turn_to is None:
it = self.call_one(
"load_node_val_tick_to_end",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
)
else:
it = self.call_one(
"load_node_val_tick_to_tick",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
turn_to,
turn_to,
tick_to,
)
for node, key, turn, tick, value in it:
yield (
graph,
unpack(node),
unpack(key),
branch,
turn,
tick,
unpack(value),
)
def load_node_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
):
return list(
self.iter_node_val(
graph, branch, turn_from, tick_from, turn_to, tick_to
)
)
def _flush_node_val(self):
if not self._nodevals2set:
return
pack = self.pack
self.call_many(
"node_val_insert",
(
(
pack(graph),
pack(node),
pack(key),
branch,
turn,
tick,
pack(value),
)
for (
graph,
node,
key,
branch,
turn,
tick,
value,
) in self._nodevals2set
),
)
self._nodevals2set = []
def node_val_set(self, graph, node, key, branch, turn, tick, value):
"""Set a key-value pair on a node at a specific branch and revision"""
if (branch, turn, tick) in self._btts:
raise TimeError
self._btts.add((branch, turn, tick))
self._nodevals2set.append(
(graph, node, key, branch, turn, tick, value)
)
def node_val_del_time(self, branch, turn, tick):
self._flush_node_val()
self.call_one("node_val_del_time", branch, turn, tick)
self._btts.discard((branch, turn, tick))
def edges_dump(self) -> Iterator[EdgeRowType]:
"""Dump the entire contents of the edges table."""
self._flush_edges()
unpack = self.unpack
for (
graph,
orig,
dest,
idx,
branch,
turn,
tick,
extant,
) in self.call_one("edges_dump"):
yield (
unpack(graph),
unpack(orig),
unpack(dest),
idx,
branch,
turn,
tick,
bool(extant),
)
def iter_edges(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[EdgeRowType]:
if (turn_to is None) ^ (tick_to is None):
raise ValueError("I need both or neither of turn_to and tick_to")
self._flush_edge_val()
pack = self.pack
unpack = self.unpack
if turn_to is None:
it = self.call_one(
"load_edges_tick_to_end",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
)
else:
it = self.call_one(
"load_edges_tick_to_tick",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
turn_to,
turn_to,
tick_to,
)
for orig, dest, idx, turn, tick, extant in it:
yield (
graph,
unpack(orig),
unpack(dest),
idx,
branch,
turn,
tick,
extant,
)
def load_edges(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> list[EdgeRowType]:
return list(
self.iter_edges(
graph, branch, turn_from, tick_from, turn_to, tick_to
)
)
def _pack_edge2set(self, tup):
graph, orig, dest, idx, branch, turn, tick, extant = tup
pack = self.pack
return (
pack(graph),
pack(orig),
pack(dest),
idx,
branch,
turn,
tick,
extant,
)
def _flush_edges(self):
start = monotonic()
if not self._edges2set:
return
self.call_many(
"edges_insert", map(self._pack_edge2set, self._edges2set)
)
self._edges2set = []
QueryEngine.flush_edges_t += monotonic() - start
def exist_edge(self, graph, orig, dest, idx, branch, turn, tick, extant):
"""Declare whether or not this edge exists."""
if (branch, turn, tick) in self._btts:
raise TimeError
self._btts.add((branch, turn, tick))
self._edges2set.append(
(graph, orig, dest, idx, branch, turn, tick, extant)
)
def edges_del_time(self, branch, turn, tick):
self._flush_edges()
self.call_one("edges_del_time", branch, turn, tick)
self._btts.discard((branch, turn, tick))
def edge_val_dump(self) -> Iterator[EdgeValRowType]:
"""Yield the entire contents of the edge_val table."""
self._flush_edge_val()
unpack = self.unpack
for (
graph,
orig,
dest,
idx,
key,
branch,
turn,
tick,
value,
) in self.call_one("edge_val_dump"):
yield (
unpack(graph),
unpack(orig),
unpack(dest),
idx,
unpack(key),
branch,
turn,
tick,
unpack(value),
)
def iter_edge_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[EdgeValRowType]:
if (turn_to is None) ^ (tick_to is None):
raise TypeError("I need both or neither of turn_to and tick_to")
self._flush_edge_val()
pack = self.pack
unpack = self.unpack
if turn_to is None:
it = self.call_one(
"load_edge_val_tick_to_end",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
)
else:
it = self.call_one(
"load_edge_val_tick_to_tick",
pack(graph),
branch,
turn_from,
turn_from,
tick_from,
turn_to,
turn_to,
tick_to,
)
for orig, dest, idx, key, turn, tick, value in it:
yield (
graph,
unpack(orig),
unpack(dest),
idx,
unpack(key),
branch,
turn,
tick,
unpack(value),
)
def load_edge_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
):
return list(
self.iter_edge_val(
graph, branch, turn_from, tick_from, turn_to, tick_to
)
)
def _pack_edgeval2set(self, tup):
graph, orig, dest, idx, key, branch, turn, tick, value = tup
pack = self.pack
return (
pack(graph),
pack(orig),
pack(dest),
idx,
pack(key),
branch,
turn,
tick,
pack(value),
)
def _flush_edge_val(self):
if not self._edgevals2set:
return
self.call_many(
"edge_val_insert", map(self._pack_edgeval2set, self._edgevals2set)
)
self._edgevals2set = []
def edge_val_set(
self, graph, orig, dest, idx, key, branch, turn, tick, value
):
"""Set this key of this edge to this value."""
if (branch, turn, tick) in self._btts:
raise TimeError
self._btts.add((branch, turn, tick))
self._edgevals2set.append(
(graph, orig, dest, idx, key, branch, turn, tick, value)
)
def edge_val_del_time(self, branch, turn, tick):
self._flush_edge_val()
self.call_one("edge_val_del_time", branch, turn, tick)
self._btts.discard((branch, turn, tick))
def plans_dump(self):
return self.call_one("plans_dump")
def plans_insert(self, plan_id, branch, turn, tick):
return self.call_one("plans_insert", plan_id, branch, turn, tick)
def plans_insert_many(self, many):
return self.call_many("plans_insert", many)
def plan_ticks_insert(self, plan_id, turn, tick):
return self.call_one("plan_ticks_insert", plan_id, turn, tick)
def plan_ticks_insert_many(self, many):
return self.call_many("plan_ticks_insert", many)
def plan_ticks_dump(self):
return self.call_one("plan_ticks_dump")
def flush(self):
"""Put all pending changes into the SQL transaction."""
with self._holder.lock:
self._inq.put(("echo", "ready"))
readied = self._outq.get()
assert readied == "ready", readied
self._flush()
self._inq.put(("echo", "flushed"))
flushed = self._outq.get()
assert flushed == "flushed", flushed
def _flush(self):
pack = self.pack
put = self._inq.put
if self._nodes2set:
put(
(
"silent",
"many",
"nodes_insert",
[
(
pack(graph),
pack(node),
branch,
turn,
tick,
bool(extant),
)
for (
graph,
node,
branch,
turn,
tick,
extant,
) in self._nodes2set
],
)
)
self._nodes2set = []
if self._edges2set:
put(
(
"silent",
"many",
"edges_insert",
list(map(self._pack_edge2set, self._edges2set)),
)
)
self._edges2set = []
if self._graphvals2set:
put(
(
"silent",
"many",
"graph_val_insert",
[
(
pack(graph),
pack(key),
branch,
turn,
tick,
pack(value),
)
for (
graph,
key,
branch,
turn,
tick,
value,
) in self._graphvals2set
],
)
)
self._graphvals2set = []
if self._nodevals2set:
put(
(
"silent",
"many",
"node_val_insert",
[
(
pack(graph),
pack(node),
pack(key),
branch,
turn,
tick,
pack(value),
)
for (
graph,
node,
key,
branch,
turn,
tick,
value,
) in self._nodevals2set
],
)
)
self._nodevals2set = []
if self._edgevals2set:
put(
(
"silent",
"many",
"edge_val_insert",
list(map(self._pack_edgeval2set, self._edgevals2set)),
)
)
self._edgevals2set = []
if self._new_keyframe_times:
put(
(
"silent",
"many",
"keyframes_insert",
list(self._new_keyframe_times),
)
)
self._new_keyframe_times = set()
if self._new_keyframes:
put(
(
"silent",
"many",
"keyframes_graphs_insert",
[
(
pack(graph),
branch,
turn,
tick,
pack(nodes),
pack(edges),
pack(graph_val),
)
for (
graph,
branch,
turn,
tick,
nodes,
edges,
graph_val,
) in self._new_keyframes
],
)
)
self._new_keyframes = []
def commit(self):
"""Commit the transaction"""
self._inq.put("commit")
assert self.echo("committed") == "committed"
def close(self):
"""Commit the transaction, then close the connection"""
self._inq.put("shutdown")
self._holder.existence_lock.acquire()
self._holder.existence_lock.release()
self._t.join()
def initdb(self):
with self._holder.lock:
self._inq.put("initdb")
ret = self._outq.get()
if isinstance(ret, Exception):
raise ret
self.globl = GlobalKeyValueStore(self)
if "main_branch" not in self.globl:
self.globl["main_branch"] = "trunk"
if "branch" not in self.globl:
self.globl["branch"] = self.globl["main_branch"]
if "turn" not in self.globl:
self.globl["turn"] = 0
if "tick" not in self.globl:
self.globl["tick"] = 0
def truncate_all(self):
"""Delete all data from every table"""
for table in self.tables:
try:
self.call_one("truncate_" + table)
except OperationalError:
pass # table wasn't created yet
self.commit()
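
The flush() and commit() methods above synchronize with a separate connection-holder thread by pushing ("echo", ...) messages down self._inq and waiting for the same payload to come back on self._outq, which guarantees everything queued earlier has been processed. Below is a minimal, self-contained sketch of that handshake pattern; EchoWorker, its attribute names, and the sample payload are invented for illustration and are not lisien's actual holder class.

import queue
import threading


class EchoWorker:
    def __init__(self):
        self.inq = queue.Queue()
        self.outq = queue.Queue()
        self.rows = []
        threading.Thread(target=self._run, daemon=True).start()

    def _run(self):
        while True:
            msg = self.inq.get()
            if msg == "shutdown":
                return
            kind, payload = msg
            if kind == "echo":
                # Everything queued before this message has been handled,
                # so echoing the payload tells the caller the backlog is drained.
                self.outq.put(payload)
            else:  # "write": a real holder would execute SQL here
                self.rows.append(payload)

    def barrier(self):
        self.inq.put(("echo", "ready"))
        assert self.outq.get() == "ready"


worker = EchoWorker()
worker.inq.put(("write", ("some", "row")))
worker.barrier()          # returns only once the write above was processed
assert worker.rows == [("some", "row")]
worker.inq.put("shutdown")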
|
class QueryEngine(AbstractQueryEngine):
def __init__(
self, dbstring, connect_args, pack=None, unpack=None, gather=None
):
pass
def pack(s: str) -> bytes:
pass
def unpack(b: bytes) -> Any:
pass
def echo(self, string):
pass
def call_one(self, string, *args, **kwargs):
pass
def call_many(self, string, args):
pass
def execute(self, stmt):
pass
def new_graph(self, graph, branch, turn, tick, typ):
'''Declare a new graph by this name of this type.'''
pass
def keyframe_graph_insert(
self, graph, branch, turn, tick, nodes, edges, graph_val
):
pass
def keyframe_insert(self, branch: str, turn: int, tick: int):
pass
def keyframes_dump(self):
pass
def keyframes_graphs(self):
pass
def get_keyframe_graph(
self, graph: Key, branch: str, turn: int, tick: int
):
pass
def get_all_keyframe_graphs(self, branch, turn, tick):
pass
def graph_type(self, graph):
'''What type of graph is this?'''
pass
def have_branch(self, branch):
'''Return whether the branch thus named exists in the database.'''
pass
def all_branches(self):
'''Return all the branch data in tuples of (branch, parent,
parent_turn).
'''
pass
def global_get(self, key):
'''Return the value for the given key in the ``globals`` table.'''
pass
def global_items(self):
'''Iterate over (key, value) pairs in the ``globals`` table.'''
pass
def get_branch(self):
pass
def get_turn(self):
pass
def get_tick(self):
pass
def global_set(self, key, value):
'''Set ``key`` to ``value`` globally (not at any particular branch or
revision)
'''
pass
def global_del(self, key):
'''Delete the global record for the key.'''
pass
def new_branch(self, branch, parent, parent_turn, parent_tick):
'''Declare that the ``branch`` is descended from ``parent`` at
``parent_turn``, ``parent_tick``
'''
pass
def update_branch(
self, branch, parent, parent_turn, parent_tick, end_turn, end_tick
):
pass
def set_branch(
self, branch, parent, parent_turn, parent_tick, end_turn, end_tick
):
pass
def new_turn(self, branch, turn, end_tick=0, plan_end_tick=0):
pass
def update_turn(self, branch, turn, end_tick, plan_end_tick):
pass
def set_turn(self, branch, turn, end_tick, plan_end_tick):
pass
def set_turn_completed(self, branch, turn):
pass
def turns_dump(self):
pass
def graph_val_dump(self) -> Iterator[GraphValRowType]:
'''Yield the entire contents of the graph_val table.'''
pass
def load_graph_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[GraphValRowType]:
pass
def _flush_graph_val(self):
'''Send all new and changed graph values to the database.'''
pass
def graph_val_set(self, graph, key, branch, turn, tick, value):
pass
def graph_val_del_time(self, branch, turn, tick):
pass
def graphs_types(
self,
branch: str,
turn_from: int,
tick_from: int,
turn_to: int = None,
tick_to: int = None,
):
pass
def graphs_dump(self):
pass
def graphs_insert(self, graph, branch, turn, tick, typ):
pass
def _flush_nodes(self):
pass
def exist_node(self, graph, node, branch, turn, tick, extant):
'''Declare that the node exists or doesn't.
Inserts a new record or updates an old one, as needed.
'''
pass
def nodes_del_time(self, branch, turn, tick):
pass
def nodes_dump(self) -> Iterator[NodeRowType]:
'''Dump the entire contents of the nodes table.'''
pass
def iter_nodes(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[NodeRowType]:
pass
def load_nodes(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> list[NodeRowType]:
pass
def node_val_dump(self) -> Iterator[NodeValRowType]:
'''Yield the entire contents of the node_val table.'''
pass
def iter_node_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[NodeValRowType]:
pass
def load_node_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
):
pass
def _flush_node_val(self):
pass
def node_val_set(self, graph, node, key, branch, turn, tick, value):
'''Set a key-value pair on a node at a specific branch and revision'''
pass
def node_val_del_time(self, branch, turn, tick):
pass
def edges_dump(self) -> Iterator[EdgeRowType]:
'''Dump the entire contents of the edges table.'''
pass
def iter_edges(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[EdgeRowType]:
pass
def load_edges(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> list[EdgeRowType]:
pass
def _pack_edge2set(self, tup):
pass
def _flush_edges(self):
pass
def exist_edge(self, graph, orig, dest, idx, branch, turn, tick, extant):
'''Declare whether or not this edge exists.'''
pass
def edges_del_time(self, branch, turn, tick):
pass
def edge_val_dump(self) -> Iterator[EdgeValRowType]:
'''Yield the entire contents of the edge_val table.'''
pass
def iter_edge_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
) -> Iterator[EdgeValRowType]:
pass
def load_edge_val(
self, graph, branch, turn_from, tick_from, turn_to=None, tick_to=None
):
pass
def _pack_edgeval2set(self, tup):
pass
def _flush_edge_val(self):
pass
def edge_val_set(
self, graph, orig, dest, idx, key, branch, turn, tick, value
):
'''Set this key of this edge to this value.'''
pass
def edge_val_del_time(self, branch, turn, tick):
pass
def plans_dump(self):
pass
def plans_insert(self, plan_id, branch, turn, tick):
pass
def plans_insert_many(self, many):
pass
def plan_ticks_insert(self, plan_id, turn, tick):
pass
def plan_ticks_insert_many(self, many):
pass
def plan_ticks_dump(self):
pass
def flush(self):
'''Put all pending changes into the SQL transaction.'''
pass
	def _flush(self):
pass
def commit(self):
'''Commit the transaction'''
pass
def close(self):
'''Commit the transaction, then close the connection'''
pass
def initdb(self):
pass
def truncate_all(self):
'''Delete all data from every table'''
pass
| 79 | 23 | 12 | 0 | 12 | 0 | 2 | 0.03 | 1 | 17 | 3 | 1 | 76 | 16 | 76 | 135 | 1,063 | 86 | 946 | 217 | 829 | 32 | 423 | 165 | 343 | 8 | 1 | 2 | 148 |
146,466 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.MethodStore
|
class MethodStore(FunctionStore):
def __init__(self, engine):
self.engine = engine
super().__init__("method.py")
def __getattr__(self, item):
return MethodType(super().__getattr__(item), self.engine)
|
class MethodStore(FunctionStore):
def __init__(self, engine):
pass
def __getattr__(self, item):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 2 | 1 | 2 | 13 | 7 | 1 | 6 | 4 | 3 | 0 | 6 | 4 | 3 | 1 | 2 | 0 | 2 |
146,467 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.StringStore
|
class StringStore(MutableMapping, Signal):
language = LanguageDescriptor()
def __init__(self, query, prefix, lang="eng"):
"""Store the engine, the name of the database table to use, and the
language code.
"""
super().__init__()
self.query = query
self._prefix = prefix
self._language = lang
self._load_language(lang)
def _load_language(self, lang):
if hasattr(self, "_cache"):
with open(
os.path.join(self._prefix, self.language + ".json"), "w"
) as outf:
json.dump(self._cache, outf)
try:
with open(os.path.join(self._prefix, lang + ".json"), "r") as inf:
self._cache = json.load(inf)
except FileNotFoundError:
self._cache = {}
self._language = lang
def __iter__(self):
return iter(self._cache)
def __len__(self):
return len(self._cache)
def __getitem__(self, k):
return self._cache[k]
def __setitem__(self, k, v):
"""Set the value of a string for the current language."""
self._cache[k] = v
self.send(self, key=k, val=v)
def __delitem__(self, k):
"""Delete the string from the current language, and remove it from the
cache.
"""
del self._cache[k]
self.send(self, key=k, val=None)
def lang_items(self, lang=None):
"""Yield pairs of (id, string) for the given language."""
if lang is not None and self._language != lang:
self._load_language(lang)
yield from self._cache.items()
def save(self, reimport=False):
if not os.path.exists(self._prefix):
os.mkdir(self._prefix)
with open(
os.path.join(self._prefix, self._language + ".json"), "w"
) as outf:
json.dump(self._cache, outf, indent=4, sort_keys=True)
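
StringStore._load_language above keeps one JSON file per language under self._prefix: switching languages first dumps the current cache to the old language's .json file, then loads the new language's file, falling back to an empty dict when it does not exist yet. The sketch below reproduces just that round-trip; PlainStringStore is an invented stand-in without the Signal/MutableMapping plumbing, and the directory and strings are made up.

import json
import os
import tempfile


class PlainStringStore:
    def __init__(self, prefix, lang="eng"):
        self.prefix = prefix
        self.lang = lang
        self.cache = self._read(lang)

    def _read(self, lang):
        try:
            with open(os.path.join(self.prefix, lang + ".json")) as inf:
                return json.load(inf)
        except FileNotFoundError:
            return {}

    def switch_language(self, lang):
        # Persist the language we were editing, then load the new one.
        with open(os.path.join(self.prefix, self.lang + ".json"), "w") as outf:
            json.dump(self.cache, outf)
        self.lang = lang
        self.cache = self._read(lang)


prefix = tempfile.mkdtemp()
store = PlainStringStore(prefix)
store.cache["greeting"] = "Hello!"
store.switch_language("esp")        # writes eng.json, starts a fresh cache
assert store.cache == {}
store.switch_language("eng")        # eng.json round-trips
assert store.cache["greeting"] == "Hello!"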
|
class StringStore(MutableMapping, Signal):
def __init__(self, query, prefix, lang="eng"):
'''Store the engine, the name of the database table to use, and the
language code.
'''
pass
def _load_language(self, lang):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __getitem__(self, k):
pass
def __setitem__(self, k, v):
'''Set the value of a string for the current language.'''
pass
def __delitem__(self, k):
'''Delete the string from the current language, and remove it from the
cache.
'''
pass
def lang_items(self, lang=None):
'''Yield pairs of (id, string) for the given language.'''
pass
def save(self, reimport=False):
pass
| 10 | 4 | 6 | 0 | 5 | 1 | 1 | 0.19 | 2 | 2 | 0 | 0 | 9 | 4 | 9 | 50 | 62 | 11 | 43 | 18 | 33 | 8 | 39 | 15 | 29 | 3 | 7 | 2 | 13 |
146,468 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.TabUnparser
|
class TabUnparser(Unparser):
def fill(self, text=""):
__doc__ = Unparser.fill.__doc__
self.f.write("\n" + "\t" * self._indent + text)
|
class TabUnparser(Unparser):
def fill(self, text=""):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 4 | 0 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 1 | 0 | 1 |
146,469 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/calendar.py
|
elide.calendar.CalendarToggleButton
|
class CalendarToggleButton(CalendarWidget, ToggleButton):
index = None
true_text = StringProperty("True")
false_text = StringProperty("False")
def on_state(self, *_):
self.val = self.state == "down"
self.text = self.true_text if self.val else self.false_text
|
class CalendarToggleButton(CalendarWidget, ToggleButton):
def on_state(self, *_):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 2 | 0 | 2 | 0 | 0 | 0 | 1 | 2 | 1 | 6 | 8 | 1 | 7 | 7 | 5 | 0 | 7 | 7 | 5 | 2 | 2 | 0 | 2 |
146,470 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/calendar.py
|
elide.calendar.CalendarTextInput
|
class CalendarTextInput(CalendarWidget, TextInput):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._trigger_parse_text = Clock.create_trigger(self._parse_text)
def _parse_text(self, *_):
from ast import literal_eval
try:
v = literal_eval(self.text)
except (TypeError, ValueError, SyntaxError):
v = self.text
self.val = self.hint_text = v
self.text = ""
|
class CalendarTextInput(CalendarWidget, TextInput):
def __init__(self, **kwargs):
pass
def _parse_text(self, *_):
pass
| 3 | 0 | 6 | 1 | 6 | 0 | 2 | 0 | 2 | 4 | 0 | 0 | 2 | 4 | 2 | 7 | 14 | 2 | 12 | 8 | 8 | 0 | 12 | 8 | 8 | 2 | 2 | 1 | 3 |
146,471 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_python_editor.py
|
elide.tests.test_python_editor.PythonEditorTest
|
class PythonEditorTest(ELiDEAppTest):
def setUp(self):
super().setUp()
with Engine(self.prefix) as eng:
sickle.install(eng)
def _get_actions_box(self):
app = self.app
idle_until(
lambda: hasattr(app, "mainscreen")
and app.mainscreen.mainview
and app.mainscreen.statpanel
and hasattr(app.mainscreen, "gridview")
)
app.funcs.toggle()
idle_until(
lambda: "actions" in app.funcs.ids, 100, "Never got actions box"
)
actions_box = app.funcs.ids.actions
idle_until(lambda: actions_box.editor, 100, "Never got FuncEditor")
idle_until(lambda: actions_box.storelist, 100, "Never got StoreList")
idle_until(
lambda: actions_box.storelist.data, 100, "Never got StoreList data"
)
return actions_box
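
This test, like the other GUI tests in this dump, leans on idle_until(condition, timeout, message), which pumps the Kivy event loop until the condition holds or the timeout expires. The helper below is an invented, wall-clock stand-in for the positional-call form used here, only to show the contract; it is not elide's actual implementation, which advances frames instead of sleeping.

import time


def poll_until(condition, timeout=100, message="timed out"):
    # timeout counts polling rounds, mirroring the frame counts the tests
    # pass to idle_until.
    for _ in range(timeout):
        if condition():
            return
        time.sleep(0.01)
    raise TimeoutError(message)


box = []
poll_until(lambda: True)                      # returns immediately
try:
    poll_until(lambda: box, timeout=3, message="box never filled")
except TimeoutError as err:
    print(err)                                # "box never filled"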
|
class PythonEditorTest(ELiDEAppTest):
def setUp(self):
pass
def _get_actions_box(self):
pass
| 3 | 0 | 12 | 0 | 12 | 0 | 1 | 0 | 1 | 1 | 0 | 2 | 2 | 0 | 2 | 6 | 25 | 1 | 24 | 6 | 21 | 0 | 15 | 5 | 12 | 1 | 2 | 1 | 2 |
146,472 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_gridboard.py
|
elide.tests.test_gridboard.SwitchGridTest
|
class SwitchGridTest(ELiDEAppTest):
def test_character_switch_grid(self):
with Engine(self.prefix) as eng:
eng.add_character("physical", nx.grid_2d_graph(10, 1))
eng.add_character("tall", nx.grid_2d_graph(1, 10))
app = self.app
self.Window.add_widget(app.build())
idle_until(
lambda: hasattr(app, "mainscreen")
and app.mainscreen.mainview
and app.mainscreen.statpanel
and hasattr(app.mainscreen, "gridview")
)
app.mainscreen.charmenu.toggle_gridview()
idle_until(
lambda: app.mainscreen.gridview in app.mainscreen.mainview.children
)
idle_until(lambda: app.mainscreen.gridview.board.children)
assert len(app.mainscreen.gridview.board.spot) == 10
assert all(
spot.y == 0 for spot in app.mainscreen.gridview.board.spot.values()
)
idle_until(
lambda: not all(
spot.x == 0
for spot in app.mainscreen.gridview.board.spot.values()
),
100,
)
app.character_name = "tall"
idle_until(
lambda: all(
spot.x == 0
for spot in app.mainscreen.gridview.board.spot.values()
),
1000,
"Never got the new board",
)
idle_until(
lambda: not all(
spot.y == 0
for spot in app.mainscreen.gridview.board.spot.values()
),
1000,
"New board arranged weird",
)
|
class SwitchGridTest(ELiDEAppTest):
def test_character_switch_grid(self):
pass
| 2 | 0 | 45 | 0 | 45 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 5 | 46 | 0 | 46 | 4 | 44 | 0 | 17 | 3 | 15 | 1 | 2 | 1 | 1 |
146,473 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_gridboard.py
|
elide.tests.test_gridboard.GridBoardTest
|
class GridBoardTest(GraphicUnitTest):
def test_layout_grid(self):
spots_wide = 3
spots_tall = 3
spot_width = 32
spot_height = 32
graph = nx.grid_2d_graph(spots_wide, spots_tall)
char = CharacterFacade(graph)
char.place[1, 1].add_thing("something")
otherthing = char.place[2, 2].new_thing("otherthing")
assert len(char.thing) == 2
board = GridBoard(character=char)
self.Window.add_widget(GridBoardView(board=board))
while not (
all_spots_placed(board, char) and all_pawns_placed(board, char)
):
EventLoop.idle()
otherthing["location"] = (0, 0)
board.spot_plane.data = list(map(board.make_spot, char.place.values()))
board.spot_plane.redraw()
board.pawn_plane.data = list(map(board.make_pawn, char.thing.values()))
def arranged():
for x in range(spots_wide):
for y in range(spots_tall):
spot = board.spot[x, y]
if spot.x != x * spot_width or spot.y != y * spot_height:
return False
return True
idle_until(arranged, 100)
idle_until(lambda: board.pawn_plane._stack_index)
this = board.pawn["something"]
that = board.pawn["otherthing"]
print(this.pos, board.spot[1, 1].pos)
idle_until(lambda: this.pos == board.spot[1, 1].pos)
idle_until(lambda: that.pos == board.spot[0, 0].pos)
assert this.x == board.spot[1, 1].x
assert this.y == board.spot[1, 1].y
assert that.x == board.spot[0, 0].x
assert that.y == board.spot[0, 0].y
|
class GridBoardTest(GraphicUnitTest):
def test_layout_grid(self):
pass
def arranged():
pass
| 3 | 0 | 24 | 1 | 23 | 0 | 3 | 0 | 1 | 6 | 3 | 0 | 1 | 0 | 1 | 1 | 41 | 2 | 39 | 16 | 36 | 0 | 37 | 16 | 34 | 4 | 1 | 3 | 6 |
146,474 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_graphboard.py
|
elide.tests.test_graphboard.SwitchGraphTest
|
class SwitchGraphTest(ELiDEAppTest):
def test_character_switch_graph(self):
with Engine(self.prefix) as eng:
eng.add_character("physical", nx.grid_2d_graph(10, 1))
eng.add_character("tall", nx.grid_2d_graph(1, 10))
app = self.app
self.Window.add_widget(app.build())
idle_until(
lambda: hasattr(app, "mainscreen")
and app.mainscreen.mainview
and app.mainscreen.statpanel
and hasattr(app.mainscreen, "gridview")
)
idle_until(
lambda: app.mainscreen.boardview
in app.mainscreen.mainview.children
)
idle_until(lambda: app.mainscreen.boardview.board.children)
print(
f"test_character_switch_graph got app {id(app)}, engine proxy {id(app.engine)}"
)
assert len(
set(
child.x
for child in app.mainscreen.boardview.board.stack_plane.children
)
) == len(app.mainscreen.boardview.board.stack_plane.children)
app.character_name = "tall"
def all_x_same():
if (
app.mainscreen.boardview.board is None
or app.mainscreen.boardview.board.stack_plane is None
or not app.mainscreen.boardview.board.spot
):
return False
first_x = next(
iter(app.mainscreen.boardview.board.spot.values())
).x
return all(
child.x == first_x
for child in app.mainscreen.boardview.board.spot.values()
)
idle_until(all_x_same, 100, "Never got the new board")
idle_until(
lambda: len(
set(
child.y
for child in app.mainscreen.boardview.board.stack_plane.children
)
)
== len(app.mainscreen.boardview.board.stack_plane.children),
100,
"New board arranged weird",
)
|
class SwitchGraphTest(ELiDEAppTest):
def test_character_switch_graph(self):
pass
def all_x_same():
pass
| 3 | 0 | 35 | 1 | 34 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 5 | 56 | 2 | 54 | 6 | 51 | 0 | 20 | 5 | 17 | 2 | 2 | 1 | 3 |
146,475 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_graphboard.py
|
elide.tests.test_graphboard.GraphBoardTest
|
class GraphBoardTest(GraphicUnitTest):
def test_layout_grid(self):
spots_wide = 3
spots_tall = 3
graph = nx.grid_2d_graph(spots_wide, spots_tall)
char = CharacterFacade(graph)
app = ELiDEApp()
spotlayout = TextureStackPlane()
arrowlayout = ArrowPlane()
board = GraphBoard(
app=app,
character=char,
stack_plane=spotlayout,
arrow_plane=arrowlayout,
)
board.engine = FakeEngineProxy()
spotlayout.pos = board.pos
board.bind(pos=spotlayout.setter("pos"))
spotlayout.size = board.size
board.bind(size=spotlayout.setter("size"))
board.add_widget(spotlayout)
arrowlayout.pos = board.pos
board.bind(pos=arrowlayout.setter("pos"))
arrowlayout.size = board.size
board.bind(size=arrowlayout.setter("size"))
board.add_widget(arrowlayout)
board.update()
boardview = GraphBoardView(board=board)
self.Window.add_widget(boardview)
@idle_until(timeout=1000, message="Never finished placing spots")
def all_spots_placed():
for x in range(spots_wide):
for y in range(spots_tall):
if (x, y) not in board.spot:
return False
return True
# Don't get too picky about the exact proportions of the grid; just make sure the
# spots are positioned logically with respect to one another
for name, spot in board.spot.items():
x, y = name
if x > 0:
assert spot.x > board.spot[x - 1, y].x
if y > 0:
assert spot.y > board.spot[x, y - 1].y
if x < spots_wide - 1:
assert spot.x < board.spot[x + 1, y].x
if y < spots_tall - 1:
assert spot.y < board.spot[x, y + 1].y
def test_select_arrow(self):
char = CharacterFacade()
char.add_place(0, _x=0.1, _y=0.1)
char.add_place(1, _x=0.2, _y=0.1)
char.add_portal(0, 1)
app = ELiDEApp()
board = GraphBoard(app=app, character=char)
boardview = GraphBoardView(board=board)
self.Window.add_widget(boardview)
idle_until(
lambda: board.arrow_plane, 100, "GraphBoard never got arrow_plane"
)
idle_until(
lambda: 0 in board.arrow and 1 in board.arrow[0],
100,
"GraphBoard never got arrow",
)
idle_until(
lambda: board.arrow_plane.data,
100,
"GraphBoard.arrow_plane.data never populated",
)
idle_until(
lambda: board.arrow_plane._bot_left_corner_xs.shape[0] > 0,
100,
"GraphBoard.arrow_plane never got bounding boxes",
)
ox, oy = board.spot[0].center
dx, dy = board.spot[1].center
motion = UnitTestTouch((ox + ((dx - ox) / 2)), dy)
motion.touch_down()
motion.touch_up()
idle_until(
lambda: app.selection == board.arrow[0][1],
100,
"Arrow not selected",
)
def test_select_spot(self):
char = CharacterFacade()
char.add_place(0, _x=0.1, _y=0.1)
app = ELiDEApp()
board = GraphBoard(app=app, character=char)
boardview = GraphBoardView(board=board)
self.Window.add_widget(boardview)
idle_until(lambda: 0 in board.spot)
x, y = board.spot[0].center
motion = UnitTestTouch(x, y)
motion.touch_down()
motion.touch_up()
assert app.selection == board.spot[0]
def test_select_pawn(self):
char = CharacterFacade()
char.add_place(0, _x=0.1, _y=0.1)
char.add_thing("that", location=0)
app = ELiDEApp()
board = GraphBoard(app=app, character=char)
boardview = GraphBoardView(board=board)
self.Window.add_widget(boardview)
idle_until(lambda: 0 in board.spot and "that" in board.pawn, 100)
motion = UnitTestTouch(*board.pawn["that"].center)
motion.touch_down()
motion.touch_up()
assert app.selection == board.pawn["that"]
def test_pawn_drag(self):
char = CharacterFacade()
char.add_place(0, _x=0.1, _y=0.1)
char.add_place(1, _x=0.2, _y=0.1)
char.add_thing("that", location=0)
app = ELiDEApp()
board = GraphBoard(app=app, character=char)
boardview = GraphBoardView(board=board)
self.Window.add_widget(boardview)
idle_until(
lambda: 0 in board.spot
and 1 in board.spot
and "that" in board.pawn
)
that = board.pawn["that"]
one = board.spot[1]
touch = UnitTestTouch(*that.center)
touch.touch_down()
dist_x = one.center_x - that.center_x
dist_y = one.center_y - that.center_y
for i in range(1, 11):
coef = 1 / i
x = one.center_x - coef * dist_x
y = one.center_y - coef * dist_y
touch.touch_move(x, y)
self.advance_frames(1)
touch.touch_move(*one.center)
self.advance_frames(1)
touch.touch_up(*one.center)
idle_until(lambda: that.pos != one.center, 100)
idle_until(lambda: that.proxy["location"] == 1, 100)
def test_spot_and_pawn_from_dummy(self):
char = CharacterFacade()
app = ELiDEApp()
board = GraphBoard(app=app, character=char)
board._connect_proxy_objects()
view = GraphBoardView(board=board)
idle_until(
lambda: view.plane is not None, 100, "Never made BoardScatterPlane"
)
idle_until(
lambda: board.stack_plane is not None, 100, "Never made StackPlane"
)
self.Window.add_widget(view)
dummy = Dummy(
name="hello",
paths=["atlas://rltiles/base/unseen"],
size=(32, 32),
pos=(0, 0),
)
board.add_widget(dummy)
idle_until(
lambda: dummy in board.children, 100, "Dummy didn't get to board"
)
dummy_name = dummy.name
view.spot_from_dummy(dummy)
idle_until(
lambda: dummy_name in char.node, 100, "Dummy didn't add place"
)
dummy2 = Dummy(
name="goodbye",
paths=["atlas://rltiles/base/unseen"],
pos=dummy.pos,
size=(32, 32),
)
dummy2_name = dummy2.name = "dummy2"
board.add_widget(dummy2)
idle_until(
lambda: dummy2 in board.children,
100,
"Dummy 2 didn't get to board",
)
idle_until(
lambda: board.stack_plane.data,
100,
"Dummy 2 didn't get into the board's stack_plane",
)
view.pawn_from_dummy(dummy2)
idle_until(
lambda: dummy2_name in char.thing, 100, "Dummy 2 didn't add thing"
)
idle_until(
lambda: dummy2_name in board.pawn,
100,
"Board didn't add pawn for dummy 2",
)
spot = board.spot[dummy_name]
idle_until(
lambda: board.pawn[dummy2_name].pos == (spot.right, spot.top),
100,
"Dummy 2 didn't get to dummy 1",
)
def test_pawn_add_new_place(self):
char = CharacterFacade()
app = ELiDEApp()
board = GraphBoard(app=app, character=char)
board._connect_proxy_objects()
boardview = GraphBoardView(board=board)
self.Window.add_widget(boardview)
idle_until(lambda: board.stack_plane)
char.add_place(1, _x=0.2, _y=0.2)
board.add_spot(1)
idle_until(lambda: 1 in board.spot, 100, "Didn't make spot")
char.add_thing("that", location=1)
idle_until(lambda: "that" in board.pawn, 100, "Didn't make pawn")
that = board.pawn["that"]
one = board.spot[1]
idle_until(
lambda: pos_near(*getattr(that, "pos", None), one.right, one.top),
100,
f"pawn did not locate within 100 ticks. "
f"Should be at {one.right, one.top}, is at {that.pos}",
)
|
class GraphBoardTest(GraphicUnitTest):
def test_layout_grid(self):
pass
@idle_until(timeout=1000, message="Never finished placing spots")
def all_spots_placed():
pass
def test_select_arrow(self):
pass
def test_select_spot(self):
pass
def test_select_pawn(self):
pass
def test_pawn_drag(self):
pass
def test_spot_and_pawn_from_dummy(self):
pass
def test_pawn_add_new_place(self):
pass
| 10 | 0 | 29 | 0 | 28 | 0 | 2 | 0.01 | 1 | 9 | 8 | 0 | 7 | 0 | 7 | 7 | 232 | 8 | 222 | 69 | 212 | 2 | 153 | 68 | 144 | 6 | 1 | 3 | 17 |
146,476 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_graphboard.py
|
elide.tests.test_graphboard.FakeEngineProxy
|
class FakeEngineProxy:
def handle(self, *args, **kwargs):
pass
|
class FakeEngineProxy:
def handle(self, *args, **kwargs):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 0 | 0 | 1 |
146,477 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_character_switcher.py
|
elide.tests.test_character_switcher.CharacterSwitcherTest
|
class CharacterSwitcherTest(ELiDEAppTest):
def setUp(self):
super(CharacterSwitcherTest, self).setUp()
with Engine(self.prefix) as eng:
polygons.install(eng)
def test_character_switcher(self):
app = self.app
self.Window.add_widget(app.build())
idle_until(
lambda: app.manager.current == "main",
100,
"Never switched to main",
)
idle_until(
lambda: app.mainscreen.boardview, 100, "never got boardview"
)
idle_until(
lambda: app.mainscreen.boardview.board.spot, 100, "never got spots"
)
physspots = len(app.mainscreen.boardview.board.spot)
app.mainscreen.charmenu.charmenu.toggle_chars_screen()
idle_until(
lambda: app.manager.current == "chars",
100,
"Never switched to chars",
)
boxl = app.chars.ids.charsview.ids.boxl
idle_until(
lambda: len(boxl.children) == 3,
100,
"Didn't get all three characters",
)
for charb in boxl.children:
if charb.text == "triangle":
touch = UnitTestTouch(*charb.center)
touch.pos = charb.center
assert charb.dispatch("on_touch_down", touch)
self.advance_frames(5)
charb.dispatch("on_touch_up", touch)
idle_until(
lambda: charb.state == "down",
10,
"Button press did not work",
)
break
else:
assert False, 'No button for "triangle" character'
idle_until(
lambda: app.chars.ids.charsview.character_name == "triangle",
100,
"Never propagated character_name",
)
app.chars.toggle()
idle_until(
lambda: app.manager.current == "main",
100,
"Didn't switch back to main",
)
idle_until(
lambda: not app.mainscreen.boardview.board.spot,
100,
"Didn't clear out spots, {} left".format(
len(app.mainscreen.boardview.board.spot)
),
)
app.mainscreen.charmenu.charmenu.toggle_chars_screen()
idle_until(
lambda: app.manager.current == "chars",
100,
"Never switched to chars",
)
for charb in boxl.children:
if charb.text == "physical":
touch = UnitTestTouch(*charb.center)
touch.pos = charb.center
assert charb.dispatch("on_touch_down", touch)
self.advance_frames(5)
charb.dispatch("on_touch_up", touch)
idle_until(
lambda: charb.state == "down",
10,
"Button press did not work",
)
break
else:
assert False, 'No button for "physical" character'
idle_until(
lambda: app.chars.ids.charsview.character_name == "physical",
100,
"Never propagated character_name",
)
app.chars.toggle()
idle_until(
lambda: len(app.mainscreen.boardview.board.spot) == physspots,
100,
"Never got physical back",
)
|
class CharacterSwitcherTest(ELiDEAppTest):
def setUp(self):
pass
def test_character_switcher(self):
pass
| 3 | 0 | 48 | 0 | 48 | 0 | 3 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 6 | 98 | 1 | 97 | 9 | 94 | 0 | 47 | 8 | 44 | 5 | 2 | 2 | 6 |
146,478 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/calendar.py
|
elide.calendar.CalendarWidget
|
class CalendarWidget(Widget, RecycleDataViewBehavior):
"""Base class for widgets within a Calendar
Shows the value of its ``key`` at a particular ``turn``, and sets
it at that turn if the value changes.
"""
turn = NumericProperty()
"""What turn I'm displaying the stat's value for"""
key = ObjectProperty()
"""The key to set in the entity"""
val = ObjectProperty(allownone=True)
"""The value you want to set the key to"""
def _update_disabledness(self, *_, **__):
if not self.parent:
return
self.disabled = self.turn < self.parent.parent.entity.engine.turn
def _trigger_update_disabledness(self, *_, **__):
if hasattr(self, "_scheduled_update_disabledness"):
Clock.unschedule(self._scheduled_update_disabledness)
self._scheduled_update_disabledness = Clock.schedule_once(
self._update_disabledness
)
def _set_value(self):
entity = self.parent.parent.entity
entity = getattr(entity, "stat", entity)
entity[self.key] = self.val
def on_val(self, *_):
# do I want to do some validation at this point?
# Maybe I should validate on the proxy objects and catch that in Calendar,
# display an error message?
if not self.parent:
return
calendar = self.parent.parent
my_dict = calendar.idx[(self.turn, self.key)]
entity = calendar.entity
update_mode = calendar.update_mode
if my_dict["val"] != self.val:
my_dict["val"] = self.val
if update_mode == "batch":
calendar.changed = True
elif update_mode == "present":
if self.turn == entity.engine.turn:
self._set_value()
else:
calendar.changed = True
else:
eng = entity.engine
now = eng.turn
if now == self.turn:
self._set_value()
else:
eng.turn = self.turn
self._set_value()
eng.turn = now
def on_parent(self, *_):
if not self.parent:
return
self._trigger_update_disabledness()
self.parent.parent.entity.engine.time.connect(
self._trigger_update_disabledness
)
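
CalendarWidget.on_val above distinguishes the calendar's update_mode values "batch" and "present" from an unnamed fallback that rewinds the engine to the widget's turn, writes the stat, and restores the clock. The sketch below abstracts that branching into plain functions; FakeEngine, apply_stat, and the "immediate" label are inventions for illustration, not elide or lisien APIs.

class FakeEngine:
    def __init__(self):
        self.turn = 5
        self.history = {}          # (turn, key) -> value

    def set_stat(self, key, value):
        self.history[self.turn, key] = value


def apply_stat(engine, mode, turn, key, value):
    if mode == "batch":
        return "deferred"          # the calendar batches it for later
    if mode == "present":
        if turn == engine.turn:
            engine.set_stat(key, value)
            return "applied"
        return "deferred"
    # immediate fallback: rewind, write, then restore the current turn
    now = engine.turn
    engine.turn = turn
    engine.set_stat(key, value)
    engine.turn = now
    return "applied"


eng = FakeEngine()
assert apply_stat(eng, "batch", 3, "hp", 10) == "deferred"
assert apply_stat(eng, "present", 5, "hp", 10) == "applied"
assert apply_stat(eng, "immediate", 3, "hp", 7) == "applied"
assert eng.history == {(5, "hp"): 10, (3, "hp"): 7} and eng.turn == 5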
|
class CalendarWidget(Widget, RecycleDataViewBehavior):
'''Base class for widgets within a Calendar
Shows the value of its ``key`` at a particular ``turn``, and sets
it at that turn if the value changes.
'''
def _update_disabledness(self, *_, **__):
pass
def _trigger_update_disabledness(self, *_, **__):
pass
def _set_value(self):
pass
def on_val(self, *_):
pass
def on_parent(self, *_):
pass
| 6 | 1 | 10 | 0 | 9 | 1 | 3 | 0.2 | 2 | 0 | 0 | 5 | 5 | 2 | 5 | 5 | 68 | 8 | 50 | 18 | 44 | 10 | 42 | 18 | 36 | 7 | 1 | 3 | 14 |
146,479 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.StringsEdScreen
|
class StringsEdScreen(Screen):
"""A screen in which to edit strings to be presented to humans
Needs a ``toggle`` function to switch back to the main screen;
a ``language`` identifier; and a ``language_setter`` function to be called
with that ``language`` when changed.
"""
toggle = ObjectProperty()
"""Function to switch back to the main screen"""
language = StringProperty("eng")
"""Code identifying the language we're editing"""
edbox = ObjectProperty()
"""Widget containing editors for the current string and its name"""
def on_language(self, *_):
if self.edbox is None:
Clock.schedule_once(self.on_language, 0)
return
self.edbox.storelist.redata()
if self.store.language != self.language:
self.store.language = self.language
def on_store(self, *_):
self.language = self.store.language
self.store.language.connect(self._pull_language)
def _pull_language(self, *_, language):
self.language = language
def save(self, *_):
if self.edbox is None:
Clock.schedule_once(self.save, 0)
return
self.edbox.save()
|
class StringsEdScreen(Screen):
'''A screen in which to edit strings to be presented to humans
Needs a ``toggle`` function to switch back to the main screen;
a ``language`` identifier; and a ``language_setter`` function to be called
with that ``language`` when changed.
'''
def on_language(self, *_):
pass
def on_store(self, *_):
pass
def _pull_language(self, *_, language):
pass
def save(self, *_):
pass
| 5 | 1 | 4 | 0 | 4 | 0 | 2 | 0.38 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 36 | 7 | 21 | 8 | 16 | 8 | 21 | 8 | 16 | 3 | 1 | 1 | 7 |
146,480 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.StringNameInput
|
class StringNameInput(TextInput):
"""Small text box for the names of strings"""
_trigger_save = ObjectProperty()
def on_focus(self, inst, val, *largs):
if self.text and not val:
self._trigger_save(self.text)
|
class StringNameInput(TextInput):
'''Small text box for the names of strings'''
def on_focus(self, inst, val, *largs):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 8 | 2 | 5 | 3 | 3 | 1 | 5 | 3 | 3 | 2 | 1 | 1 | 2 |
146,481 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.StringInput
|
class StringInput(Editor):
"""Editor for human-readable strings"""
validate_name_input = ObjectProperty()
"""Boolean function for checking if a string name is acceptable"""
def on_name_wid(self, *_):
if not self.validate_name_input:
Clock.schedule_once(self.on_name_wid, 0)
return
self.name_wid.bind(text=self.validate_name_input)
def _get_name(self):
if self.name_wid:
return self.name_wid.text
def _set_name(self, v, *_):
if not self.name_wid:
Clock.schedule_once(partial(self._set_name, v), 0)
return
self.name_wid.text = v
name = AliasProperty(_get_name, _set_name)
def _get_source(self):
if "string" not in self.ids:
return ""
return self.ids.string.text
def _set_source(self, v, *args):
if "string" not in self.ids:
Clock.schedule_once(partial(self._set_source, v), 0)
return
self.ids.string.text = v
source = AliasProperty(_get_source, _set_source)
|
class StringInput(Editor):
'''Editor for human-readable strings'''
def on_name_wid(self, *_):
pass
def _get_name(self):
pass
def _set_name(self, v, *_):
pass
def _get_source(self):
pass
def _set_source(self, v, *args):
pass
| 6 | 1 | 4 | 0 | 4 | 0 | 2 | 0.08 | 1 | 1 | 0 | 0 | 5 | 0 | 5 | 7 | 36 | 8 | 26 | 9 | 20 | 2 | 26 | 9 | 20 | 2 | 2 | 1 | 10 |
146,482 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.StoreList
|
class StoreList(RecycleView):
"""Holder for a :class:`kivy.uix.listview.ListView` that shows what's
in a store, using one of the StoreAdapter classes.
"""
store = ObjectProperty()
"""Either a FunctionStore or a StringStore"""
selection_name = StringProperty()
"""The ``name`` of the ``StoreButton`` currently selected"""
boxl = ObjectProperty()
"""Instance of ``SelectableRecycleBoxLayout``"""
def __init__(self, **kwargs):
self._i2name = {}
self._name2i = {}
super().__init__(**kwargs)
def on_store(self, *_):
self.store.connect(self._trigger_redata)
self.redata()
def on_boxl(self, *_):
self.boxl.bind(selected_nodes=self._pull_selection)
def _pull_selection(self, *_):
if not self.boxl.selected_nodes:
return
self.selection_name = self._i2name[self.boxl.selected_nodes[0]]
def munge(self, datum):
i, name = datum
self._i2name[i] = name
self._name2i[name] = i
return {
"store": self.store,
"text": str(name),
"name": name,
"select": self.ids.boxl.select_node,
"index": i,
}
def redata(self, *_, **kwargs):
"""Update my ``data`` to match what's in my ``store``"""
select_name = kwargs.get("select_name")
if not self.store:
Clock.schedule_once(self.redata)
return
self.data = list(
map(self.munge, enumerate(sorted(self.store._cache.keys())))
)
if select_name:
self._trigger_select_name(select_name)
def _trigger_redata(self, *args, **kwargs):
part = partial(self.redata, *args, **kwargs)
if hasattr(self, "_scheduled_redata"):
Clock.unschedule(self._scheduled_redata)
self._scheduled_redata = Clock.schedule_once(part, 0)
def select_name(self, name, *_):
"""Select an item by its name, highlighting"""
self.boxl.select_node(self._name2i[name])
def _trigger_select_name(self, name):
part = partial(self.select_name, name)
if hasattr(self, "_scheduled_select_name"):
Clock.unschedule(self._scheduled_select_name)
self._scheduled_select_name = Clock.schedule_once(part, 0)
|
class StoreList(RecycleView):
'''Holder for a :class:`kivy.uix.listview.ListView` that shows what's
in a store, using one of the StoreAdapter classes.
'''
def __init__(self, **kwargs):
pass
def on_store(self, *_):
pass
def on_boxl(self, *_):
pass
def _pull_selection(self, *_):
pass
def munge(self, datum):
pass
def redata(self, *_, **kwargs):
'''Update my ``data`` to match what's in my ``store``'''
pass
def _trigger_redata(self, *args, **kwargs):
pass
def select_name(self, name, *_):
'''Select an item by its name, highlighting'''
pass
def _trigger_select_name(self, name):
pass
| 10 | 3 | 5 | 0 | 5 | 0 | 2 | 0.16 | 1 | 6 | 0 | 0 | 9 | 5 | 9 | 9 | 69 | 11 | 50 | 22 | 40 | 8 | 42 | 22 | 32 | 3 | 1 | 1 | 14 |
146,483 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.StoreButton
|
class StoreButton(RecycleToggleButton):
"""RecycleToggleButton to select something to edit in a Store"""
store = ObjectProperty()
"""Either a FunctionStore or a StringStore"""
name = StringProperty()
"""Name of this particular item"""
source = StringProperty()
"""Text of this item"""
select = ObjectProperty()
"""Function that gets called with my ``index`` when I'm selected"""
def on_parent(self, *_):
if self.name == "+":
self.state = "down"
self.select(self.index)
def on_state(self, *_):
if self.state == "down":
self.select(self.index)
|
class StoreButton(RecycleToggleButton):
'''RecycleToggleButton to select something to edit in a Store'''
def on_parent(self, *_):
pass
def on_state(self, *_):
pass
| 3 | 1 | 4 | 0 | 4 | 0 | 2 | 0.42 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 4 | 20 | 3 | 12 | 8 | 9 | 5 | 12 | 8 | 9 | 2 | 2 | 1 | 4 |
146,484 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.RecycleToggleButton
|
class RecycleToggleButton(ToggleButton, RecycleDataViewBehavior):
"""Toggle button at some index in a RecycleView"""
index = NumericProperty()
def on_touch_down(self, touch):
if self.collide_point(*touch.pos):
return self.parent.select_with_touch(self.index, touch)
def apply_selection(self, rv, index, is_selected):
if is_selected and index == self.index:
self.state = "down"
else:
self.state = "normal"
|
class RecycleToggleButton(ToggleButton, RecycleDataViewBehavior):
'''Toggle button at some index in a RecycleView'''
def on_touch_down(self, touch):
pass
def apply_selection(self, rv, index, is_selected):
pass
| 3 | 1 | 4 | 0 | 4 | 0 | 2 | 0.1 | 2 | 0 | 0 | 1 | 2 | 1 | 2 | 2 | 14 | 3 | 10 | 5 | 7 | 1 | 9 | 5 | 6 | 2 | 1 | 1 | 4 |
146,485 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.LanguageInput
|
class LanguageInput(TextInput):
"""Widget to enter the language you want to edit"""
screen = ObjectProperty()
"""The instance of ``StringsEdScreen`` that I'm in"""
def on_focus(self, instance, value, *largs):
if not value:
if self.screen.language != self.text:
self.screen.language = self.text
self.text = ""
|
class LanguageInput(TextInput):
'''Widget to enter the language you want to edit'''
def on_focus(self, instance, value, *largs):
pass
| 2 | 1 | 5 | 0 | 5 | 0 | 3 | 0.29 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 11 | 2 | 7 | 4 | 5 | 2 | 7 | 4 | 5 | 3 | 1 | 2 | 3 |
146,486 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.FunctionNameInput
|
class FunctionNameInput(TextInput):
"""Input for the name of a function
Filters out illegal characters.
"""
_trigger_save = ObjectProperty()
def insert_text(self, s, from_undo=False):
if self.text == "":
if s[0] not in (string.ascii_letters + "_"):
return
return super().insert_text(
"".join(
c
for c in s
if c in (string.ascii_letters + string.digits + "_")
)
)
def on_focus(self, inst, val, *_):
if not val:
self._trigger_save(self.text)
|
class FunctionNameInput(TextInput):
'''Input for the name of a function
Filters out illegal characters.
'''
def insert_text(self, s, from_undo=False):
pass
def on_focus(self, inst, val, *_):
pass
| 3 | 1 | 7 | 0 | 7 | 0 | 3 | 0.19 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 24 | 5 | 16 | 4 | 13 | 3 | 10 | 4 | 7 | 3 | 1 | 2 | 5 |
146,487 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.FuncsEdScreen
|
class FuncsEdScreen(Screen):
"""Screen containing three FuncsEdBox
Triggers, prereqs, and actions.
"""
toggle = ObjectProperty()
def save(self, *args):
self.ids.triggers.save()
self.ids.prereqs.save()
self.ids.actions.save()
|
class FuncsEdScreen(Screen):
'''Screen containing three FuncsEdBox
Triggers, prereqs, and actions.
'''
def save(self, *args):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 1 | 0.5 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 13 | 4 | 6 | 3 | 4 | 3 | 6 | 3 | 4 | 1 | 1 | 0 | 1 |
146,488 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.FuncsEdBox
|
class FuncsEdBox(EdBox):
"""Widget for editing the Python source of funcs to be used in lisien sims.
Contains a list of functions in the store it's about, next to a
FuncEditor showing the source of the selected one, and a close button.
"""
def get_default_text(self, newname):
return self.editor.get_default_text(newname)
@staticmethod
def valid_name(name):
return (
name
and name[0]
not in string.digits + string.whitespace + string.punctuation
)
def on_data(self, *_):
app = App.get_running_app()
if app is None:
return
app.rules.rulesview.set_functions(
self.store_name, map(app.rules.rulesview.inspect_func, self.data)
)
|
class FuncsEdBox(EdBox):
'''Widget for editing the Python source of funcs to be used in lisien sims.
Contains a list of functions in the store it's about, next to a
FuncEditor showing the source of the selected one, and a close button.
'''
def get_default_text(self, newname):
pass
@staticmethod
def valid_name(name):
pass
def on_data(self, *_):
pass
| 5 | 1 | 5 | 0 | 5 | 0 | 1 | 0.24 | 1 | 1 | 0 | 0 | 2 | 0 | 3 | 12 | 26 | 5 | 17 | 6 | 12 | 4 | 10 | 5 | 6 | 2 | 2 | 1 | 4 |
146,489 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.StringsEdBox
|
class StringsEdBox(EdBox):
"""Box containing most of the strings editing screen
Contains the storelist and the editor, which in turn contains the string name input
and a bigger input field for the string itself.
"""
language = StringProperty("eng")
@staticmethod
def get_default_text(newname):
return ""
@staticmethod
def valid_name(name):
return name and name[0] != "+"
|
class StringsEdBox(EdBox):
'''Box containing most of the strings editing screen
Contains the storelist and the editor, which in turn contains the string name input
and a bigger input field for the string itself.
'''
@staticmethod
def get_default_text(newname):
pass
@staticmethod
def valid_name(name):
pass
| 5 | 1 | 2 | 0 | 2 | 0 | 1 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 11 | 17 | 5 | 8 | 6 | 3 | 4 | 6 | 4 | 3 | 1 | 2 | 0 | 2 |
146,490 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/tests/test_character.py
|
lisien.tests.test_character.CharacterTest
|
class CharacterTest(lisien.allegedb.tests.test_all.AllegedTest):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.engine = Engine(
self.tempdir, enforce_end_of_time=False, workers=0
)
self.graphmakers = (self.engine.new_character,)
def tearDown(self):
self.engine.close()
rmtree(self.tempdir)
|
class CharacterTest(lisien.allegedb.tests.test_all.AllegedTest):
def setUp(self):
pass
def tearDown(self):
pass
| 3 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 1 | 1 | 4 | 2 | 3 | 2 | 75 | 11 | 1 | 10 | 6 | 7 | 0 | 8 | 6 | 5 | 1 | 3 | 0 | 2 |
146,491 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/card.py
|
elide.card.Card
|
class Card(FloatLayout):
"""A trading card with text and illustration
Its appearance is determined by several properties, the most
important being:
* ``headline_text``, a string to be shown at the top of the card;
may be styled with eg. ``headline_font_name`` or
``headline_color``
* ``art_source``, the path to an image to be displayed below the
headline; may be hidden by setting ``show_art`` to ``False``
* ``midline_text``, similar to ``headline_text`` but appearing
below the art
* ``text``, shown in a box the same size as the art. Styleable
like ``headline_text`` and you can customize the box with
eg. ``foreground_color`` and ``foreground_source``
* ``footer_text``, like ``headline_text`` but at the bottom
:class:`Card` is particularly useful when put in a
:class:`DeckLayout`, allowing the user to drag cards in between
any number of piles, into particular positions within a particular
pile, and so forth.
"""
dragging = BooleanProperty(False)
deck = NumericProperty()
idx = NumericProperty()
ud = DictProperty({})
collide_x = NumericProperty()
collide_y = NumericProperty()
collide_pos = ReferenceListProperty(collide_x, collide_y)
foreground = ObjectProperty()
foreground_source = StringProperty("")
foreground_color = ListProperty([1, 1, 1, 1])
foreground_image = ObjectProperty(None, allownone=True)
foreground_texture = ObjectProperty(None, allownone=True)
background_source = StringProperty("")
background_color = ListProperty([0.7, 0.7, 0.7, 1])
background_image = ObjectProperty(None, allownone=True)
background_texture = ObjectProperty(None, allownone=True)
outline_color = ListProperty([0, 0, 0, 1])
content_outline_color = ListProperty([0, 0, 0, 0])
foreground_outline_color = ListProperty([0, 0, 0, 1])
art_outline_color = ListProperty([0, 0, 0, 0])
art = ObjectProperty()
art_source = StringProperty("")
art_color = ListProperty([1, 1, 1, 1])
art_image = ObjectProperty(None, allownone=True)
art_texture = ObjectProperty(None, allownone=True)
show_art = BooleanProperty(True)
headline = ObjectProperty()
headline_text = StringProperty("Headline")
headline_markup = BooleanProperty(True)
headline_font_name = StringProperty("Roboto-Regular")
headline_font_size = NumericProperty(18)
headline_color = ListProperty([0, 0, 0, 1])
midline = ObjectProperty()
midline_text = StringProperty("")
midline_markup = BooleanProperty(True)
midline_font_name = StringProperty("Roboto-Regular")
midline_font_size = NumericProperty(14)
midline_color = ListProperty([0, 0, 0, 1])
footer = ObjectProperty()
footer_text = StringProperty("")
footer_markup = BooleanProperty(True)
footer_font_name = StringProperty("Roboto-Regular")
footer_font_size = NumericProperty(10)
footer_color = ListProperty([0, 0, 0, 1])
text = StringProperty("")
text_color = ListProperty([0, 0, 0, 1])
markup = BooleanProperty(True)
font_name = StringProperty("Roboto-Regular")
font_size = NumericProperty(12)
editable = BooleanProperty(False)
edit_func = ObjectProperty()
def on_text(self, *_):
if "main_text" not in self.ids:
Clock.schedule_once(self.on_text, 0)
return
text = self.text.replace("\t", " ")
if self.markup:
if not hasattr(self, "_lexer"):
self._lexer = PythonLexer()
self._formatter = BBCodeFormatter()
text = (
text.replace("[", "\x01")
.replace("]", "\x02")
.replace("\t", " " * 4)
)
text = pygments.format(
self._lexer.get_tokens(text),
self._formatter,
)
text = text.replace("\x01", "&bl;").replace("\x02", "&br;")
text = "".join(
f"[color={get_hex_from_color(self.text_color)}]{text}[/color]"
)
self.ids.main_text.text = text
def on_background_source(self, *args):
"""When I get a new ``background_source``, load it as an
:class:`Image` and store that in ``background_image``.
"""
if self.background_source:
self.background_image = Image(source=self.background_source)
def on_background_image(self, *args):
"""When I get a new ``background_image``, store its texture in
``background_texture``.
"""
if self.background_image is not None:
self.background_texture = self.background_image.texture
def on_foreground_source(self, *args):
"""When I get a new ``foreground_source``, load it as an
:class:`Image` and store that in ``foreground_image``.
"""
if self.foreground_source:
self.foreground_image = Image(source=self.foreground_source)
def on_foreground_image(self, *args):
"""When I get a new ``foreground_image``, store its texture in my
``foreground_texture``.
"""
if self.foreground_image is not None:
self.foreground_texture = self.foreground_image.texture
def on_art_source(self, *args):
"""When I get a new ``art_source``, load it as an :class:`Image` and
store that in ``art_image``.
"""
if self.art_source:
self.art_image = Image(source=self.art_source)
def on_art_image(self, *args):
"""When I get a new ``art_image``, store its texture in
``art_texture``.
"""
if self.art_image is not None:
self.art_texture = self.art_image.texture
def on_touch_down(self, touch):
"""If I'm the first card to collide this touch, grab it, store my
metadata in its userdict, and store the relative coords upon
me where the collision happened.
"""
if not self.collide_point(*touch.pos):
return
if "card" in touch.ud:
return
if self.editable and self.ids.editbut.collide_point(*touch.pos):
touch.grab(self.ids.editbut)
self.ids.editbut.dispatch("on_touch_down", touch)
return
touch.grab(self)
self.dragging = True
touch.ud["card"] = self
touch.ud["idx"] = self.idx
touch.ud["deck"] = self.deck
touch.ud["layout"] = self.parent
self.collide_x = touch.x - self.x
self.collide_y = touch.y - self.y
def on_touch_move(self, touch):
"""If I'm being dragged, move so as to be always positioned the same
relative to the touch.
"""
if not self.dragging:
touch.ungrab(self)
return
self.pos = (touch.x - self.collide_x, touch.y - self.collide_y)
def on_touch_up(self, touch):
"""Stop dragging if needed."""
if not self.dragging:
return
touch.ungrab(self)
self.dragging = False
def copy(self):
"""Return a new :class:`Card` just like me."""
d = {}
for att in (
"deck",
"idx",
"ud",
"foreground_source",
"foreground_color",
"foreground_image",
"foreground_texture",
"background_source",
"background_color",
"background_image",
"background_texture",
"outline_color",
"content_outline_color",
"foreground_outline_color",
"art_outline_color",
"art_source",
"art_color",
"art_image",
"art_texture",
"show_art",
"headline_text",
"headline_markup",
"headline_font_name",
"headline_font_size",
"headline_color",
"midline_text",
"midline_markup",
"midline_font_name",
"midline_font_size",
"midline_color",
"footer_text",
"footer_markup",
"footer_font_name",
"footer_font_size",
"footer_color",
"text",
"text_color",
"markup",
"font_name",
"font_size",
"editable",
"on_edit",
):
v = getattr(self, att)
if v is not None:
d[att] = v
return Card(**d)
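
Card.on_text above syntax-highlights the card's text by tokenizing it with a Pygments lexer and rendering the tokens through BBCodeFormatter, plus extra escaping so Kivy's square-bracket markup survives. The snippet below isolates only the Pygments step on a made-up source string; the bracket escaping and the get_hex_from_color wrapping are omitted.

import pygments
from pygments.formatters import BBCodeFormatter
from pygments.lexers import PythonLexer

source = "def hello():\n    return 42\n"
bbcode = pygments.format(PythonLexer().get_tokens(source), BBCodeFormatter())
print(bbcode)   # e.g. "[color=#008000][b]def[/b][/color] hello():..."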
|
class Card(FloatLayout):
'''A trading card with text and illustration
Its appearance is determined by several properties, the most
important being:
* ``headline_text``, a string to be shown at the top of the card;
may be styled with eg. ``headline_font_name`` or
``headline_color``
* ``art_source``, the path to an image to be displayed below the
headline; may be hidden by setting ``show_art`` to ``False``
* ``midline_text``, similar to ``headline_text`` but appearing
below the art
* ``text``, shown in a box the same size as the art. Styleable
like ``headline_text`` and you can customize the box with
eg. ``foreground_color`` and ``foreground_source``
* ``footer_text``, like ``headline_text`` but at the bottom
:class:`Card` is particularly useful when put in a
:class:`DeckLayout`, allowing the user to drag cards in between
any number of piles, into particular positions within a particular
pile, and so forth.
'''
def on_text(self, *_):
pass
def on_background_source(self, *args):
'''When I get a new ``background_source``, load it as an
:class:`Image` and store that in ``background_image``.
'''
pass
def on_background_image(self, *args):
'''When I get a new ``background_image``, store its texture in
``background_texture``.
'''
pass
def on_foreground_source(self, *args):
'''When I get a new ``foreground_source``, load it as an
:class:`Image` and store that in ``foreground_image``.
'''
pass
def on_foreground_image(self, *args):
'''When I get a new ``foreground_image``, store its texture in my
``foreground_texture``.
'''
pass
def on_art_source(self, *args):
'''When I get a new ``art_source``, load it as an :class:`Image` and
store that in ``art_image``.
'''
pass
def on_art_image(self, *args):
'''When I get a new ``art_image``, store its texture in
``art_texture``.
'''
pass
def on_touch_down(self, touch):
'''If I'm the first card to collide this touch, grab it, store my
metadata in its userdict, and store the relative coords upon
me where the collision happened.
'''
pass
def on_touch_move(self, touch):
'''If I'm being dragged, move so as to be always positioned the same
relative to the touch.
'''
pass
def on_touch_up(self, touch):
'''Stop dragging if needed.'''
pass
def copy(self):
'''Return a new :class:`Card` just like me.'''
pass
| 12 | 11 | 14 | 1 | 11 | 2 | 2 | 0.27 | 1 | 1 | 0 | 0 | 11 | 3 | 11 | 11 | 254 | 38 | 170 | 70 | 158 | 46 | 118 | 70 | 106 | 4 | 1 | 2 | 27 |
146,492 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/card.py
|
elide.card.DeckBuilderScrollBar
|
class DeckBuilderScrollBar(FloatLayout):
"""A widget that looks a lot like one of the scrollbars on the sides
of eg. :class:`kivy.uix.ScrollView`, which moves a single deck
within a :class:`DeckBuilderLayout`.
"""
orientation = OptionProperty(
"vertical", options=["horizontal", "vertical"]
)
"""Which way to scroll? Options are 'horizontal' and 'vertical'."""
deckbuilder = ObjectProperty()
"""The :class:`DeckBuilderLayout` of the deck to scroll."""
deckidx = NumericProperty(0)
"""The index of the deck to scroll, within its
:class:`DeckBuilderLayout`'s ``decks`` property.
"""
scrolling = BooleanProperty(False)
"""Has the user grabbed me?"""
scroll_min = NumericProperty(-1)
"""How far left (if horizontal) or down (if vertical) I can move my
deck, expressed as a proportion of the
:class:`DeckBuilderLayout`'s width or height, respectively.
"""
scroll_max = NumericProperty(1)
"""How far right (if horizontal) or up (if vertical) I can move my
deck, expressed as a proportion of the
:class:`DeckBuilderLayout`'s width or height, respectively.
"""
scroll_hint = AliasProperty(
lambda self: abs(self.scroll_max - self.scroll_min),
lambda self, v: None,
bind=("scroll_min", "scroll_max"),
)
"""The distance between ``scroll_max`` and ``scroll_min``."""
_scroll = NumericProperty(0)
"""Private. The current adjustment to the deck's ``pos_hint_x`` or
``pos_hint_y``.
"""
def _get_scroll(self):
zero = self._scroll - self.scroll_min
return zero / self.scroll_hint
def _set_scroll(self, v):
if v < 0:
v = 0
if v > 1:
v = 1
normal = v * self.scroll_hint
self._scroll = self.scroll_min + normal
scroll = AliasProperty(
_get_scroll, _set_scroll, bind=("_scroll", "scroll_min", "scroll_max")
)
"""A number between 0 and 1 representing how far beyond ``scroll_min``
toward ``scroll_max`` I am presently scrolled.
"""
def _get_vbar(self):
if self.deckbuilder is None:
return (0, 1)
vh = self.deckbuilder.height * (self.scroll_hint + 1)
h = self.height
if vh < h or vh == 0:
return (0, 1)
ph = max(0.01, h / vh)
sy = min(1.0, max(0.0, self.scroll))
py = (1 - ph) * sy
return (py, ph)
vbar = AliasProperty(
_get_vbar, None, bind=("_scroll", "scroll_min", "scroll_max")
)
"""A tuple of ``(y, height)`` for my scroll bar, if it's vertical."""
def _get_hbar(self):
if self.deckbuilder is None:
return (0, 1)
vw = self.deckbuilder.width * self.scroll_hint
w = self.width
if vw < w or vw == 0:
return (0, 1)
pw = max(0.01, w / vw)
sx = min(1.0, max(0.0, self.scroll))
px = (1 - pw) * sx
return (px, pw)
hbar = AliasProperty(
_get_hbar, None, bind=("_scroll", "scroll_min", "scroll_max")
)
"""A tuple of ``(x, width)`` for my scroll bar, if it's horizontal."""
bar_color = ListProperty([0.7, 0.7, 0.7, 0.9])
"""Color to use for the scroll bar when scrolling. RGBA format."""
bar_inactive_color = ListProperty([0.7, 0.7, 0.7, 0.2])
"""Color to use for the scroll bar when not scrolling. RGBA format."""
bar_texture = ObjectProperty(None, allownone=True)
"""Texture for the scroll bar, normally ``None``."""
def __init__(self, **kwargs):
"""Arrange to be laid out whenever I'm scrolled or the range of my
scrolling changes.
"""
super().__init__(**kwargs)
self.bind(
_scroll=self._trigger_layout,
scroll_min=self._trigger_layout,
scroll_max=self._trigger_layout,
)
def do_layout(self, *args):
"""Put the bar where it's supposed to be, and size it in proportion to
the size of the scrollable area.
"""
if "bar" not in self.ids:
Clock.schedule_once(self.do_layout)
return
if self.orientation == "horizontal":
self.ids.bar.size_hint_x = self.hbar[1]
self.ids.bar.pos_hint = {"x": self.hbar[0], "y": 0}
else:
self.ids.bar.size_hint_y = self.vbar[1]
self.ids.bar.pos_hint = {"x": 0, "y": self.vbar[0]}
super().do_layout(*args)
def upd_scroll(self, *args):
"""Update my own ``scroll`` property to where my deck is actually
scrolled.
"""
att = "deck_{}_hint_offsets".format(
"x" if self.orientation == "horizontal" else "y"
)
self._scroll = getattr(self.deckbuilder, att)[self.deckidx]
def on_deckbuilder(self, *args):
"""Bind my deckbuilder to update my ``scroll``, and my ``scroll`` to
update my deckbuilder.
"""
if self.deckbuilder is None:
return
att = "deck_{}_hint_offsets".format(
"x" if self.orientation == "horizontal" else "y"
)
offs = getattr(self.deckbuilder, att)
if len(offs) <= self.deckidx:
Clock.schedule_once(self.on_deckbuilder, 0)
return
self.bind(scroll=self.handle_scroll)
self.deckbuilder.bind(**{att: self.upd_scroll})
self.upd_scroll()
self.deckbuilder._trigger_layout()
def handle_scroll(self, *args):
"""When my ``scroll`` changes, tell my deckbuilder how it's scrolled
now.
"""
if "bar" not in self.ids:
Clock.schedule_once(self.handle_scroll, 0)
return
att = "deck_{}_hint_offsets".format(
"x" if self.orientation == "horizontal" else "y"
)
offs = list(getattr(self.deckbuilder, att))
if len(offs) <= self.deckidx:
Clock.schedule_once(self.handle_scroll, 0)
return
offs[self.deckidx] = self._scroll
setattr(self.deckbuilder, att, offs)
self.deckbuilder._trigger_layout()
def bar_touched(self, bar, touch):
"""Start scrolling, and record where I started scrolling."""
self.scrolling = True
self._start_bar_pos_hint = get_pos_hint(bar.pos_hint, *bar.size_hint)
self._start_touch_pos_hint = (
touch.x / self.width,
touch.y / self.height,
)
self._start_bar_touch_hint = (
self._start_touch_pos_hint[0] - self._start_bar_pos_hint[0],
self._start_touch_pos_hint[1] - self._start_bar_pos_hint[1],
)
touch.grab(self)
def on_touch_move(self, touch):
"""Move the scrollbar to the touch, and update my ``scroll``
accordingly.
"""
if not self.scrolling or "bar" not in self.ids:
touch.ungrab(self)
return
touch.push()
touch.apply_transform_2d(self.parent.to_local)
touch.apply_transform_2d(self.to_local)
if self.orientation == "horizontal":
hint_right_of_bar = (touch.x - self.ids.bar.x) / self.width
hint_correction = hint_right_of_bar - self._start_bar_touch_hint[0]
self.scroll += hint_correction
else: # self.orientation == 'vertical'
hint_above_bar = (touch.y - self.ids.bar.y) / self.height
hint_correction = hint_above_bar - self._start_bar_touch_hint[1]
self.scroll += hint_correction
touch.pop()
def on_touch_up(self, touch):
"""Stop scrolling."""
self.scrolling = False
|
class DeckBuilderScrollBar(FloatLayout):
'''A widget that looks a lot like one of the scrollbars on the sides
of eg. :class:`kivy.uix.ScrollView`, which moves a single deck
within a :class:`DeckBuilderLayout`.
'''
def _get_scroll(self):
pass
def _set_scroll(self, v):
pass
def _get_vbar(self):
pass
def _get_hbar(self):
pass
def __init__(self, **kwargs):
'''Arrange to be laid out whenever I'm scrolled or the range of my
scrolling changes.
'''
pass
def do_layout(self, *args):
'''Put the bar where it's supposed to be, and size it in proportion to
the size of the scrollable area.
'''
pass
def upd_scroll(self, *args):
'''Update my own ``scroll`` property to where my deck is actually
scrolled.
'''
pass
def on_deckbuilder(self, *args):
'''Bind my deckbuilder to update my ``scroll``, and my ``scroll`` to
update my deckbuilder.
'''
pass
def handle_scroll(self, *args):
'''When my ``scroll`` changes, tell my deckbuilder how it's scrolled
now.
'''
pass
def bar_touched(self, bar, touch):
'''Start scrolling, and record where I started scrolling.'''
pass
def on_touch_move(self, touch):
'''Move the scrollbar to the touch, and update my ``scroll``
accordingly.
'''
pass
def on_touch_up(self, touch):
'''Stop scrolling.'''
pass
| 13 | 9 | 12 | 1 | 9 | 2 | 2 | 0.36 | 1 | 2 | 0 | 0 | 12 | 4 | 12 | 12 | 219 | 29 | 140 | 51 | 127 | 51 | 110 | 50 | 97 | 4 | 1 | 1 | 29 |
146,493 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.UniversalMapping
|
class UniversalMapping(MutableMapping, Signal):
"""Mapping for variables that are global but which I keep history for"""
__slots__ = ["engine"]
def __init__(self, engine):
"""Store the engine and initialize my private dictionary of
listeners.
"""
super().__init__()
self.engine = engine
def __iter__(self):
return self.engine._universal_cache.iter_keys(*self.engine._btt())
def __len__(self):
return self.engine._universal_cache.count_keys(*self.engine._btt())
def __getitem__(self, k):
"""Get the current value of this key"""
return self.engine._universal_cache.retrieve(k, *self.engine._btt())
def __setitem__(self, k, v):
"""Set k=v at the current branch and tick"""
branch, turn, tick = self.engine._nbtt()
self.engine._universal_cache.store(k, branch, turn, tick, v)
self.engine.query.universal_set(k, branch, turn, tick, v)
self.send(self, key=k, val=v)
def __delitem__(self, k):
"""Unset this key for the present (branch, tick)"""
branch, turn, tick = self.engine._nbtt()
self.engine._universal_cache.store(k, branch, turn, tick, None)
self.engine.query.universal_del(k, branch, turn, tick)
self.send(self, key=k, val=None)
|
class UniversalMapping(MutableMapping, Signal):
'''Mapping for variables that are global but which I keep history for'''
def __init__(self, engine):
'''Store the engine and initialize my private dictionary of
listeners.
'''
pass
def __iter__(self):
pass
def __len__(self):
pass
def __getitem__(self, k):
'''Get the current value of this key'''
pass
def __setitem__(self, k, v):
'''Set k=v at the current branch and tick'''
pass
def __delitem__(self, k):
'''Unset this key for the present (branch, tick)'''
pass
| 7 | 5 | 4 | 0 | 3 | 1 | 1 | 0.33 | 2 | 1 | 0 | 0 | 6 | 1 | 6 | 47 | 36 | 8 | 21 | 11 | 14 | 7 | 21 | 11 | 14 | 1 | 7 | 0 | 6 |
146,494 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/cache.py
|
lisien.allegedb.cache.NodesCache
|
class NodesCache(Cache):
"""A cache for remembering whether nodes exist at a given time."""
__slots__ = ()
def __init__(self, db, kfkvs=None):
super().__init__(db, "nodes_cache", kfkvs)
def store(
self,
graph: Hashable,
node: Hashable,
branch: str,
turn: int,
tick: int,
ex: bool,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
if not ex:
ex = None
if contra is None:
contra = not loading
return super().store(
graph,
node,
branch,
turn,
tick,
ex,
planning=planning,
forward=forward,
loading=loading,
contra=contra,
)
def _update_keycache(self, *args, forward):
graph: Hashable
node: Hashable
branch: str
turn: int
tick: int
ex: bool | None
graph, node, branch, turn, tick, ex = args
if not ex:
ex = None
super()._update_keycache(
graph, node, branch, turn, tick, ex, forward=forward
)
def _iter_future_contradictions(
self,
entity: Hashable,
key: Hashable,
turns: dict,
branch: str,
turn: int,
tick: int,
value,
):
yield from super()._iter_future_contradictions(
entity, key, turns, branch, turn, tick, value
)
yield from self.db._edges_cache._slow_iter_node_contradicted_times(
branch, turn, tick, entity, key
)
|
class NodesCache(Cache):
'''A cache for remembering whether nodes exist at a given time.'''
def __init__(self, db, kfkvs=None):
pass
def store(
self,
graph: Hashable,
node: Hashable,
branch: str,
turn: int,
tick: int,
ex: bool,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
pass
def _update_keycache(self, *args, forward):
pass
def _iter_future_contradictions(
self,
entity: Hashable,
key: Hashable,
turns: dict,
branch: str,
turn: int,
tick: int,
value,
):
pass
| 5 | 1 | 15 | 0 | 15 | 0 | 2 | 0.02 | 1 | 5 | 0 | 0 | 4 | 0 | 4 | 31 | 69 | 5 | 63 | 28 | 36 | 1 | 24 | 6 | 19 | 3 | 1 | 1 | 7 |
146,495 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/cache.py
|
lisien.allegedb.cache.EntitylessCache
|
class EntitylessCache(Cache):
__slots__ = ()
def store(
self,
key,
branch,
turn,
tick,
value,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
super().store(
None,
key,
branch,
turn,
tick,
value,
planning=planning,
forward=forward,
loading=loading,
contra=contra,
)
def get_keyframe(self, branch, turn, tick, copy=True):
return super().get_keyframe((None,), branch, turn, tick, copy=copy)
def set_keyframe(self, branch, turn, tick, keyframe):
super().set_keyframe((None,), branch, turn, tick, keyframe)
def iter_entities_or_keys(self, branch, turn, tick, *, forward=None):
return super().iter_entities_or_keys(
None, branch, turn, tick, forward=forward
)
iter_entities = iter_keys = iter_entities_or_keys
def contains_entity_or_key(self, ke, branch, turn, tick):
return super().contains_entity_or_key(None, ke, branch, turn, tick)
contains_entity = contains_key = contains_entity_or_key
def retrieve(self, *args):
return super().retrieve(*(None,) + args)
|
class EntitylessCache(Cache):
def store(
self,
key,
branch,
turn,
tick,
value,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
pass
def get_keyframe(self, branch, turn, tick, copy=True):
pass
def set_keyframe(self, branch, turn, tick, keyframe):
pass
def iter_entities_or_keys(self, branch, turn, tick, *, forward=None):
pass
def contains_entity_or_key(self, ke, branch, turn, tick):
pass
def retrieve(self, *args):
pass
| 7 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 2 | 0 | 1 | 6 | 0 | 6 | 33 | 49 | 8 | 41 | 22 | 22 | 0 | 16 | 10 | 9 | 1 | 1 | 0 | 6 |
146,496 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/cache.py
|
lisien.allegedb.cache.EdgesCache
|
class EdgesCache(Cache):
"""A cache for remembering whether edges exist at a given time."""
__slots__ = (
"destcache",
"origcache",
"predecessors",
"_origcache_lru",
"_destcache_lru",
"_get_destcache_stuff",
"_get_origcache_stuff",
"_additional_store_stuff",
)
@property
def successors(self):
return self.parents
def __init__(self, db):
def gettest(k):
assert len(k) == 3, "Bad key: " + repr(k)
def settest(k, v):
assert len(k) == 3, "Bad key: {}, to be set to {}".format(k, v)
Cache.__init__(
self,
db,
kfkvs={"gettest": gettest, "settest": settest},
name="edges_cache",
)
self.destcache = PickyDefaultDict(SettingsTurnDict)
self.origcache = PickyDefaultDict(SettingsTurnDict)
self.predecessors = StructuredDefaultDict(3, TurnDict)
self._origcache_lru = OrderedDict()
self._destcache_lru = OrderedDict()
self._get_destcache_stuff: tuple[
PickyDefaultDict,
OrderedDict,
callable,
StructuredDefaultDict,
callable,
] = (
self.destcache,
self._destcache_lru,
self._get_keycachelike,
self.successors,
self._adds_dels_successors,
)
self._get_origcache_stuff: tuple[
PickyDefaultDict,
OrderedDict,
callable,
StructuredDefaultDict,
callable,
] = (
self.origcache,
self._origcache_lru,
self._get_keycachelike,
self.predecessors,
self._adds_dels_predecessors,
)
self._additional_store_stuff = (
self.db,
self.predecessors,
self.successors,
)
def total_size(self, handlers=(), verbose=False):
all_handlers = {
EdgesCache: lambda e: [
e.predecessors,
e.successors,
e.destcache,
e.origcache,
]
}
all_handlers.update(handlers)
return super().total_size(all_handlers, verbose)
def get_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int, copy=True
):
if len(graph_ent) == 3:
return super().get_keyframe(graph_ent, branch, turn, tick, copy)
ret = {}
for graph, orig, dest in self.keyframe:
if (graph, orig) == graph_ent:
ret[dest] = super().get_keyframe(
(graph, orig, dest), branch, turn, tick, copy
)
return ret
def _update_keycache(self, *args, forward: bool):
super()._update_keycache(*args, forward=forward)
dest: Hashable
key: Hashable
branch: str
turn: int
tick: int
dest, key, branch, turn, tick, value = args[-6:]
graph, orig = args[:-6]
# it's possible either of these might cause unnecessary iteration
dests = self._get_destcache(
graph, orig, branch, turn, tick, forward=forward
)
origs = self._get_origcache(
graph, dest, branch, turn, tick, forward=forward
)
if value is None:
dests = dests.difference((dest,))
origs = origs.difference((orig,))
else:
dests = dests.union((dest,))
origs = origs.union((orig,))
self.destcache[graph, orig, branch][turn][tick] = dests
self.origcache[graph, dest, branch][turn][tick] = origs
def _slow_iter_node_contradicted_times(
self,
branch: str,
turn: int,
tick: int,
graph: Hashable,
node: Hashable,
):
# slow and bad.
retrieve = self._base_retrieve
for items in (
self.successors[graph, node].items(),
self.predecessors[graph, node].items(),
):
for dest, idxs in items: # dest might really be orig
for idx, branches in idxs.items():
brnch = branches[branch]
if turn in brnch:
ticks = brnch[turn]
for tck, present in ticks.future(tick).items():
if tck > tick and present is not retrieve(
(graph, node, dest, idx, branch, turn, tick)
):
yield turn, tck
for trn, ticks in brnch.future(turn).items():
for tck, present in ticks.items():
if present is not retrieve(
(graph, node, dest, idx, branch, turn, tick)
):
yield trn, tck
def _adds_dels_successors(
self,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
stoptime: tuple[str, int, int] = None,
cache: dict = None,
):
graph, orig = parentity
added = set()
deleted = set()
cache = cache or self.successors
if (graph, orig) in cache and cache[graph, orig]:
for dest in cache[graph, orig]:
addidx, delidx = self._get_adds_dels(
(graph, orig, dest), branch, turn, tick, stoptime=stoptime
)
if addidx and not delidx:
added.add(dest)
elif delidx and not addidx:
deleted.add(dest)
kf = self.keyframe
itparbtt = self.db._iter_parent_btt
its = list(kf.items())
for ks, v in its:
assert len(ks) == 3, "Bad key in keyframe: " + repr(ks)
for (grap, org, dest), kfg in its: # too much iteration!
if (grap, org) != (graph, orig):
continue
for branc, trn, tck in itparbtt(
branch, turn, tick, stoptime=stoptime
):
if branc not in kfg:
continue
kfgb = kfg[branc]
if trn in kfgb:
kfgbr = kfgb[trn]
if kfgbr.rev_gettable(tck):
if kfgbr[tck][0] and dest not in deleted:
added.add(dest)
continue
if kfgb.rev_gettable(trn):
if kfgb[trn].final()[0] and dest not in deleted:
added.add(dest)
for ks in kf.keys():
assert len(ks) == 3, "BBadd key in keyframe: " + repr(ks)
return added, deleted
def _adds_dels_predecessors(
self,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
stoptime: tuple[str, int, int] = None,
cache: dict = None,
):
graph, dest = parentity
added = set()
deleted = set()
cache = cache or self.predecessors
if cache[graph, dest]:
for orig in cache[graph, dest]:
addidx, delidx = self._get_adds_dels(
(graph, orig, dest), branch, turn, tick, stoptime=stoptime
)
if addidx and not delidx:
added.add(orig)
elif delidx and not addidx:
deleted.add(orig)
else:
kf = self.keyframe
itparbtt = self.db._iter_parent_btt
for (grap, orig, dst), kfg in kf.items(): # too much iteration!
if (grap, dst) != (graph, dest):
continue
for branc, trn, tck in itparbtt(
branch, turn, tick, stoptime=stoptime
):
if branc not in kfg:
continue
kfgb = kfg[branc]
if trn in kfgb:
kfgbr = kfgb[trn]
if kfgbr.rev_gettable(tck):
if kfgbr[tck][0]:
added.add(orig)
continue
if kfgb.rev_gettable(trn):
if kfgb[trn].final()[0]:
added.add(orig)
return added, deleted
def _get_destcache(
self,
graph: Hashable,
orig: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
"""Return a set of destination nodes succeeding ``orig``"""
(
destcache,
destcache_lru,
get_keycachelike,
successors,
adds_dels_sucpred,
) = self._get_destcache_stuff
return get_keycachelike(
destcache,
successors,
adds_dels_sucpred,
(graph, orig),
branch,
turn,
tick,
forward=forward,
)
def _get_origcache(
self,
graph: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
"""Return a set of origin nodes leading to ``dest``"""
(
origcache,
origcache_lru,
get_keycachelike,
predecessors,
adds_dels_sucpred,
) = self._get_origcache_stuff
return get_keycachelike(
origcache,
predecessors,
adds_dels_sucpred,
(graph, dest),
branch,
turn,
tick,
forward=forward,
)
def iter_successors(
self, graph, orig, branch, turn, tick, *, forward=None
):
"""Iterate over successors of a given origin node at a given time."""
if self.db._no_kc:
yield from self._adds_dels_successors(
(graph, orig), branch, turn, tick
)[0]
return
if forward is None:
forward = self.db._forward
yield from self._get_destcache(
graph, orig, branch, turn, tick, forward=forward
)
def iter_predecessors(
self,
graph: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
"""Iterate over predecessors to a destination node at a given time."""
if self.db._no_kc:
yield from self._adds_dels_predecessors(
(graph, dest), branch, turn, tick
)[0]
return
if forward is None:
forward = self.db._forward
yield from self._get_origcache(
graph, dest, branch, turn, tick, forward=forward
)
def count_successors(
self,
graph: Hashable,
orig: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: str = None,
):
"""Return the number of successors to an origin node at a given time."""
if self.db._no_kc:
return len(
self._adds_dels_successors((graph, orig), branch, turn, tick)[
0
]
)
if forward is None:
forward = self.db._forward
return len(
self._get_destcache(
graph, orig, branch, turn, tick, forward=forward
)
)
def count_predecessors(
self,
graph: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
"""Return the number of predecessors from a destination node at a time."""
if self.db._no_kc:
return len(
self._adds_dels_predecessors(
(graph, dest), branch, turn, tick
)[0]
)
if forward is None:
forward = self.db._forward
return len(
self._get_origcache(
graph, dest, branch, turn, tick, forward=forward
)
)
def has_successor(
self,
graph: Hashable,
orig: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
"""Return whether an edge connects the origin to the destination now"""
# Use a keycache if we have it.
# If we don't, only generate one if we're forwarding, and only
# if it's no more than a turn ago.
keycache_key = (graph, orig, dest, branch)
if keycache_key in self.keycache:
return dest in self._get_destcache(
graph, orig, branch, turn, tick, forward=forward
)
got = self._base_retrieve((graph, orig, dest, 0, branch, turn, tick))
return got is not None and not isinstance(got, Exception)
def has_predecessor(
self,
graph: Hashable,
dest: Hashable,
orig: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
"""Return whether an edge connects the destination to the origin now"""
got = self._base_retrieve((graph, orig, dest, 0, branch, turn, tick))
return got is not None and not isinstance(got, Exception)
def store(
self,
graph,
orig,
dest,
idx,
branch,
turn,
tick,
ex,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
if contra is None:
contra = not loading
db, predecessors, successors = self._additional_store_stuff
if not ex:
ex = None
if planning is None:
planning = db._planning
Cache.store(
self,
graph,
orig,
dest,
idx,
branch,
turn,
tick,
ex,
planning=planning,
forward=forward,
loading=loading,
contra=contra,
)
try:
predecessors[graph, dest][orig][idx][branch][turn] = successors[
graph, orig
][dest][idx][branch][turn]
except HistoricKeyError:
pass
|
class EdgesCache(Cache):
'''A cache for remembering whether edges exist at a given time.'''
@property
def successors(self):
pass
def __init__(self, db):
pass
def gettest(k):
pass
def settest(k, v):
pass
def total_size(self, handlers=(), verbose=False):
pass
def get_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int, copy=True
):
pass
def _update_keycache(self, *args, forward: bool):
pass
def _slow_iter_node_contradicted_times(
self,
branch: str,
turn: int,
tick: int,
graph: Hashable,
node: Hashable,
):
pass
def _adds_dels_successors(
self,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
stoptime: tuple[str, int, int] = None,
cache: dict = None,
):
pass
def _adds_dels_predecessors(
self,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
stoptime: tuple[str, int, int] = None,
cache: dict = None,
):
pass
def _get_destcache(
self,
graph: Hashable,
orig: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
'''Return a set of destination nodes succeeding ``orig``'''
pass
def _get_origcache(
self,
graph: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
'''Return a set of origin nodes leading to ``dest``'''
pass
def iter_successors(
self, graph, orig, branch, turn, tick, *, forward=None
):
'''Iterate over successors of a given origin node at a given time.'''
pass
def iter_predecessors(
self,
graph: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
'''Iterate over predecessors to a destination node at a given time.'''
pass
def count_successors(
self,
graph: Hashable,
orig: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: str = None,
):
'''Return the number of successors to an origin node at a given time.'''
pass
def count_predecessors(
self,
graph: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
'''Return the number of predecessors from a destination node at a time.'''
pass
def has_successor(
self,
graph: Hashable,
orig: Hashable,
dest: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
'''Return whether an edge connects the origin to the destination now'''
pass
def has_predecessor(
self,
graph: Hashable,
dest: Hashable,
orig: Hashable,
branch: str,
turn: int,
tick: int,
*,
forward: bool = None,
):
'''Return whether an edge connects the destination to the origin now'''
pass
def store(
self,
graph,
orig,
dest,
idx,
branch,
turn,
tick,
ex,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
pass
| 21 | 9 | 23 | 0 | 23 | 1 | 4 | 0.04 | 1 | 15 | 5 | 0 | 17 | 8 | 17 | 44 | 472 | 20 | 438 | 192 | 308 | 17 | 182 | 74 | 162 | 16 | 1 | 6 | 73 |
146,497 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/cache.py
|
lisien.allegedb.cache.Cache
|
class Cache:
"""A data store that's useful for tracking graph revisions."""
name: str
def __init__(self, db, name: str, kfkvs=None):
self.name = name
self.db = db
self.parents = StructuredDefaultDict(3, SettingsTurnDict)
"""Entity data keyed by the entities' parents.
An entity's parent is what it's contained in. When speaking of a node,
this is its graph. When speaking of an edge, the parent is usually the
graph and the origin in a pair, though for multigraphs the destination
might be part of the parent as well.
Deeper layers of this cache are keyed by branch and revision.
"""
self.keys = StructuredDefaultDict(2, SettingsTurnDict)
"""Cache of entity data keyed by the entities themselves.
That means the whole tuple identifying the entity is the
top-level key in this cache here. The second-to-top level
is the key within the entity.
Deeper layers of this cache are keyed by branch, turn, and tick.
"""
self.keycache = PickyDefaultDict(SettingsTurnDict)
"""Keys an entity has at a given turn and tick."""
self.branches = StructuredDefaultDict(1, SettingsTurnDict)
"""A less structured alternative to ``keys``.
For when you already know the entity and the key within it,
but still need to iterate through history to find the value.
"""
self.keyframe = StructuredDefaultDict(
1, SettingsTurnDict, **(kfkvs or {})
)
"""Key-value dictionaries representing my state at a given time"""
self.shallowest = OrderedDict()
"""A dictionary for plain, unstructured hinting."""
self.settings = PickyDefaultDict(EntikeySettingsTurnDict)
"""All the ``entity[key] = value`` settings on some turn"""
self.presettings = PickyDefaultDict(EntikeySettingsTurnDict)
"""The values prior to ``entity[key] = value`` settings on some turn"""
self.time_entity = {}
self._kc_lru = OrderedDict()
self._lock = RLock()
self._store_stuff = (
self._lock,
self.parents,
self.branches,
self.keys,
db.delete_plan,
db._time_plan,
self._iter_future_contradictions,
db._branches,
db._turn_end,
self._store_journal,
self.time_entity,
db._where_cached,
self.keycache,
db,
self._update_keycache,
)
self._remove_stuff = (
self._lock,
self.time_entity,
self.parents,
self.branches,
self.keys,
self.settings,
self.presettings,
self._remove_keycache,
self.keycache,
)
self._truncate_stuff = (
self._lock,
self.parents,
self.branches,
self.keys,
self.settings,
self.presettings,
self.keycache,
)
self._store_journal_stuff: tuple[
PickyDefaultDict, PickyDefaultDict, callable
] = (self.settings, self.presettings, self._base_retrieve)
def total_size(self, handlers=(), verbose=False):
"""Returns the approximate memory footprint an object and all of its contents.
Automatically finds the contents of the following builtin containers and
their subclasses: tuple, list, deque, dict, set and frozenset.
To search other containers, add handlers to iterate over their contents:
handlers = {SomeContainerClass: iter,
OtherContainerClass: OtherContainerClass.get_elements}
From https://code.activestate.com/recipes/577504-compute-memory-footprint-of-an-object-and-its-cont/download/1/
"""
all_handlers = {
tuple: iter,
list: iter,
deque: iter,
WindowDict: lambda d: [d._past, d._future, d._keys],
dict: lambda d: chain.from_iterable(d.items()),
set: iter,
frozenset: iter,
Cache: lambda o: [
o.branches,
o.settings,
o.presettings,
o.keycache,
],
}
all_handlers.update(handlers)
seen = set() # track which object id's have already been seen
default_size = getsizeof(
0
) # estimate sizeof object without __sizeof__
def sizeof(o):
if id(o) in seen: # do not double count the same object
return 0
seen.add(id(o))
s = getsizeof(o, default_size)
if verbose:
print(s, type(o), repr(o), file=stderr)
for typ, handler in all_handlers.items():
if isinstance(o, typ):
s += sum(map(sizeof, handler(o)))
break
return s
return sizeof(self)
def _get_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int
) -> dict:
if graph_ent not in self.keyframe:
raise KeyframeError("Unknown graph-entity", graph_ent)
g = self.keyframe[graph_ent]
if branch not in g:
raise KeyframeError("Unknown branch", branch)
b = g[branch]
if turn not in b:
raise KeyframeError("Unknown turn", branch, turn)
r = b[turn]
if tick not in r:
raise KeyframeError("Unknown tick", branch, turn, tick)
ret = r[tick]
return ret
def get_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int, copy=True
):
ret = self._get_keyframe(graph_ent, branch, turn, tick)
if copy:
ret = ret.copy()
return ret
def _set_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int, keyframe
):
if not isinstance(graph_ent, tuple):
raise TypeError(
"Keyframes can only be set to tuples identifying graph entities"
)
if not isinstance(branch, str):
raise TypeError("Branches must be strings")
if not isinstance(turn, int):
raise TypeError("Turns must be integers")
if turn < 0:
raise ValueError("Turns can't be negative")
if not isinstance(tick, int):
raise TypeError("Ticks must be integers")
if tick < 0:
raise ValueError("Ticks can't be negative")
kfg = self.keyframe[graph_ent]
if branch in kfg:
kfgb = kfg[branch]
if turn in kfgb:
kfgb[turn][tick] = keyframe
else:
kfgb[turn] = {tick: keyframe}
else:
d = SettingsTurnDict()
d[turn] = {tick: keyframe}
kfg[branch] = d
def set_keyframe(
self,
graph_ent: tuple,
branch: str,
turn: int,
tick: int,
keyframe: dict,
):
self._set_keyframe(graph_ent, branch, turn, tick, keyframe)
def copy_keyframe(self, branch_from, branch_to, turn, tick):
for graph_ent in self.keyframe:
try:
kf = self._get_keyframe(graph_ent, branch_from, turn, tick)
except KeyframeError:
continue
if isinstance(kf, dict):
kf = kf.copy()
self._set_keyframe(graph_ent, branch_to, turn, tick, kf)
def load(self, data):
"""Add a bunch of data. Must be in chronological order.
But it doesn't need to all be from the same branch, as long as
each branch is chronological of itself.
"""
def sort_key(v):
if isinstance(v, tuple):
return (2,) + tuple(map(repr, v))
if isinstance(v, str):
return 1, v
return 0, repr(v)
branches = defaultdict(list)
for row in data:
branches[row[-4]].append(row)
db = self.db
# Make keycaches and valcaches. Must be done chronologically
# to make forwarding work.
childbranch = db._childbranch
branch2do = deque(["trunk"])
store = self.store
while branch2do:
branch = branch2do.popleft()
for row in sorted(branches[branch], key=sort_key):
store(*row, planning=False, loading=True)
if branch in childbranch:
branch2do.extend(childbranch[branch])
def _valcache_lookup(self, cache: dict, branch: str, turn: int, tick: int):
"""Return the value at the given time in ``cache``"""
for b, r, t in self.db._iter_parent_btt(branch, turn, tick):
if b in cache:
if r in cache[b] and cache[b][r].rev_gettable(t):
try:
return cache[b][r][t]
except HistoricKeyError as ex:
if ex.deleted:
raise
elif cache[b].rev_gettable(r - 1):
cbr = cache[b][r - 1]
try:
return cbr.final()
except HistoricKeyError as ex:
if ex.deleted:
raise
def _get_keycachelike(
self,
keycache: dict,
keys: dict,
get_adds_dels: callable,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
"""Try to retrieve a frozenset representing extant keys.
If I can't, generate one, store it, and return it.
"""
keycache_key = parentity + (branch,)
keycache2 = keycache3 = None
if keycache_key in keycache:
keycache2 = keycache[keycache_key]
if turn in keycache2:
keycache3 = keycache2[turn]
if tick in keycache3:
return keycache3[tick]
with self._lock:
if forward:
# Take valid values from the past of a keycache and copy them
# forward, into the present. Assumes that time is only moving
# forward, never backward, never skipping any turns or ticks,
# and any changes to the world state are happening through
# allegedb proper, meaning they'll all get cached. In lisien this
# means every change to the world state should happen inside of
# a call to ``Engine.next_turn`` in a rule.
if keycache2 and keycache2.rev_gettable(turn):
# there's a keycache from a prior turn in this branch. Get it
if turn not in keycache2:
# since it's not this *exact* turn, there might be changes
old_turn = keycache2.rev_before(turn)
old_turn_kc = keycache2[turn]
added, deleted = get_adds_dels(
parentity,
branch,
turn,
tick,
stoptime=(branch, old_turn, old_turn_kc.end),
cache=keys,
)
try:
ret = (
old_turn_kc.final()
.union(added)
.difference(deleted)
)
except KeyError:
ret = frozenset()
# assert ret == get_adds_dels(
# keys[parentity], branch, turn, tick)[0] # slow
new_turn_kc = WindowDict()
new_turn_kc[tick] = ret
keycache2[turn] = new_turn_kc
return ret
if not keycache3:
keycache3 = keycache2[turn]
if tick not in keycache3:
if keycache3.rev_gettable(tick):
added, deleted = get_adds_dels(
parentity,
branch,
turn,
tick,
stoptime=(
branch,
turn,
keycache3.rev_before(tick),
),
cache=keys,
)
ret = (
keycache3[tick]
.union(added)
.difference(deleted)
)
# assert ret == get_adds_dels(
# keys[parentity], branch, turn, tick)[0] # slow
keycache3[tick] = ret
return ret
else:
turn_before = keycache2.rev_before(turn)
if turn_before == turn:
tick_before = tick
if keycache2[turn_before].rev_gettable(tick):
keys_before = keycache2[turn_before][tick]
else:
keys_before = frozenset()
else:
tick_before = keycache2[turn_before].end
keys_before = keycache2[turn_before][
tick_before
]
added, deleted = get_adds_dels(
parentity,
branch,
turn,
tick,
stoptime=(branch, turn_before, tick_before),
cache=keys,
)
ret = keycache3[tick] = keys_before.union(
added
).difference(deleted)
# assert ret == get_adds_dels(
# keys[parentity], branch, turn, tick)[0] # slow
return ret
# assert kcturn[tick] == get_adds_dels(
# keys[parentity], branch, turn, tick)[0] # slow
return keycache3[tick]
# still have to get a stoptime -- the time of the last keyframe
stoptime, _ = self.db._build_keyframe_window(branch, turn, tick)
if stoptime is None:
ret = None
if parentity in self.keyframe:
keyframes = self.keyframe[parentity]
if branch in keyframes:
kfb = keyframes[branch]
if turn in kfb:
kfbr = kfb[turn]
if tick in kfbr:
ret = frozenset(kfbr[tick].keys())
if ret is None:
adds, _ = get_adds_dels(parentity, branch, turn, tick)
ret = frozenset(adds)
elif stoptime == (branch, turn, tick):
try:
kf = self._get_keyframe(parentity, branch, turn, tick)
ret = frozenset(kf.keys())
except KeyframeError:
adds, _ = get_adds_dels(
parentity, branch, turn, tick, stoptime=stoptime
)
ret = frozenset(adds)
else:
adds, _ = get_adds_dels(
parentity, branch, turn, tick, stoptime=stoptime
)
ret = frozenset(adds)
if keycache2:
if keycache3:
keycache3[tick] = ret
else:
keycache2[turn] = {tick: ret}
else:
kcc = SettingsTurnDict()
kcc[turn] = {tick: ret}
keycache[keycache_key] = kcc
return ret
def _get_keycache(
self,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
"""Get a frozenset of keys that exist in the entity at the moment.
With ``forward=True``, enable an optimization that copies old key sets
forward and updates them.
"""
return self._get_keycachelike(
self.keycache,
self.keys,
self._get_adds_dels,
parentity,
branch,
turn,
tick,
forward=forward,
)
def _update_keycache(self, *args, forward: bool):
"""Add or remove a key in the set describing the keys that exist."""
entity: Hashable
key: Hashable
branch: str
turn: int
tick: int
entity, key, branch, turn, tick, value = args[-6:]
parent = args[:-6]
kc = self._get_keycache(
parent + (entity,), branch, turn, tick, forward=forward
)
if value is None:
kc = kc.difference((key,))
else:
kc = kc.union((key,))
if parent + (entity, branch) not in self.keycache:
self.keycache[parent + (entity, branch)] = SettingsTurnDict(
{turn: {tick: kc}}
)
else:
self.keycache[parent + (entity, branch)][turn][tick] = kc
def _get_adds_dels(
self,
entity: Hashable,
branch: str,
turn: int,
tick: int,
*,
stoptime: tuple[str, int, int] = None,
cache: dict = None,
):
"""Return a pair of sets describing changes to the entity's keys
Returns a pair of sets: ``(added, deleted)``. These are the changes
to the key set that occurred since ``stoptime``, which, if present,
should be a triple ``(branch, turn, tick)``.
With ``stoptime=None`` (the default), ``added`` will in fact be all
keys, and ``deleted`` will be empty.
"""
# Not using the journal because that doesn't distinguish entities.
# I think I might not want to use ``stoptime`` at all, now that
# there is such a thing as keyframes...
cache = cache or self.keys
added = set()
deleted = set()
kf = self.keyframe.get(entity, None)
for key, branches in cache.get(entity, {}).items():
for branc, trn, tck in self.db._iter_parent_btt(
branch, turn, tick, stoptime=stoptime
):
if branc not in branches or not branches[branc].rev_gettable(
trn
):
continue
turnd = branches[branc]
if trn in turnd:
if turnd[trn].rev_gettable(tck):
if turnd[trn][tck] is None:
deleted.add(key)
else:
added.add(key)
break
else:
trn -= 1
if not turnd.rev_gettable(trn):
break
tickd = turnd[trn]
if tickd.final() is None:
deleted.add(key)
else:
added.add(key)
break
if not kf:
return added, deleted
for branc, trn, tck in self.db._iter_parent_btt(
branch, turn, tick, stoptime=stoptime
):
if branc not in kf or not kf[branc].rev_gettable(trn):
continue
kfb = kf[branc]
if trn in kfb and kfb[trn].rev_gettable(tck):
added.update(set(kfb[trn][tck]).difference(deleted))
elif kfb.rev_gettable(trn):
try:
additions = set(kfb[trn].final())
except KeyError:
additions = set()
added.update(additions.difference(deleted))
else:
continue
break
return added, deleted
def store(
self,
*args,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
"""Put a value in various dictionaries for later .retrieve(...).
Needs at least five arguments, of which the -1th is the value
to store, the -2th is the tick to store it at, the -3th
is the turn to store it in, the -4th is the branch the
revision is in, the -5th is the key the value is for,
and the remaining arguments identify the entity that has
the key, eg. a graph, node, or edge.
With ``planning=True``, you will be permitted to alter
"history" that takes place after the last non-planning
moment of time, without much regard to consistency.
Otherwise, contradictions will be handled by deleting
everything in the contradicted plan after the present moment,
unless you set ``contra=False``.
``loading=True`` prevents me from updating the ORM's records
of the ends of branches and turns.
"""
(
lock,
self_parents,
self_branches,
self_keys,
delete_plan,
time_plan,
self_iter_future_contradictions,
db_branches,
db_turn_end,
self_store_journal,
self_time_entity,
db_where_cached,
keycache,
db,
update_keycache,
) = self._store_stuff
if planning is None:
planning = db._planning
if forward is None:
forward = db._forward
if contra is None:
contra = not loading
entity: Hashable
key: Hashable
branch: str
turn: int
tick: int
entity, key, branch, turn, tick, value = args[-6:]
if loading:
self.db._updload(branch, turn, tick)
parent = args[:-6]
entikey = (entity, key)
parentikey = parent + (entity, key)
contras = []
with lock:
if parent:
parentity = self_parents[parent][entity]
if key in parentity:
branches = parentity[key]
turns = branches[branch]
else:
branches = self_branches[parentikey] = self_keys[
parent + (entity,)
][key] = parentity[key]
turns = branches[branch]
else:
if entikey in self_branches:
branches = self_branches[entikey]
turns = branches[branch]
else:
branches = self_branches[entikey]
self_keys[entity,][key] = branches
turns = branches[branch]
if planning:
if turn in turns and tick < turns[turn].end:
raise HistoricKeyError(
"Already have some ticks after {} in turn {} of branch {}".format(
tick, turn, branch
)
)
if contra:
contras = list(
self_iter_future_contradictions(
entity, key, turns, branch, turn, tick, value
)
)
if contras:
self.shallowest = {}
for contra_turn, contra_tick in contras:
if (
branch,
contra_turn,
contra_tick,
) in time_plan: # could've been deleted in this very loop
delete_plan(
time_plan[branch, contra_turn, contra_tick]
)
branches[branch] = turns
if not loading and not planning:
parbranch, turn_start, tick_start, turn_end, tick_end = (
db_branches[branch]
)
if (turn, tick) > (turn_end, tick_end):
db_branches[branch] = (
parbranch,
turn_start,
tick_start,
turn,
tick,
)
if tick > db_turn_end[branch, turn]:
db_turn_end[branch, turn] = tick
self_store_journal(*args)
self.shallowest[parent + (entity, key, branch, turn, tick)] = value
if turn in turns:
the_turn = turns[turn]
the_turn.truncate(tick)
the_turn[tick] = value
else:
new = FuturistWindowDict()
new[tick] = value
turns[turn] = new
self_time_entity[branch, turn, tick] = parent, entity, key
where_cached = db_where_cached[args[-4:-1]]
if self not in where_cached:
where_cached.append(self)
# if we're editing the past, have to invalidate the keycache
keycache_key = parent + (entity, branch)
if keycache_key in keycache:
thiskeycache = keycache[keycache_key]
if turn in thiskeycache:
thiskeycache[turn].truncate(tick)
if not thiskeycache[turn]:
del thiskeycache[turn]
else:
thiskeycache.truncate(turn)
if not thiskeycache:
del keycache[keycache_key]
if not db._no_kc:
update_keycache(*args, forward=forward)
def remove_character(self, character):
(
lock,
time_entity,
parents,
branches,
keys,
settings,
presettings,
remove_keycache,
keycache,
) = self._remove_stuff
todel = {
(branch, turn, tick, parent, entity, key)
for (
(branch, turn, tick),
(parent, entity, key),
) in time_entity.items()
if (parent and parent[0] == character)
or (not parent and entity == character)
}
todel_shallow = {k for k in self.shallowest if k[0] == character}
with lock:
for k in todel_shallow:
del self.shallowest[k]
for branch, turn, tick, parent, entity, key in todel:
self._remove_btt_parentikey(
branch, turn, tick, parent, entity, key
)
def remove_branch(self, branch: str):
(
lock,
time_entity,
parents,
branches,
keys,
settings,
presettings,
remove_keycache,
keycache,
) = self._remove_stuff
todel = {
(branc, turn, tick, parent, entity, key)
for (
(branc, turn, tick),
(parent, entity, key),
) in time_entity.items()
if branc == branch
}
todel_shallow = {k for k in self.shallowest if k[-2] == branch}
with lock:
for k in todel_shallow:
del self.shallowest[k]
for branc, turn, tick, parent, entity, key in todel:
self._remove_btt_parentikey(
branc, turn, tick, parent, entity, key
)
if (
*parent,
entity,
key,
branc,
turn,
tick,
) in self.shallowest:
del self.shallowest[
(*parent, entity, key, branc, turn, tick)
]
def _remove_btt_parentikey(self, branch, turn, tick, parent, entity, key):
(
_,
time_entity,
parents,
branches,
keys,
settings,
presettings,
remove_keycache,
keycache,
) = self._remove_stuff
try:
del time_entity[branch][turn][tick]
except KeyError:
pass
branchkey = parent + (entity, key)
keykey = parent + (entity,)
if parent in parents:
parentt = parents[parent]
if entity in parentt:
entty = parentt[entity]
if key in entty:
kee = entty[key]
if branch in kee:
del kee[branch]
if not kee:
del entty[key]
if not entty:
del parentt[entity]
if not parentt:
del parents[parent]
if branchkey in branches:
entty = branches[branchkey]
if branch in entty:
del entty[branch]
if not entty:
del branches[branchkey]
if keykey in keys:
entty = keys[keykey]
if key in entty:
kee = entty[key]
if branch in kee:
del kee[branch]
if not kee:
del entty[key]
if not entty:
del keys[keykey]
def remove(self, branch: str, turn: int, tick: int):
"""Delete all data from a specific tick"""
(
lock,
time_entity,
parents,
branches,
keys,
settings,
presettings,
remove_keycache,
keycache,
) = self._remove_stuff
parent, entity, key = time_entity[branch, turn, tick]
branchkey = parent + (entity, key)
keykey = parent + (entity,)
with lock:
if parent in parents:
parentt = parents[parent]
if entity in parentt:
entty = parentt[entity]
if key in entty:
kee = entty[key]
if branch in kee:
branhc = kee[branch]
if turn in branhc:
trn = branhc[turn]
del trn[tick]
if not trn:
del branhc[turn]
if not branhc:
del kee[branch]
if not kee:
del entty[key]
if not entty:
del parentt[entity]
if not parentt:
del parents[parent]
if branchkey in branches:
entty = branches[branchkey]
if branch in entty:
branhc = entty[branch]
if turn in branhc:
trn = branhc[turn]
if tick in trn:
del trn[tick]
if not trn:
del branhc[turn]
if not branhc:
del entty[branch]
if not entty:
del branches[branchkey]
if keykey in keys:
entty = keys[keykey]
if key in entty:
kee = entty[key]
if branch in kee:
branhc = kee[branch]
if turn in branhc:
trn = branhc[turn]
if tick in trn:
del trn[tick]
if not trn:
del branhc[turn]
if not branhc:
del kee[branch]
if not kee:
del entty[key]
if not entty:
del keys[keykey]
branhc = settings[branch]
pbranhc = presettings[branch]
trn = branhc[turn]
ptrn = pbranhc[turn]
if tick in trn:
del trn[tick]
if tick in ptrn:
del ptrn[tick]
if not ptrn:
del pbranhc[turn]
del branhc[turn]
if not pbranhc:
del settings[branch]
del presettings[branch]
self.shallowest = OrderedDict()
remove_keycache(parent + (entity, branch), turn, tick)
def _remove_keycache(self, entity_branch: tuple, turn: int, tick: int):
"""Remove the future of a given entity from a branch in the keycache"""
keycache = self.keycache
if entity_branch in keycache:
kc = keycache[entity_branch]
if turn in kc:
kcturn = kc[turn]
if tick in kcturn:
del kcturn[tick]
kcturn.truncate(tick)
if not kcturn:
del kc[turn]
kc.truncate(turn)
if not kc:
del keycache[entity_branch]
def truncate(self, branch: str, turn: int, tick: int, direction="forward"):
if direction not in {"forward", "backward"}:
raise ValueError("Illegal direction")
(lock, parents, branches, keys, settings, presettings, keycache) = (
self._truncate_stuff
)
def truncate_branhc(branhc):
if turn in branhc:
trn = branhc[turn]
trn.truncate(tick, direction)
branhc.truncate(turn, direction)
if turn in branhc and not branhc[turn]:
del branhc[turn]
else:
branhc.truncate(turn, direction)
with lock:
for entities in parents.values():
for keys in entities.values():
for branches in keys.values():
if branch not in branches:
continue
truncate_branhc(branches[branch])
for branches in branches.values():
if branch not in branches:
continue
truncate_branhc(branches[branch])
for keys in keys.values():
for branches in keys.values():
if branch not in branches:
continue
truncate_branhc(branches[branch])
truncate_branhc(settings[branch])
truncate_branhc(presettings[branch])
self.shallowest = OrderedDict()
for entity_branch in keycache:
if entity_branch[-1] == branch:
truncate_branhc(keycache[entity_branch])
@staticmethod
def _iter_future_contradictions(
entity: Hashable,
key: Hashable,
turns: WindowDict,
branch: str,
turn: int,
tick: int,
value,
):
"""Iterate over contradicted ``(turn, tick)`` if applicable"""
# assumes that all future entries are in the plan
if not turns:
return
if turn in turns:
future_ticks = turns[turn].future(tick)
for tck, newval in future_ticks.items():
if newval != value:
yield turn, tck
future_turns = turns.future(turn)
elif turns.rev_gettable(turn):
future_turns = turns.future(turn)
else:
future_turns = turns
if not future_turns:
return
for trn, ticks in future_turns.items():
for tick, newval in ticks.items():
if newval != value:
yield trn, tick
def _store_journal(self, *args):
# overridden in lisien.cache.InitializedCache
(settings, presettings, base_retrieve) = self._store_journal_stuff
entity: Hashable
key: Hashable
branch: str
turn: int
tick: int
entity, key, branch, turn, tick, value = args[-6:]
parent = args[:-6]
settings_turns = settings[branch]
presettings_turns = presettings[branch]
prev = base_retrieve(args[:-1])
if isinstance(prev, KeyError):
prev = None
if turn in settings_turns:
# These assertions hold for most caches but not for the contents
# caches, and are therefore commented out.
# assert turn in presettings_turns \
# or turn in presettings_turns.future()
setticks = settings_turns[turn]
# assert tick not in setticks
presetticks = presettings_turns[turn]
# assert tick not in presetticks
presetticks[tick] = parent + (entity, key, prev)
setticks[tick] = parent + (entity, key, value)
else:
presettings_turns[turn] = {tick: parent + (entity, key, prev)}
settings_turns[turn] = {tick: parent + (entity, key, value)}
def _base_retrieve(
self, args, store_hint=True, retrieve_hint=True, search=False
):
"""Hot code.
Swim up the timestream trying to find a value for the
key in the entity that applied at the given (branch, turn, tick).
If we hit a keyframe, return the value therein, or KeyError if
there is none.
May *return* an exception, rather than raising it. This is to enable
use outside try-blocks, which have some performance overhead.
Memoized by default. Use ``store_hint=False`` to avoid making a memo,
``retrieve_hint=False`` to avoid using one already made.
With ``search=True``, use binary search. This isn't the default,
because most retrievals are close to each other.
"""
shallowest = self.shallowest
if retrieve_hint and args in shallowest:
return shallowest[args]
entity: tuple = args[:-4]
key: Hashable
branch: str
turn: int
tick: int
key, branch, turn, tick = args[-4:]
keyframes = self.keyframe.get(entity, {})
branches = self.branches
entikey = entity + (key,)
def get(d: WindowDict, k: int):
if search:
return d.search(k)
else:
return d[k]
def hint(v):
if store_hint:
shallowest[args] = v
return v
if entikey in branches:
branchentk = branches[entikey]
# We have data for this entity and key,
# but a keyframe might have more recent data.
# Iterate over the keyframes in reverse chronological order
# and return either the first value in a keyframe for this
# entity and key, or the first value in our own
# store, whichever took effect later.
it = pairwise(
self.db._iter_keyframes(
branch, turn, tick, loaded=True, with_fork_points=True
)
)
try:
zero, one = next(it)
if zero == (branch, turn, tick):
it = chain([(zero, one)], it)
else:
it = chain([((branch, turn, tick), zero), (zero, one)], it)
except StopIteration:
# There is at most one keyframe in the past.
# If branches has anything later than that, before the present,
# use branches. Otherwise, defer to the keyframe.
def get_chron(b, r, t):
if b in branchentk:
if r in branchentk[b]:
if branchentk[b][r].rev_gettable(t):
return hint(branchentk[b][r][t])
elif (
branchentk[b].rev_before(r, search=search)
is not None
):
return hint(branchentk[b][r].final())
return KeyError("Not in chron data", b, r, t)
kfit = self.db._iter_keyframes(branch, turn, tick, loaded=True)
try:
stoptime = next(kfit)
for b, r, t in self.db._iter_parent_btt(
branch, turn, tick, stoptime=stoptime
):
ret = get_chron(b, r, t)
if isinstance(ret, KeyError):
continue
return ret
b, r, t = stoptime
if (
b in keyframes
and r in keyframes[b]
and t in keyframes[b][r]
):
kf = keyframes[b][r][t]
if key in kf:
return hint(kf[key])
else:
return hint(
NotInKeyframeError(
"No value", entikey, b, r, t
)
)
except StopIteration:
# There are no keyframes in the past at all.
for b, r, t in self.db._iter_parent_btt(
branch, turn, tick
):
ret = get_chron(b, r, t)
if isinstance(ret, KeyError):
continue
return ret
return TotalKeyError(
"No keyframe loaded", entikey, branch, turn, tick
)
if (
b in keyframes
and r in keyframes[b]
and t in keyframes[b][r]
):
kf = keyframes[b][r][t]
if key in kf:
return hint(kf[key])
else:
return NotInKeyframeError("No value", entikey, b, r, t)
else:
return TotalKeyError(
"No keyframe loaded", entikey, b, r, t
)
for (b0, r0, t0), (b1, r1, t1) in it:
if self.db._kf_loaded(b0, r0, t0) and (
b0 in keyframes
and r0 in keyframes[b0]
and t0 in keyframes[b0][r0]
):
# There's a keyframe at this exact moment. Use it.
kf = keyframes[b0][r0][t0]
if key in kf:
return hint(kf[key])
else:
return hint(
NotInKeyframeError("No value", entikey, b0, r0, t0)
)
if b0 in branchentk and r0 in branchentk[b0]:
# No keyframe *right* now; we might have one earlier this turn
# ... but let's check branches first
if t0 in branchentk[b0][r0]:
# Yeah, branches has a value at this very moment!
return hint(branchentk[b0][r0][t0])
elif (
branches_tick := branchentk[b0][r0].rev_before(
t0, search=search
)
) is not None:
# branches has a value this turn.
# Is there a loaded keyframe this turn, as well?
if b0 == b1 and r0 == r1:
# There is; is it more recent than branches' value?
if t1 > branches_tick:
# It is, so use the keyframe.
# If that keyframe includes a value stored here,
# return it; otherwise return an error
if (
b1 in keyframes
and r1 in keyframes[b1]
and t1 in keyframes[b1][r1]
):
kf = keyframes[b1][r1][t1]
if key in kf:
return hint(kf[key])
else:
return hint(
NotInKeyframeError(
"No value", entikey, b1, r1, t1
)
)
else:
return hint(
NotInKeyframeError(
"No value", entikey, b1, r1, t1
)
)
# No keyframe this turn, so use the value from branches
return hint(get(branchentk[b0][r0], t0))
elif b0 in branchentk and (
r0 != r1
and branchentk[b0].rev_gettable(r0)
and (
(
branchentk[b0].rev_before(
r0, search=search) == r1
and get(branchentk[b0], r0).end > t1
)
or branchentk[b0].rev_before(r0, search=search) > r1
)
):
# branches does not have a value *this* turn,
# but has one for a prior turn, and it's still between
# the two keyframes.
return hint(branchentk[b0][r0 - 1].final())
elif self.db._kf_loaded(b1, r1, t1):
# branches has no value between these two keyframes,
# but we have the keyframe further back.
# Which doesn't mean any of its data is stored in
# this cache, though.
if (
b1 not in keyframes
or r1 not in keyframes[b1]
or t1 not in keyframes[b1][r1]
):
return hint(
NotInKeyframeError("No value", entikey, b1, r1, t1)
)
brtk = keyframes[b1][r1][t1]
if key in brtk:
return hint(brtk[key])
else:
return hint(
NotInKeyframeError("No value", entikey, b1, r1, t1)
)
elif keyframes:
# We have no chronological data, just keyframes.
# That makes things easy.
for b0, r0, t0 in self.db._iter_keyframes(
branch, turn, tick, loaded=True
):
if (
b0 not in keyframes
or r0 not in keyframes[b0]
or t0 not in keyframes[b0][r0]
or key not in keyframes[b0][r0][t0]
):
return hint(
NotInKeyframeError("No value", entikey, b0, r0, t0)
)
return hint(keyframes[b0][r0][t0][key])
return hint(TotalKeyError("No value, ever", entikey))
def retrieve(self, *args, search=False):
"""Get a value previously .store(...)'d.
Needs at least five arguments. The -1th is the tick
within the turn you want,
the -2th is that turn, the -3th is the branch,
and the -4th is the key. All other arguments identify
the entity that the key is in.
With ``search=True``, use binary search; otherwise,
seek back and forth like a tape head.
"""
ret = self._base_retrieve(args, search=search)
if ret is None:
raise HistoricKeyError("Set, then deleted", deleted=True)
elif isinstance(ret, Exception):
raise ret
return ret
def iter_entities_or_keys(self, *args, forward: bool = None):
"""Iterate over the keys an entity has, if you specify an entity.
Otherwise iterate over the entities themselves, or at any rate the
tuple specifying which entity.
"""
if forward is None:
forward = self.db._forward
entity: tuple = args[:-3]
branch: str
turn: int
tick: int
branch, turn, tick = args[-3:]
if self.db._no_kc:
yield from self._get_adds_dels(entity, branch, turn, tick)[0]
return
yield from self._get_keycache(
entity, branch, turn, tick, forward=forward
)
iter_entities = iter_keys = iter_entity_keys = iter_entities_or_keys
def count_entities_or_keys(self, *args, forward: bool = None):
"""Return the number of keys an entity has, if you specify an entity.
Otherwise return the number of entities.
"""
if forward is None:
forward = self.db._forward
entity: tuple = args[:-3]
branch: str
turn: int
tick: int
branch, turn, tick = args[-3:]
if self.db._no_kc:
return len(self._get_adds_dels(entity, branch, turn, tick)[0])
return len(
self._get_keycache(entity, branch, turn, tick, forward=forward)
)
count_entities = count_keys = count_entity_keys = count_entities_or_keys
def contains_entity_or_key(self, *args):
"""Check if an entity has a key at the given time, if entity specified.
Otherwise check if the entity exists.
"""
retr = self._base_retrieve(args)
return retr is not None and not isinstance(retr, Exception)
contains_entity = contains_key = contains_entity_key = (
contains_entity_or_key
)
|
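The store and retrieve docstrings above describe a positional convention: the last six positional arguments are always entity, key, branch, turn, tick, and (for store) the value, while anything before them names the entity's parent. A tiny sketch of how that unpacking works, independent of the cache itself; the helper name is made up.

def split_store_args(*args):
    """Mirror Cache.store's convention: trailing args are fixed, the rest are the parent."""
    entity, key, branch, turn, tick, value = args[-6:]
    parent = args[:-6]
    return parent, entity, key, branch, turn, tick, value


# a node's stat: (graph, node, key, branch, turn, tick, value)
assert split_store_args("g", "n", "hp", "trunk", 0, 0, 10) == (
    ("g",), "n", "hp", "trunk", 0, 0, 10
)
# an edge: graph and origin form the parent, the destination is the entity
assert split_store_args("g", "orig", "dest", 0, "trunk", 1, 2, True) == (
    ("g", "orig"), "dest", 0, "trunk", 1, 2, True
)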
class Cache:
'''A data store that's useful for tracking graph revisions.'''
def __init__(self, db, name: str, kfkvs=None):
pass
def total_size(self, handlers=(), verbose=False):
'''Returns the approximate memory footprint of an object and all of its contents.
Automatically finds the contents of the following builtin containers and
their subclasses: tuple, list, deque, dict, set and frozenset.
To search other containers, add handlers to iterate over their contents:
handlers = {SomeContainerClass: iter,
OtherContainerClass: OtherContainerClass.get_elements}
From https://code.activestate.com/recipes/577504-compute-memory-footprint-of-an-object-and-its-cont/download/1/
'''
pass
def sizeof(o):
pass
def _get_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int
) -> dict:
pass
def get_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int, copy=True
):
pass
def _set_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int, keyframe
):
pass
def set_keyframe(
self,
graph_ent: tuple,
branch: str,
turn: int,
tick: int,
keyframe: dict,
):
pass
def copy_keyframe(self, branch_from, branch_to, turn, tick):
pass
def load(self, data):
'''Add a bunch of data. Must be in chronological order.
But it doesn't need to all be from the same branch, as long as
each branch is chronological of itself.
'''
pass
def sort_key(v):
pass
def _valcache_lookup(self, cache: dict, branch: str, turn: int, tick: int):
'''Return the value at the given time in ``cache``'''
pass
def _get_keycachelike(
self,
keycache: dict,
keys: dict,
get_adds_dels: callable,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
'''Try to retrieve a frozenset representing extant keys.
If I can't, generate one, store it, and return it.
'''
pass
def _get_keycache(
self,
parentity: tuple,
branch: str,
turn: int,
tick: int,
*,
forward: bool,
):
'''Get a frozenset of keys that exist in the entity at the moment.
With ``forward=True``, enable an optimization that copies old key sets
forward and updates them.
'''
pass
def _update_keycache(self, *args, forward: bool):
'''Add or remove a key in the set describing the keys that exist.'''
pass
def _get_adds_dels(
self,
entity: Hashable,
branch: str,
turn: int,
tick: int,
*,
stoptime: tuple[str, int, int] = None,
cache: dict = None,
):
'''Return a pair of sets describing changes to the entity's keys
Returns a pair of sets: ``(added, deleted)``. These are the changes
to the key set that occurred since ``stoptime``, which, if present,
should be a triple ``(branch, turn, tick)``.
With ``stoptime=None`` (the default), ``added`` will in fact be all
keys, and ``deleted`` will be empty.
'''
pass
def store(
self,
*args,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
'''Put a value in various dictionaries for later .retrieve(...).
Needs at least five arguments, of which the -1th is the value
to store, the -2th is the tick to store it at, the -3th
is the turn to store it in, the -4th is the branch the
revision is in, the -5th is the key the value is for,
and the remaining arguments identify the entity that has
the key, eg. a graph, node, or edge.
With ``planning=True``, you will be permitted to alter
"history" that takes place after the last non-planning
moment of time, without much regard to consistency.
Otherwise, contradictions will be handled by deleting
everything in the contradicted plan after the present moment,
unless you set ``contra=False``.
``loading=True`` prevents me from updating the ORM's records
of the ends of branches and turns.
'''
pass
def remove_character(self, character):
pass
def remove_branch(self, branch: str):
pass
def _remove_btt_parentikey(self, branch, turn, tick, parent, entity, key):
pass
def remove(self, branch: str, turn: int, tick: int):
'''Delete all data from a specific tick'''
pass
def _remove_keycache(self, entity_branch: tuple, turn: int, tick: int):
'''Remove the future of a given entity from a branch in the keycache'''
pass
def truncate(self, branch: str, turn: int, tick: int, direction="forward"):
pass
def truncate_branch(branch):
pass
@staticmethod
def _iter_future_contradictions(
entity: Hashable,
key: Hashable,
turns: WindowDict,
branch: str,
turn: int,
tick: int,
value,
):
'''Iterate over contradicted ``(turn, tick)`` if applicable'''
pass
def _store_journal(self, *args):
pass
def _base_retrieve(
self, args, store_hint=True, retrieve_hint=True, search=False
):
'''Hot code.
Swim up the timestream trying to find a value for the
key in the entity that applied at the given (branch, turn, tick).
If we hit a keyframe, return the value therein, or KeyError if
there is none.
May *return* an exception, rather than raising it. This is to enable
use outside try-blocks, which have some performance overhead.
Memoized by default. Use ``store_hint=False`` to avoid making a memo,
``retrieve_hint=False`` to avoid using one already made.
With ``search=True``, use binary search. This isn't the default,
because most retrievals are close to each other.
'''
pass
def get_keyframe(
self, graph_ent: tuple, branch: str, turn: int, tick: int, copy=True
):
pass
def hint(v):
pass
def get_chron(b, r, t):
pass
def retrieve(self, *args, search=False):
'''Get a value previously .store(...)'d.
Needs at least five arguments. The -1th is the tick
within the turn you want,
the -2th is that turn, the -3th is the branch,
and the -4th is the key. All other arguments identify
the entity that the key is in.
With ``search=True``, use binary search; otherwise,
seek back and forth like a tape head.
'''
pass
def iter_entities_or_keys(self, *args, forward: bool = None):
'''Iterate over the keys an entity has, if you specify an entity.
Otherwise iterate over the entities themselves, or at any rate the
tuple specifying which entity.
'''
pass
def count_entities_or_keys(self, *args, forward: bool = None):
'''Return the number of keys an entity has, if you specify an entity.
Otherwise return the number of entities.
'''
pass
def contains_entity_or_key(self, *args):
'''Check if an entity has a key at the given time, if entity specified.
Otherwise check if the entity exists.
'''
pass
| 35 | 17 | 41 | 2 | 34 | 5 | 8 | 0.15 | 0 | 27 | 10 | 8 | 26 | 16 | 27 | 27 | 1,334 | 83 | 1,088 | 290 | 995 | 167 | 680 | 183 | 646 | 31 | 0 | 8 | 251 |
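The store/retrieve argument convention documented above (the entity tuple, then the key, branch, turn, tick, and for store a trailing value) is easier to see in a runnable sketch. ToyCache below is a plain-dict stand-in, an illustrative assumption rather than the real lisien Cache; it implements none of the keyframe, plan, or contradiction handling.

# Minimal sketch of the (entity..., key, branch, turn, tick) calling
# convention used by Cache.store(...) and Cache.retrieve(...) above.
class ToyCache:
    def __init__(self):
        self._data = {}

    def store(self, *args):
        # args = (*entity, key, branch, turn, tick, value)
        *entity, key, branch, turn, tick, value = args
        self._data[(tuple(entity), key, branch, turn, tick)] = value

    def retrieve(self, *args):
        # args = (*entity, key, branch, turn, tick)
        *entity, key, branch, turn, tick = args
        return self._data[(tuple(entity), key, branch, turn, tick)]


if __name__ == "__main__":
    cache = ToyCache()
    # Store the value 'red' for key 'color' of node 'kobold' in graph
    # 'physical', at branch 'trunk', turn 3, tick 0, then read it back.
    cache.store("physical", "kobold", "color", "trunk", 3, 0, "red")
    assert cache.retrieve("physical", "kobold", "color", "trunk", 3, 0) == "red"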
146,498 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/timestream.py
|
elide.timestream.TimestreamScreen
|
class TimestreamScreen(Screen):
toggle = ObjectProperty()
timestream = ObjectProperty()
_thread: Thread
def on_pre_enter(self, *_):
self.timestream.disabled = True
self._thread = Thread(target=self._get_data)
self._thread.start()
def _get_data(self, *_):
Logger.debug("Timestream: getting branches")
engine = App.get_running_app().engine
data, cols = _data_and_cols_from_branches(engine.handle("branches"))
self.timestream.cols = cols
self.timestream.data = data
Logger.debug("Timestream: loaded!")
self.timestream.disabled = False
|
class TimestreamScreen(Screen):
def on_pre_enter(self, *_):
pass
def _get_data(self, *_):
pass
| 3 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 18 | 2 | 16 | 7 | 13 | 0 | 16 | 7 | 13 | 1 | 1 | 0 | 2 |
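TimestreamScreen above disables itself, fetches branch data on a worker thread, then re-enables itself. A minimal dependency-free sketch of that pattern follows; ToyScreen and fetch_branches are made up for illustration, and real Kivy code should hand the re-enable back to the main thread (for example with Clock.schedule_once) rather than setting it from the worker.

# Sketch of the load-in-a-background-thread pattern used by
# TimestreamScreen.on_pre_enter above, without any Kivy dependency.
from threading import Thread
import time


def fetch_branches():
    time.sleep(0.1)          # stands in for the slow engine.handle("branches") call
    return {"trunk": (0, 0)}


class ToyScreen:
    def __init__(self):
        self.disabled = True
        self.data = None

    def on_pre_enter(self):
        # Disable the widget, then do the slow work off the calling thread.
        self.disabled = True
        self._thread = Thread(target=self._get_data)
        self._thread.start()

    def _get_data(self):
        self.data = fetch_branches()
        # In real Kivy code this re-enable should be scheduled on the main
        # thread; setting it directly is fine for this plain-Python sketch.
        self.disabled = False


screen = ToyScreen()
screen.on_pre_enter()
screen._thread.join()
assert screen.disabled is False and screen.data is not None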
146,499 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/timestream.py
|
elide.timestream.ThornyRectangle
|
class ThornyRectangle(Button):
left_margin = NumericProperty(10)
right_margin = NumericProperty(10)
top_margin = NumericProperty(10)
bottom_margin = NumericProperty(10)
draw_left = BooleanProperty(False)
draw_right = BooleanProperty(False)
draw_up = BooleanProperty(False)
draw_down = BooleanProperty(False)
branch = StringProperty()
turn = NumericProperty()
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.bind(
pos=self._trigger_redraw,
size=self._trigger_redraw,
left_margin=self._trigger_redraw,
right_margin=self._trigger_redraw,
top_margin=self._trigger_redraw,
bottom_margin=self._trigger_redraw,
draw_left=self._trigger_redraw,
draw_right=self._trigger_redraw,
draw_up=self._trigger_redraw,
draw_down=self._trigger_redraw,
)
self._trigger_redraw()
def collide_point(self, x, y):
return (
self.x + self.left_margin < x < self.right - self.right_margin
and self.y + self.bottom_margin < y < self.top - self.top_margin
)
def _redraw_line(self, enabled, name, point_lambda):
if enabled:
points = point_lambda()
if hasattr(self, name):
the_line = getattr(self, name)
the_line.points = points
else:
the_line = Line(points=points)
if the_line not in self.canvas.children:
self.canvas.add(the_line)
setattr(self, name, the_line)
elif hasattr(self, name):
self.canvas.remove(getattr(self, name))
delattr(self, name)
def _get_left_line_points(self):
return [
self.x,
self.center_y,
self.x + self.left_margin,
self.center_y,
]
def _get_right_line_points(self):
return [
self.right - self.right_margin,
self.center_y,
self.right,
self.center_y,
]
def _get_top_line_points(self):
return [
self.center_x,
self.top,
self.center_x,
self.top - self.top_margin,
]
def _get_bottom_line_points(self):
return [
self.center_x,
self.y,
self.center_x,
self.y + self.bottom_margin,
]
def _redraw(self, *_):
self._color = Color(rgba=[1, 1, 1, 1])
if self._color not in self.canvas.children:
self.canvas.add(self._color)
rectpoints = [
self.x + self.left_margin,
self.y + self.bottom_margin,
self.right - self.right_margin,
self.y + self.bottom_margin,
self.right - self.right_margin,
self.top - self.top_margin,
self.x + self.left_margin,
self.top - self.top_margin,
self.x + self.left_margin,
self.y + self.bottom_margin,
]
if hasattr(self, "_rect"):
self._rect.points = rectpoints
else:
self._rect = Line(points=rectpoints)
self.canvas.add(self._rect)
self._redraw_line(
self.draw_left, "_left_line", self._get_left_line_points
)
self._redraw_line(
self.draw_right, "_right_line", self._get_right_line_points
)
self._redraw_line(self.draw_up, "_top_line", self._get_top_line_points)
self._redraw_line(
self.draw_down, "_bot_line", self._get_bottom_line_points
)
self.canvas.ask_update()
_trigger_redraw = trigger(_redraw)
def on_release(self):
if self.branch is None or self.turn is None:
return
app = App.get_running_app()
app.mainscreen.toggle_timestream()
self._push_time()
@triggered(timeout=0.1)
def _push_time(self, *_):
app = App.get_running_app()
app.time_travel(self.branch, self.turn)
|
class ThornyRectangle(Button):
def __init__(self, **kwargs):
pass
def collide_point(self, x, y):
pass
def _redraw_line(self, enabled, name, point_lambda):
pass
def _get_left_line_points(self):
pass
def _get_right_line_points(self):
pass
def _get_top_line_points(self):
pass
def _get_bottom_line_points(self):
pass
def _redraw_line(self, enabled, name, point_lambda):
pass
def on_release(self):
pass
@triggered(timeout=0.1)
def _push_time(self, *_):
pass
| 12 | 0 | 10 | 0 | 10 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 10 | 2 | 10 | 10 | 129 | 13 | 116 | 30 | 104 | 0 | 61 | 29 | 50 | 5 | 1 | 2 | 17 |
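ThornyRectangle.collide_point above shrinks the widget by its four margins before testing the touch. A self-contained sketch of that inset hit test, using plain tuples instead of a Kivy widget (the collides helper is illustrative, not part of elide):

# A touch only counts if it lands inside the rectangle shrunk by its margins.
def collides(x, y, rect, margins):
    """rect = (left, bottom, right, top); margins = (left, right, top, bottom)."""
    left, bottom, right, top = rect
    ml, mr, mt, mb = margins
    return (left + ml < x < right - mr) and (bottom + mb < y < top - mt)


rect = (0, 0, 100, 60)
margins = (10, 10, 10, 10)
assert collides(50, 30, rect, margins)        # well inside the inner rectangle
assert not collides(5, 30, rect, margins)     # inside the left margin strip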
146,500 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/timestream.py
|
elide.timestream.Cross
|
class Cross(Widget):
draw_left = BooleanProperty(True)
draw_right = BooleanProperty(True)
draw_up = BooleanProperty(True)
draw_down = BooleanProperty(True)
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.bind(
draw_left=self._trigger_redraw,
draw_right=self._trigger_redraw,
draw_up=self._trigger_redraw,
draw_down=self._trigger_redraw,
size=self._trigger_redraw,
pos=self._trigger_redraw,
)
def _draw_line(self, enabled, name, get_points):
if enabled:
points = get_points()
if hasattr(self, name):
getattr(self, name).points = points
else:
the_line = Line(points=points)
setattr(self, name, the_line)
self.canvas.add(the_line)
elif hasattr(self, name):
the_line = getattr(self, name)
if the_line in self.canvas.children:
self.canvas.remove(the_line)
delattr(self, name)
def _get_left_points(self):
return [self.x, self.center_y, self.center_x, self.center_y]
def _get_right_points(self):
return [self.center_x, self.center_y, self.right, self.center_y]
def _get_up_points(self):
return [self.center_x, self.center_y, self.center_x, self.top]
def _get_down_points(self):
return [self.center_x, self.center_y, self.center_x, self.y]
def _redraw(self, *_):
self._draw_line(self.draw_left, "_left_line", self._get_left_points)
self._draw_line(self.draw_right, "_right_line", self._get_right_points)
self._draw_line(self.draw_up, "_up_line", self._get_up_points)
self._draw_line(self.draw_down, "_down_line", self._get_down_points)
self.canvas.ask_update()
_trigger_redraw = trigger(_redraw)
|
class Cross(Widget):
def __init__(self, **kwargs):
pass
def _draw_line(self, enabled, name, get_points):
pass
def _get_left_points(self):
pass
def _get_right_points(self):
pass
def _get_up_points(self):
pass
def _get_down_points(self):
pass
def _redraw(self, *_):
pass
| 8 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 7 | 0 | 7 | 7 | 52 | 8 | 44 | 15 | 36 | 0 | 35 | 15 | 27 | 5 | 1 | 2 | 11 |
146,501 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/util.py
|
elide.tests.util.MockTouch
|
class MockTouch(MotionEvent):
def depack(self, args):
self.is_touch = True
self.sx = args["sx"]
self.sy = args["sy"]
super().depack(args)
|
class MockTouch(MotionEvent):
def depack(self, args):
pass
| 2 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 3 | 1 | 1 | 6 | 0 | 6 | 5 | 4 | 0 | 6 | 5 | 4 | 1 | 1 | 0 | 1 |
146,502 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/card.py
|
elide.card.DeckBuilderLayout
|
class DeckBuilderLayout(Layout):
"""Sizes and positions :class:`Card` objects based on their order
within ``decks``, a list of lists where each sublist is a deck of
cards.
"""
direction = OptionProperty(
"ascending", options=["ascending", "descending"]
)
"""Should the beginning card of each deck appear on the bottom
('ascending'), or the top ('descending')?
"""
card_size_hint_x = BoundedNumericProperty(1, min=0, max=1)
"""Each card's width, expressed as a proportion of my width."""
card_size_hint_y = BoundedNumericProperty(1, min=0, max=1)
"""Each card's height, expressed as a proportion of my height."""
card_size_hint = ReferenceListProperty(card_size_hint_x, card_size_hint_y)
"""Size hint of cards, relative to my size."""
starting_pos_hint = DictProperty({"x": 0, "y": 0})
"""Pos hint at which to place the initial card of the initial deck."""
card_x_hint_step = NumericProperty(0)
"""Each time I put another card on a deck, I'll move it this much of
my width to the right of the previous card.
"""
card_y_hint_step = NumericProperty(-1)
"""Each time I put another card on a deck, I'll move it this much of
my height above the previous card.
"""
card_hint_step = ReferenceListProperty(card_x_hint_step, card_y_hint_step)
"""An offset, expressed in proportion to my size, applied to each
successive card in a given deck.
"""
deck_x_hint_step = NumericProperty(1)
"""When I start a new deck, it will be this far to the right of the
previous deck, expressed as a proportion of my width.
"""
deck_y_hint_step = NumericProperty(0)
"""When I start a new deck, it will be this far above the previous
deck, expressed as a proportion of my height.
"""
deck_hint_step = ReferenceListProperty(deck_x_hint_step, deck_y_hint_step)
"""Offset of each deck with respect to the previous, as a proportion
of my size.
"""
decks = ListProperty([[]]) # list of lists of cards
"""Put a list of lists of :class:`Card` objects here and I'll position
them appropriately. Please don't use ``add_widget``.
"""
deck_x_hint_offsets = ListProperty([])
"""An additional proportional x-offset for each deck, defaulting to 0."""
deck_y_hint_offsets = ListProperty([])
"""An additional proportional y-offset for each deck, defaulting to 0."""
foundation_color = ListProperty([1, 1, 1, 1])
"""Color to use for the outline showing where a deck is when it's
empty.
"""
insertion_deck = BoundedNumericProperty(None, min=0, allownone=True)
"""Index of the deck that a card is being dragged into."""
insertion_card = BoundedNumericProperty(None, min=0, allownone=True)
"""Index within the current deck that a card is being dragged into."""
_foundations = ListProperty([])
"""Private. A list of :class:`Foundation` widgets, one per deck."""
def __init__(self, **kwargs):
"""Bind most of my custom properties to ``_trigger_layout``."""
super().__init__(**kwargs)
self.bind(
card_size_hint=self._trigger_layout,
starting_pos_hint=self._trigger_layout,
card_hint_step=self._trigger_layout,
deck_hint_step=self._trigger_layout,
decks=self._trigger_layout,
deck_x_hint_offsets=self._trigger_layout,
deck_y_hint_offsets=self._trigger_layout,
insertion_deck=self._trigger_layout,
insertion_card=self._trigger_layout,
)
def scroll_deck_x(self, decknum, scroll_x):
"""Move a deck left or right."""
if decknum >= len(self.decks):
raise IndexError("I have no deck at {}".format(decknum))
if decknum >= len(self.deck_x_hint_offsets):
self.deck_x_hint_offsets = list(self.deck_x_hint_offsets) + [0] * (
decknum - len(self.deck_x_hint_offsets) + 1
)
self.deck_x_hint_offsets[decknum] += scroll_x
self._trigger_layout()
def scroll_deck_y(self, decknum, scroll_y):
"""Move a deck up or down."""
if decknum >= len(self.decks):
raise IndexError("I have no deck at {}".format(decknum))
if decknum >= len(self.deck_y_hint_offsets):
self.deck_y_hint_offsets = list(self.deck_y_hint_offsets) + [0] * (
decknum - len(self.deck_y_hint_offsets) + 1
)
self.deck_y_hint_offsets[decknum] += scroll_y
self._trigger_layout()
def scroll_deck(self, decknum, scroll_x, scroll_y):
"""Move a deck."""
self.scroll_deck_x(decknum, scroll_x)
self.scroll_deck_y(decknum, scroll_y)
def _get_foundation_pos(self, i):
"""Private. Get the absolute coordinates to use for a deck's
foundation, based on the ``starting_pos_hint``, the
``deck_hint_step``, ``deck_x_hint_offsets``, and
``deck_y_hint_offsets``.
"""
(phx, phy) = get_pos_hint(self.starting_pos_hint, *self.card_size_hint)
phx += self.deck_x_hint_step * i + self.deck_x_hint_offsets[i]
phy += self.deck_y_hint_step * i + self.deck_y_hint_offsets[i]
x = phx * self.width + self.x
y = phy * self.height + self.y
return (x, y)
def _get_foundation(self, i):
"""Return a :class:`Foundation` for some deck, creating it if
needed.
"""
if i >= len(self._foundations) or self._foundations[i] is None:
oldfound = list(self._foundations)
extend = i - len(oldfound) + 1
if extend > 0:
oldfound += [None] * extend
width = self.card_size_hint_x * self.width
height = self.card_size_hint_y * self.height
found = Foundation(
pos=self._get_foundation_pos(i), size=(width, height), deck=i
)
self.bind(
pos=found.upd_pos,
card_size_hint=found.upd_pos,
deck_hint_step=found.upd_pos,
size=found.upd_pos,
deck_x_hint_offsets=found.upd_pos,
deck_y_hint_offsets=found.upd_pos,
)
self.bind(size=found.upd_size, card_size_hint=found.upd_size)
oldfound[i] = found
self._foundations = oldfound
return self._foundations[i]
def remove_widget(self, widget, *args, **kwargs):
if isinstance(widget, Foundation):
self.unbind(
pos=widget.upd_pos,
card_size_hint=widget.upd_pos,
deck_hint_step=widget.upd_pos,
size=widget.upd_pos,
deck_x_hint_offsets=widget.upd_pos,
deck_y_hint_offsets=widget.upd_pos,
)
self.unbind(size=widget.upd_size, card_size_hint=widget.upd_size)
super().remove_widget(widget, *args, **kwargs)
def on_decks(self, *args):
"""Inform the cards of their deck and their index within the deck;
extend the ``_hint_offsets`` properties as needed; and trigger
a layout.
"""
if None in (
self.canvas,
self.decks,
self.deck_x_hint_offsets,
self.deck_y_hint_offsets,
):
Clock.schedule_once(self.on_decks, 0)
return
self.clear_widgets()
decknum = 0
for deck in self.decks:
cardnum = 0
for card in deck:
if not isinstance(card, Card):
raise TypeError("You must only put Card in decks")
if card not in self.children:
self.add_widget(card)
if card.deck != decknum:
card.deck = decknum
if card.idx != cardnum:
card.idx = cardnum
cardnum += 1
decknum += 1
if len(self.deck_x_hint_offsets) < len(self.decks):
self.deck_x_hint_offsets = list(self.deck_x_hint_offsets) + [0] * (
len(self.decks) - len(self.deck_x_hint_offsets)
)
if len(self.deck_y_hint_offsets) < len(self.decks):
self.deck_y_hint_offsets = list(self.deck_y_hint_offsets) + [0] * (
len(self.decks) - len(self.deck_y_hint_offsets)
)
self._trigger_layout()
def point_before_card(self, card, x, y):
"""Return whether ``(x, y)`` is somewhere before ``card``, given how I
know cards to be arranged.
If the cards are being stacked down and to the right, that
means I'm testing whether ``(x, y)`` is above or to the left
of the card.
"""
def ycmp():
if self.card_y_hint_step == 0:
return False
elif self.card_y_hint_step > 0:
# stacking upward
return y < card.y
else:
# stacking downward
return y > card.top
if self.card_x_hint_step > 0:
# stacking to the right
if x < card.x:
return True
return ycmp()
elif self.card_x_hint_step == 0:
return ycmp()
else:
# stacking to the left
if x > card.right:
return True
return ycmp()
def point_after_card(self, card, x, y):
"""Return whether ``(x, y)`` is somewhere after ``card``, given how I
know cards to be arranged.
If the cards are being stacked down and to the right, that
means I'm testing whether ``(x, y)`` is below or to the left
of ``card``.
"""
def ycmp():
if self.card_y_hint_step == 0:
return False
elif self.card_y_hint_step > 0:
# stacking upward
return y > card.top
else:
# stacking downward
return y < card.y
if self.card_x_hint_step > 0:
# stacking to the right
if x > card.right:
return True
return ycmp()
elif self.card_x_hint_step == 0:
return ycmp()
else:
# stacking to the left
if x < card.x:
return True
return ycmp()
def on_touch_move(self, touch):
"""If a card is being dragged, move other cards out of the way to show
where the dragged card will go if you drop it.
"""
if (
"card" not in touch.ud
or "layout" not in touch.ud
or touch.ud["layout"] != self
):
return
if touch.ud["layout"] == self and not hasattr(
touch.ud["card"], "_topdecked"
):
touch.ud["card"]._topdecked = InstructionGroup()
touch.ud["card"]._topdecked.add(touch.ud["card"].canvas)
self.canvas.after.add(touch.ud["card"]._topdecked)
for i, deck in enumerate(self.decks):
cards = [card for card in deck if not card.dragging]
maxidx = max(card.idx for card in cards) if cards else 0
if self.direction == "descending":
cards.reverse()
cards_collided = [
card for card in cards if card.collide_point(*touch.pos)
]
if cards_collided:
collided = cards_collided.pop()
for card in cards_collided:
if card.idx > collided.idx:
collided = card
if collided.deck == touch.ud["deck"]:
self.insertion_card = (
1
if collided.idx == 0
else maxidx + 1
if collided.idx == maxidx
else collided.idx + 1
if collided.idx > touch.ud["idx"]
else collided.idx
)
else:
dropdeck = self.decks[collided.deck]
maxidx = max(card.idx for card in dropdeck)
self.insertion_card = (
1
if collided.idx == 0
else maxidx + 1
if collided.idx == maxidx
else collided.idx + 1
)
if self.insertion_deck != collided.deck:
self.insertion_deck = collided.deck
return
else:
if self.insertion_deck == i:
if self.insertion_card in (0, len(deck)):
pass
elif self.point_before_card(cards[0], *touch.pos):
self.insertion_card = 0
elif self.point_after_card(cards[-1], *touch.pos):
self.insertion_card = cards[-1].idx
else:
for j, found in enumerate(self._foundations):
if found is not None and found.collide_point(
*touch.pos
):
self.insertion_deck = j
self.insertion_card = 0
return
def on_touch_up(self, touch):
"""If a card is being dragged, put it in the place it was just dropped
and trigger a layout.
"""
if (
"card" not in touch.ud
or "layout" not in touch.ud
or touch.ud["layout"] != self
):
return
if hasattr(touch.ud["card"], "_topdecked"):
self.canvas.after.remove(touch.ud["card"]._topdecked)
del touch.ud["card"]._topdecked
if None not in (self.insertion_deck, self.insertion_card):
# need to sync to adapter.data??
card = touch.ud["card"]
del card.parent.decks[card.deck][card.idx]
for i in range(0, len(card.parent.decks[card.deck])):
card.parent.decks[card.deck][i].idx = i
deck = self.decks[self.insertion_deck]
if self.insertion_card >= len(deck):
deck.append(card)
else:
deck.insert(self.insertion_card, card)
card.deck = self.insertion_deck
card.idx = self.insertion_card
self.decks[self.insertion_deck] = deck
self.insertion_deck = self.insertion_card = None
self._trigger_layout()
def on_insertion_card(self, *args):
"""Trigger a layout"""
if self.insertion_card is not None:
self._trigger_layout()
def do_layout(self, *args):
"""Layout each of my decks"""
if self.size == [1, 1]:
return
for i in range(0, len(self.decks)):
self.layout_deck(i)
def layout_deck(self, i):
"""Stack the cards, starting at my deck's foundation, and proceeding
by ``card_pos_hint``
"""
def get_dragidx(cards):
j = 0
for card in cards:
if card.dragging:
return j
j += 1
# Put a None in the card list in place of the card you're
# hovering over, if you're dragging another card. This will
# result in an empty space where the card will go if you drop
# it now.
cards = list(self.decks[i])
dragidx = get_dragidx(cards)
if dragidx is not None:
del cards[dragidx]
if self.insertion_deck == i and self.insertion_card is not None:
insdx = self.insertion_card
if dragidx is not None and insdx > dragidx:
insdx -= 1
cards.insert(insdx, None)
if self.direction == "descending":
cards.reverse()
# Work out the initial pos_hint for this deck
(phx, phy) = get_pos_hint(self.starting_pos_hint, *self.card_size_hint)
phx += self.deck_x_hint_step * i + self.deck_x_hint_offsets[i]
phy += self.deck_y_hint_step * i + self.deck_y_hint_offsets[i]
(w, h) = self.size
(x, y) = self.pos
# start assigning pos and size to cards
found = self._get_foundation(i)
if found not in self.children:
self.add_widget(found)
for card in cards:
if card is not None:
if card in self.children:
self.remove_widget(card)
(shw, shh) = self.card_size_hint
card.pos = (x + phx * w, y + phy * h)
card.size = (w * shw, h * shh)
self.add_widget(card)
phx += self.card_x_hint_step
phy += self.card_y_hint_step
|
class DeckBuilderLayout(Layout):
'''Sizes and positions :class:`Card` objects based on their order
within ``decks``, a list of lists where each sublist is a deck of
cards.
'''
def __init__(self, **kwargs):
'''Bind most of my custom properties to ``_trigger_layout``.'''
pass
def scroll_deck_x(self, decknum, scroll_x):
'''Move a deck left or right.'''
pass
def scroll_deck_y(self, decknum, scroll_y):
'''Move a deck up or down.'''
pass
def scroll_deck(self, decknum, scroll_x, scroll_y):
'''Move a deck.'''
pass
def _get_foundation_pos(self, i):
'''Private. Get the absolute coordinates to use for a deck's
foundation, based on the ``starting_pos_hint``, the
``deck_hint_step``, ``deck_x_hint_offsets``, and
``deck_y_hint_offsets``.
'''
pass
def _get_foundation(self, i):
'''Return a :class:`Foundation` for some deck, creating it if
needed.
'''
pass
def remove_widget(self, widget, *args, **kwargs):
pass
def on_decks(self, *args):
'''Inform the cards of their deck and their index within the deck;
extend the ``_hint_offsets`` properties as needed; and trigger
a layout.
'''
pass
def point_before_card(self, card, x, y):
'''Return whether ``(x, y)`` is somewhere before ``card``, given how I
know cards to be arranged.
If the cards are being stacked down and to the right, that
means I'm testing whether ``(x, y)`` is above or to the left
of the card.
'''
pass
def ycmp():
pass
def point_after_card(self, card, x, y):
'''Return whether ``(x, y)`` is somewhere after ``card``, given how I
know cards to be arranged.
If the cards are being stacked down and to the right, that
means I'm testing whether ``(x, y)`` is below or to the left
of ``card``.
'''
pass
def ycmp():
pass
def on_touch_move(self, touch):
'''If a card is being dragged, move other cards out of the way to show
where the dragged card will go if you drop it.
'''
pass
def on_touch_up(self, touch):
'''If a card is being dragged, put it in the place it was just dropped
and trigger a layout.
'''
pass
def on_insertion_card(self, *args):
'''Trigger a layout'''
pass
def do_layout(self, *args):
'''Layout each of my decks'''
pass
def layout_deck(self, i):
'''Stack the cards, starting at my deck's foundation, and proceeding
by ``card_pos_hint``
'''
pass
def get_dragidx(cards):
pass
| 19 | 15 | 21 | 1 | 17 | 3 | 5 | 0.32 | 1 | 8 | 2 | 1 | 15 | 1 | 15 | 15 | 436 | 42 | 300 | 72 | 281 | 95 | 217 | 71 | 198 | 22 | 1 | 5 | 85 |
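layout_deck above positions each card from the starting pos hint plus a per-deck step and a per-card step, scaled by the layout's size. A minimal sketch of that arithmetic with made-up numbers (per-deck scroll offsets and drag handling are omitted):

# Each card's pos hint is: starting hint + deck step * deck index
#                          + card step * index of the card within its deck.
def card_pos(layout_pos, layout_size, starting_hint, deck_step, card_step,
             deck_idx, card_idx):
    lx, ly = layout_pos
    w, h = layout_size
    phx = starting_hint[0] + deck_step[0] * deck_idx + card_step[0] * card_idx
    phy = starting_hint[1] + deck_step[1] * deck_idx + card_step[1] * card_idx
    return (lx + phx * w, ly + phy * h)


# Second card (index 1) of the first deck, stacking downward by 10% of the
# layout's height per card, in an 800x600 layout at the origin:
print(card_pos((0, 0), (800, 600), (0.0, 0.9), (0.3, 0.0), (0.0, -0.1), 0, 1))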
146,503 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/util.py
|
elide.tests.util.MockEngine
|
class MockEngine(Signal):
eternal = ListenableDict()
universal = ListenableDict()
character = ListenableDict()
string = ListenableDict()
time = MockTime()
closed = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.turn = self.initial_turn = self.final_turn = 0
self._ready = True
def __setattr__(self, key, value):
if not hasattr(self, "_ready"):
super().__setattr__(key, value)
return
self.send(self, key=key, value=value)
super().__setattr__(key, value)
def next_turn(self, *args, **kwargs):
self.turn += 1
self.final_turn = self.turn
kwargs["cb"]("next_turn", "trunk", self.turn, 0, ([], {}))
def handle(self, *args, **kwargs):
return {"a": "b"}
def commit(self):
pass
|
class MockEngine(Signal):
def __init__(self, *args, **kwargs):
pass
def __setattr__(self, key, value):
pass
def next_turn(self, *args, **kwargs):
pass
def handle(self, *args, **kwargs):
pass
def commit(self):
pass
| 6 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 5 | 4 | 5 | 5 | 30 | 5 | 25 | 14 | 19 | 0 | 25 | 14 | 19 | 2 | 1 | 1 | 6 |
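MockEngine.__setattr__ above emits a signal for every attribute assignment once construction has finished. The Observable class below sketches the same idea with a plain callback list instead of a blinker Signal; it is an illustrative stand-in, not part of elide.

# Notify-on-setattr: attributes assigned before _ready exists don't notify.
class Observable:
    def __init__(self):
        self._listeners = []
        self._ready = True

    def connect(self, func):
        self._listeners.append(func)

    def __setattr__(self, key, value):
        if not hasattr(self, "_ready"):
            super().__setattr__(key, value)
            return
        for listener in self._listeners:
            listener(self, key=key, value=value)
        super().__setattr__(key, value)


obs = Observable()
obs.connect(lambda sender, key, value: print("set", key, "to", value))
obs.turn = 1   # prints: set turn to 1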
146,504 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/util.py
|
elide.tests.util.ELiDEAppTest
|
class ELiDEAppTest(GraphicUnitTest):
def __init__(self, methodName="runTest"):
super().__init__(methodName)
self.prefix = mkdtemp()
self.addCleanup(self.cleanup)
def cleanup(self):
shutil.rmtree(self.prefix)
def setUp(self):
super(ELiDEAppTest, self).setUp()
self.old_argv = sys.argv.copy()
sys.argv = ["python", "-m", "elide", self.prefix]
self.app = ELiDEApp()
self.app.config = ConfigParser(None)
self.app.build_config(self.app.config)
def tearDown(self, fake=False):
EventLoop.idle()
super().tearDown(fake=fake)
self.app.stop()
sys.argv = self.old_argv
|
class ELiDEAppTest(GraphicUnitTest):
def __init__(self, methodName="runTest"):
pass
def cleanup(self):
pass
def setUp(self):
pass
def tearDown(self, fake=False):
pass
| 5 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 2 | 1 | 9 | 4 | 3 | 4 | 4 | 22 | 3 | 19 | 8 | 14 | 0 | 19 | 8 | 14 | 1 | 1 | 0 | 4 |
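ELiDEAppTest above creates a temporary working directory and registers its removal as a cleanup, so it disappears even when a test fails. A stdlib-only sketch of that fixture (TempDirTest is a hypothetical example, not the elide base class):

import os
import shutil
import unittest
from tempfile import mkdtemp


class TempDirTest(unittest.TestCase):
    def setUp(self):
        # Register the cleanup immediately after creating the directory, so
        # it is removed even if later setup steps or the test itself fail.
        self.prefix = mkdtemp()
        self.addCleanup(shutil.rmtree, self.prefix)

    def test_prefix_exists(self):
        self.assertTrue(os.path.isdir(self.prefix))


if __name__ == "__main__":
    unittest.main()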
146,505 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_strings_editor.py
|
elide.tests.test_strings_editor.StringsEditorTest
|
class StringsEditorTest(ELiDEAppTest):
def test_strings_editor(self):
assert "lisien" in self.app.config
app = self.app
print("app", id(app))
self.Window.add_widget(app.build())
idle_until(
lambda: hasattr(app, "mainscreen"), 100, "app never got mainscreen"
)
idle_until(
lambda: app.manager.has_screen("timestream"),
100,
"timestream never added to manager",
)
idle_until(
lambda: hasattr(app.mainmenu, "configurator"),
100,
"DirPicker never got configurator",
)
app.mainmenu.configurator.start() # start with blank world
def app_has_engine():
return hasattr(self.app.get_running_app(), "engine")
idle_until(app_has_engine, 600, "app never got engine")
idle_until(
lambda: app.strings.children, 100, "strings never got children"
)
idle_until(lambda: app.strings.edbox, 100, "strings never got edbox")
idle_until(
lambda: "physical" in app.mainscreen.graphboards,
100,
"never got physical in graphboards",
)
edbox = app.strings.edbox
strings_list = edbox.ids.strings_list
idle_until(
lambda: strings_list.store, 100, "strings_list never got store"
)
strings_ed = edbox.ids.strings_ed
app.strings.toggle()
self.advance_frames(10)
touchy = UnitTestTouch(*strings_ed.ids.stringname.center)
touchy.touch_down()
EventLoop.idle()
touchy.touch_up()
EventLoop.idle()
strings_ed.ids.stringname.text = "a string"
idle_until(
lambda: strings_ed.name == "a string", 100, "name never set"
)
touchier = UnitTestTouch(*strings_ed.ids.string.center)
touchier.touch_down()
EventLoop.idle()
touchier.touch_up()
self.advance_frames(10)
strings_ed.ids.string.text = "its value"
idle_until(
lambda: strings_ed.source == "its value", 100, "source never set"
)
self.advance_frames(10)
edbox.dismiss()
app.stop()
with Engine(self.prefix) as eng:
assert "a string" in eng.string
assert eng.string["a string"] == "its value"
|
class StringsEditorTest(ELiDEAppTest):
def test_strings_editor(self):
pass
def app_has_engine():
pass
| 3 | 0 | 34 | 1 | 33 | 1 | 1 | 0.02 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 5 | 66 | 2 | 64 | 10 | 61 | 1 | 42 | 9 | 39 | 1 | 2 | 1 | 2 |
146,506 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_sprite_builder.py
|
elide.tests.test_sprite_builder.TestSpriteBuilder
|
class TestSpriteBuilder(ELiDEAppTest):
def test_build_pawn(self):
app = self.app
self.Window.add_widget(app.build())
app.manager.current = "pawncfg"
idle_until(
lambda: "dialog" in app.pawncfg.ids,
100,
"Never made dialog for pawncfg",
)
pawn_cfg_dialog = app.pawncfg.ids.dialog
idle_until(
lambda: "builder" in pawn_cfg_dialog.ids,
100,
"Never made pawn builder",
)
builder = pawn_cfg_dialog.ids.builder
idle_until(lambda: builder.labels, 100, "Never got any builder labels")
idle_until(
lambda: builder.pallets, 100, "Never got any builder pallets"
)
idle_until(
lambda: len(builder.labels) == len(builder.pallets),
100,
"Never updated pawn builder",
)
palbox = builder._palbox
for child in palbox.children:
if not isinstance(child, Pallet):
continue
idle_until(
lambda: child.swatches,
100,
"Never got swatches for " + child.filename,
)
if "draconian_m" in child.swatches:
child.swatches["draconian_m"].state = "down"
idle_until(
lambda: child.swatches["draconian_m"] in child.selection,
100,
"Selection never updated",
)
if "robe_red" in child.swatches:
child.swatches["robe_red"].state = "down"
idle_until(
lambda: child.swatches["robe_red"] in child.selection,
100,
"Selection never updated",
)
idle_until(
lambda: pawn_cfg_dialog.ids.selector.imgpaths,
100,
"Never got imgpaths",
)
pawn_cfg_dialog.pressed()
idle_until(
lambda: pawn_cfg_dialog.imgpaths, 100, "Never propagated imgpaths"
)
assert pawn_cfg_dialog.imgpaths == [
"atlas://base.atlas/draconian_m",
"atlas://body.atlas/robe_red",
]
|
class TestSpriteBuilder(ELiDEAppTest):
def test_build_pawn(self):
pass
| 2 | 0 | 61 | 0 | 61 | 0 | 5 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 5 | 62 | 0 | 62 | 7 | 60 | 0 | 27 | 7 | 25 | 5 | 2 | 2 | 5 |
146,507 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_screen.py
|
elide.tests.test_screen.ScreenTest
|
class ScreenTest(ELiDEAppTest):
def test_advance_time(self):
app = self.app
app.mainmenu = DirPicker()
app.spotcfg = SpotConfigScreen()
app.pawncfg = PawnConfigScreen()
app.statcfg = StatScreen()
char = CharacterFacade()
char.name = "physical"
app.character = char
app.engine = MockEngine()
app.strings = MockStore()
app.funcs = MockStore()
char.character = SimpleNamespace(engine=app.engine)
app.engine.character["physical"] = char
entity = ListenableDict()
entity.engine = app.engine
entity.name = "name"
app.selected_proxy = app.proxy = app.statcfg.proxy = entity
screen = MainScreen(
graphboards={"physical": GraphBoard(character=char)},
gridboards={"physical": GridBoard(character=char)},
)
self.Window.add_widget(screen)
idle_until(
lambda: "timepanel" in screen.ids, 100, "timepanel never got id"
)
timepanel = screen.ids["timepanel"]
idle_until(
lambda: timepanel.size != [100, 100],
100,
"timepanel never resized",
)
turnfield = timepanel.ids["turnfield"]
turn_before = int(turnfield.hint_text)
stepbut = timepanel.ids["stepbut"]
motion = UnitTestTouch(*stepbut.center)
motion.touch_down()
motion.touch_up()
EventLoop.idle()
assert int(turnfield.hint_text) == turn_before + 1
def test_play(self):
app = self.app
app.spotcfg = SpotConfigScreen()
app.pawncfg = PawnConfigScreen()
app.statcfg = StatScreen()
app.mainmenu = DirPicker()
app.strings = MockStore()
app.funcs = MockStore()
char = CharacterFacade()
char.name = "foo"
app.character = char
app.engine = MockEngine()
app.manager = ScreenManager()
char.character = SimpleNamespace(engine=app.engine)
app.engine.character["foo"] = char
entity = ListenableDict()
entity.engine = app.engine
entity.name = "name"
app.selected_proxy = app.proxy = app.statcfg.proxy = entity
screen = MainScreen(
graphboards={"foo": GraphBoard(character=char)},
gridboards={"foo": GridBoard(character=char)},
play_speed=1.0,
)
app.manager.add_widget(screen)
self.Window.add_widget(app.manager)
idle_until(
lambda: "timepanel" in screen.ids, 100, "timepanel never got id"
)
timepanel = screen.ids["timepanel"]
idle_until(
lambda: timepanel.size != [100, 100],
100,
"timepanel never resized",
)
turnfield = timepanel.ids["turnfield"]
playbut = screen.playbut = timepanel.ids["playbut"]
motion = UnitTestTouch(*playbut.center)
motion.touch_down()
motion.touch_up()
idle_until(
lambda: int(turnfield.hint_text) == 3,
400,
"Time didn't advance fast enough",
)
playbut.state = "normal"
def test_update(self):
def almost(a, b):
if isinstance(a, tuple) and isinstance(b, tuple):
return all(almost(aa, bb) for (aa, bb) in zip(a, b))
return abs(a - b) < 1
with Engine(self.prefix) as eng:
phys = eng.new_character("physical")
here = phys.new_place((0, 0))
phys.add_place((1, 1))
phys.add_place(9) # test that gridboard can handle this
this = here.new_thing(2)
@this.rule(always=True)
def go(me):
if me["location"] == (1, 1):
me["location"] = 9
elif me["location"] == 9:
me["location"] = (0, 0)
else:
me["location"] = (1, 1)
app = self.app
app.starting_dir = self.prefix
app.build()
idle_until(
lambda: hasattr(app, "engine"), 100, "Never got engine proxy"
)
assert app.engine.character["physical"].thing[2]["location"] == (0, 0)
graphboard = app.mainscreen.graphboards["physical"]
gridboard = app.mainscreen.gridboards["physical"]
idle_until(
lambda: graphboard.size != [100, 100],
100,
"Never resized graphboard",
)
idle_until(
lambda: gridboard.size != [100, 100],
100,
"Never resized gridboard",
)
idle_until(
lambda: (0, 0) in graphboard.spot,
100,
"Never made spot for location 0",
)
idle_until(
lambda: 2 in graphboard.pawn, 100, "Never made pawn for thing 2"
)
locspot0 = graphboard.spot[0, 0]
gridspot0 = gridboard.spot[0, 0]
locspot1 = graphboard.spot[1, 1]
gridspot1 = gridboard.spot[1, 1]
graphpawn = graphboard.pawn[2]
gridpawn = gridboard.pawn[2]
idle_until(
lambda: almost(graphpawn.x, locspot0.right),
100,
f"Never positioned pawn to 0's right (it's at {graphpawn.x}"
f", not {locspot0.right})",
)
idle_until(
lambda: almost(graphpawn.y, locspot0.top),
100,
"Never positioned pawn to 0's top",
)
idle_until(
lambda: almost(gridpawn.pos, gridspot0.pos),
100,
"Never positioned pawn to grid 0, 0",
)
app.mainscreen.next_turn()
idle_until(lambda: not app.edit_locked, 100, "Never unlocked")
loc = app.engine.character["physical"].thing[2]["location"]
def relocated_to(dest):
nonlocal loc
loc = app.engine.character["physical"].thing[2]["location"]
return loc == dest
idle_until(
partial(relocated_to, (1, 1)),
1000,
f"Thing 2 didn't go to location (1, 1); instead, it's at {loc}",
)
idle_until(
lambda: almost(graphpawn.x, locspot1.right),
100,
"Never positioned pawn to 1's right "
f"(pawn is at {graphpawn.x} not {locspot1.right})",
)
idle_until(
lambda: almost(graphpawn.y, locspot1.top),
100,
"Never positioned pawn to 1's top "
f"(it's at {graphpawn.y}, not {locspot1.top})",
)
idle_until(
lambda: almost(gridpawn.pos, gridspot1.pos),
100,
"Never positioned pawn to grid 1, 1",
)
locspot9 = graphboard.spot[9]
app.mainscreen.next_turn()
idle_until(lambda: not app.edit_locked, 100, "Never unlocked")
loc = app.engine.character["physical"].thing[2]["location"]
idle_until(
partial(relocated_to, 9),
1000,
f"Thing 2 didn't relocate to 9; it's at {loc}",
)
idle_until(
lambda: 2 not in gridboard.pawn,
100,
"pawn never removed from grid",
)
idle_until(
lambda: almost(graphpawn.x, locspot9.right)
and almost(graphpawn.y, locspot9.top),
100,
f"Never positioned pawn to 9's top-right, "
f"it's at {graphpawn.pos} not {locspot9.right, locspot9.top}",
)
app.mainscreen.next_turn()
idle_until(lambda: not app.edit_locked, 100, "Never unlocked")
loc = app.engine.character["physical"].thing[2]["location"]
idle_until(
partial(relocated_to, (0, 0)),
1000,
f"Thing 2 didn't relocate to (0, 0); it's at {loc}",
)
idle_until(
lambda: 2 in gridboard.pawn, 100, "pawn never returned to grid"
)
idle_until(
lambda: almost(graphpawn.x, locspot0.right)
and almost(graphpawn.y, locspot0.top),
100,
f"Never returned to 0's top-right "
f"(stuck at {graphpawn.pos}, should be "
f"{locspot0.right, locspot0.top})",
)
idle_until(
lambda: almost(gridpawn.pos, gridspot0.pos),
100,
"Never returned to grid 0, 0",
)
|
class ScreenTest(ELiDEAppTest):
def test_advance_time(self):
pass
def test_play(self):
pass
def test_update(self):
pass
def almost(a, b):
pass
@this.rule(always=True)
def go(me):
pass
def relocated_to(dest):
pass
| 8 | 0 | 42 | 1 | 41 | 0 | 2 | 0 | 1 | 15 | 11 | 0 | 3 | 0 | 3 | 7 | 237 | 8 | 229 | 41 | 220 | 1 | 125 | 39 | 117 | 3 | 2 | 1 | 9 |
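The tests above rely heavily on idle_until(predicate, attempts, message), which lives in elide's test utilities and is not shown in this record. The wait_until helper below is a generic stand-in for the same idea, polling a predicate and failing with the given message if it never becomes true; the real helper advances Kivy frames between attempts instead of sleeping.

import time


def wait_until(predicate, attempts=100, message="condition never became true",
               delay=0.01):
    # Poll the predicate up to `attempts` times, sleeping between attempts,
    # and raise with the diagnostic message if it never succeeds.
    for _ in range(attempts):
        if predicate():
            return
        time.sleep(delay)
    raise AssertionError(message)


flag = {"done": False}
flag["done"] = True
wait_until(lambda: flag["done"], 10, "flag was never set")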
146,508 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_screen.py
|
elide.tests.test_screen.MockStore
|
class MockStore:
def save(self, *args):
pass
|
class MockStore:
def save(self, *args):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 0 | 0 | 1 |
146,509 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_rule_builder.py
|
elide.tests.test_rule_builder.TestRuleBuilderKobold
|
class TestRuleBuilderKobold(RuleBuilderTest):
def install(self, engine: Engine):
kobold.inittest(engine)
def get_selection(self):
return self.board.pawn["kobold"]
def test_rule_builder_display_trigger(self):
rules_list = self.rules_list
rules_view = self.rules_view
idle_until(
lambda: "shrubsprint"
in {rulebut.text for rulebut in rules_list.children[0].children},
100,
"Never made shrubsprint button",
)
for rulebut in rules_list.children[0].children:
if rulebut.text == "shrubsprint":
rulebut.state = "down"
break
idle_until(lambda: rules_view.children)
idle_until(
lambda: hasattr(rules_view, "_trigger_tab"),
100,
"Never made trigger tab",
)
builder = rules_view._trigger_builder
idle_until(
lambda: [
child for child in builder.children if isinstance(child, Card)
],
100,
"Never filled trigger builder",
)
card_names = {
card.headline_text
for card in builder.children
if isinstance(card, Card)
}
assert card_names == {
"standing_still",
"aware",
"uncovered",
"sametile",
"kobold_alive",
}
def test_rule_builder_remove_trigger(self):
rules_list = self.rules_list
rules_view = self.rules_view
idle_until(
lambda: "shrubsprint"
in {rulebut.text for rulebut in rules_list.children[0].children},
100,
"Never made shrubsprint button",
)
for rulebut in rules_list.children[0].children:
if rulebut.text == "shrubsprint":
rulebut.state = "down"
break
idle_until(lambda: rules_view.children)
idle_until(
lambda: hasattr(rules_view, "_trigger_tab"),
100,
"Never made trigger tab",
)
builder = rules_view._trigger_builder
idle_until(
lambda: [
child for child in builder.children if isinstance(child, Card)
],
100,
"Never filled trigger builder",
)
uncovered = None
def have_uncovered():
nonlocal uncovered
for card in builder.children:
if not isinstance(card, Card):
continue
if card.headline_text == "uncovered":
uncovered = card
return True
return False
idle_until(have_uncovered, 100, "Never got 'uncovered' card")
right_foundation = None
def have_right_foundation():
nonlocal right_foundation
for foundation in builder.children:
if not isinstance(foundation, Foundation):
continue
if foundation.x > uncovered.right:
right_foundation = foundation
return True
return False
idle_until(have_right_foundation, 100, "Never built right foundation")
assert uncovered is not None
assert right_foundation is not None
def uncovered_is_flush_with_right_foundation():
for card in builder.children:
if not isinstance(card, Card):
continue
if card.headline_text == "uncovered":
breakcover = card
right_foundation = None
for foundation in builder.children:
if isinstance(foundation, Card):
continue
if (
right_foundation is None
or foundation.x > right_foundation.x
):
right_foundation = foundation
assert right_foundation is not None, "No foundations??"
return breakcover.x == right_foundation.x
return False
card = uncovered
foundation = right_foundation
mov = UnitTestTouch(*card.center)
mov.touch_down()
dist_x = foundation.center_x - card.center_x
dist_y = foundation.y - card.center_y
for i in range(1, 11):
coef = 1 / i
x = foundation.center_x - coef * dist_x
y = foundation.y - coef * dist_y
mov.touch_move(x, y)
self.advance_frames(1)
mov.touch_up(foundation.center_x, foundation.y)
idle_until(
partial(builder_foundation, builder),
100,
"didn't replace foundations",
)
idle_until(
uncovered_is_flush_with_right_foundation, 100, "card didn't move"
)
idle_until(
lambda: not any(
func.name == "breakcover"
for func in self.app.engine.rule["shrubsprint"].triggers
),
100,
"breakcover never removed from rulebook",
)
def test_rule_builder_add_trigger(self):
rules_list = self.rules_list
rules_view = self.rules_view
idle_until(
lambda: "shrubsprint"
in {rulebut.text for rulebut in rules_list.children[0].children},
100,
"Never made shrubsprint button",
)
for rulebut in rules_list.children[0].children:
if rulebut.text == "shrubsprint":
rulebut.state = "down"
break
idle_until(lambda: rules_view.children)
idle_until(
lambda: hasattr(rules_view, "_trigger_tab"),
100,
"Never made trigger tab",
)
builder = rules_view._trigger_builder
idle_until(
lambda: [
child for child in builder.children if isinstance(child, Card)
],
100,
"Never filled trigger builder",
)
idle_until(
lambda: [child.x for child in builder.children if child.x > 0],
100,
"Never positioned trigger builder's children",
)
aware = None
for card in builder.children:
if isinstance(card, Foundation):
continue
assert isinstance(card, Card)
if card.headline_text == "aware":
aware = card
break
assert aware is not None, "Didn't get 'aware' card"
uncovered = None
for card in builder.children:
if isinstance(card, Foundation):
continue
assert isinstance(card, Card)
if card.headline_text == "uncovered":
uncovered = card
break
assert uncovered is not None, "Didn't get 'uncovered' card"
start_x = aware.center_x
start_y = aware.top - 10
assert aware.collide_point(start_x, start_y)
mov = UnitTestTouch(start_x, start_y)
mov.touch_down()
dist_x = start_x - uncovered.center_x
dist_y = start_y - uncovered.center_y
decr_x = dist_x / 10
decr_y = dist_y / 10
x = start_x
y = start_y
for i in range(1, 11):
x -= decr_x
y -= decr_y
mov.touch_move(x, y)
self.advance_frames(1)
mov.touch_up(*uncovered.center)
idle_until(
lambda: abs(aware.x - uncovered.x) < 2,
100,
"aware didn't move to its new place",
)
idle_until(
lambda: any(
func.name == "aware"
for func in self.app.engine.rule["shrubsprint"].triggers
),
100,
"aware never added to rulebook",
)
|
class TestRuleBuilderKobold(RuleBuilderTest):
def install(self, engine: Engine):
pass
def get_selection(self):
pass
def test_rule_builder_display_trigger(self):
pass
def test_rule_builder_remove_trigger(self):
pass
def have_uncovered():
pass
def have_right_foundation():
pass
def uncovered_is_flush_with_right_foundation():
pass
def test_rule_builder_add_trigger(self):
pass
| 9 | 0 | 33 | 1 | 32 | 0 | 4 | 0 | 1 | 4 | 2 | 0 | 5 | 0 | 5 | 12 | 235 | 13 | 222 | 54 | 211 | 0 | 136 | 54 | 125 | 10 | 3 | 4 | 34 |
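The drag-and-drop tests above walk a simulated touch toward its target in ten intermediate touch_move calls so that on_touch_move handlers see a believable drag. The sketch below uses a simple linear interpolation, a slight simplification of the 1/i stepping used in the tests; drag_path is illustrative, not part of elide.

def drag_path(start, target, steps=10):
    """Yield points moving linearly from start to target, ending on target."""
    sx, sy = start
    tx, ty = target
    for i in range(1, steps + 1):
        frac = i / steps
        yield (sx + (tx - sx) * frac, sy + (ty - sy) * frac)


# In a real test, each yielded point would become one touch_move call,
# with a frame advanced between moves.
moves = list(drag_path((0.0, 0.0), (100.0, 50.0)))
assert moves[-1] == (100.0, 50.0)
assert len(moves) == 10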
146,510 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_rule_builder.py
|
elide.tests.test_rule_builder.TestCharRuleBuilder
|
class TestCharRuleBuilder(ELiDEAppTest):
def setUp(self):
with Engine(self.prefix) as eng:
polygons.install(eng)
assert list(
eng.character["triangle"].unit.rule["relocate"].triggers
) == [
eng.trigger.similar_neighbors,
eng.trigger.dissimilar_neighbors,
]
super(TestCharRuleBuilder, self).setUp()
app = self.app
mgr = app.build()
self.Window.add_widget(mgr)
idle_until(
lambda: hasattr(app, "engine"), 100, "App never made engine"
)
idle_until(
lambda: "triangle" in app.engine.character,
100,
"Engine proxy never made triangle character proxy",
)
app.select_character(app.engine.character["triangle"])
idle_until(
lambda: app.character_name == "triangle",
100,
"Never changed character",
)
app.mainscreen.charmenu.charmenu.toggle_rules()
idle_until(
lambda: getattr(app.charrules, "_finalized", False),
100,
"Never finalized",
)
def test_char_rule_builder_remove_unit_trigger(self):
app = self.app
idle_until(
lambda: getattr(app.charrules, "_finalized", False),
100,
"Never finalized charrules",
)
tabitem = app.charrules._unit_tab
idle_until(lambda: tabitem.content, 100, "unit tab never got content")
tabitem.on_press()
self.advance_frames(1)
tabitem.on_release()
idle_until(
lambda: app.charrules._tabs.current_tab == tabitem,
100,
"Never switched tab",
)
rules_box = app.charrules._unit_box
idle_until(lambda: rules_box.parent, 100, "unit box never got parent")
idle_until(
lambda: getattr(rules_box.rulesview, "_finalized", False),
100,
"Never finalized unit rules view",
)
idle_until(
lambda: rules_box.children, 100, "_unit_box never got children"
)
idle_until(
lambda: rules_box.rulesview.children,
100,
"Never filled rules view",
)
rules_list = rules_box.ruleslist
idle_until(
lambda: rules_list.children[0].children,
1000,
"Never filled rules list",
)
idle_until(
lambda: "relocate"
in {rulebut.text for rulebut in rules_list.children[0].children},
1000,
"Never made relocate button",
)
for rulebut in rules_list.children[0].children:
if rulebut.text == "relocate":
rulebut.state = "down"
break
builder = rules_box.rulesview._trigger_builder
assert (
rules_box.rulesview._tabs.current_tab
== rules_box.rulesview._trigger_tab
)
idle_until(
lambda: builder.children,
1000,
"trigger builder never got children",
)
idle_until(
partial(builder_foundation, builder),
100,
"Never filled trigger builder",
)
idle_until(
lambda: builder.parent, 1000, "trigger builder never got parent"
)
card_names = {
card.headline_text
for card in builder.children
if isinstance(card, Card)
}
assert card_names == {
"similar_neighbors",
"dissimilar_neighbors",
}
for card in builder.children:
if not isinstance(card, Card):
continue
if card.headline_text == "similar_neighbors":
break
else:
assert False, "Didn't get similar_neighbors"
startx = card.center_x
starty = card.top - 1
assert card.collide_point(startx, starty), "card didn't collide itself"
for cardother in builder.children:
if not isinstance(cardother, Card) or cardother == card:
continue
assert not cardother.collide_point(startx, starty), (
"other card will grab the touch"
)
touch = UnitTestTouch(startx, starty)
for target in builder.children:
if isinstance(target, Card):
continue
if target.x > card.right:
break
else:
assert False, "Didn't get target foundation"
targx, targy = target.center
distx = targx - startx
disty = targy - starty
x, y = startx, starty
touch.touch_down()
self.advance_frames(1)
for i in range(1, 11):
x += distx / 10
y += disty / 10
touch.touch_move(x, y)
self.advance_frames(1)
touch.touch_up()
self.advance_frames(5)
rules_box.ids.closebut.on_release()
idle_until(
lambda: all(
card.headline_text != "similar_neighbors"
for card in builder.decks[0]
),
100,
"similar_neighbors still in used pile",
)
idle_until(
lambda: not any(
trig.name == "similar_neighbors"
for trig in app.charrules.character.unit.rulebook[0].triggers
),
100,
"similar_neighbors still in proxy triggers list",
)
app.stop()
with Engine(self.prefix) as eng:
assert list(
eng.character["triangle"].unit.rule["relocate"].triggers
) == [eng.trigger.dissimilar_neighbors]
|
class TestCharRuleBuilder(ELiDEAppTest):
def setUp(self):
pass
def test_char_rule_builder_remove_unit_trigger(self):
pass
| 3 | 0 | 84 | 0 | 84 | 0 | 7 | 0 | 1 | 5 | 1 | 0 | 2 | 0 | 2 | 6 | 169 | 1 | 168 | 24 | 165 | 0 | 84 | 22 | 81 | 12 | 2 | 2 | 13 |
146,511 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_python_editor.py
|
elide.tests.test_python_editor.TestShowCode
|
class TestShowCode(PythonEditorTest):
def test_show_code(self):
app = self.app
self.Window.add_widget(app.build())
actions_box = self._get_actions_box()
last = actions_box.storelist.data[-1]["name"]
actions_box.storelist.selection_name = last
idle_until(
lambda: "funname" in actions_box.editor.ids,
100,
"Never got function input widget",
)
idle_until(
lambda: actions_box.editor.ids.funname.hint_text,
100,
"Never got function name",
)
idle_until(
lambda: "code" in actions_box.editor.ids,
100,
"Never got code editor widget",
)
idle_until(
lambda: actions_box.editor.ids.code.text,
100,
"Never got source code",
)
|
class TestShowCode(PythonEditorTest):
def test_show_code(self):
pass
| 2 | 0 | 26 | 0 | 26 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 7 | 27 | 0 | 27 | 5 | 25 | 0 | 11 | 5 | 9 | 1 | 3 | 0 | 1 |
146,512 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/test_python_editor.py
|
elide.tests.test_python_editor.TestCreateAction
|
class TestCreateAction(PythonEditorTest):
def test_create_action(self):
app = self.app
self.Window.add_widget(app.build())
actions_box = self._get_actions_box()
actions_box.editor.ids.funname.text = "new_func"
actions_box.editor.ids.code.text = 'return "Hello, world!"'
app.stop()
with Engine(self.prefix) as eng:
assert hasattr(eng.action, "new_func")
|
class TestCreateAction(PythonEditorTest):
def test_create_action(self):
pass
| 2 | 0 | 9 | 0 | 9 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 7 | 10 | 0 | 10 | 5 | 8 | 0 | 10 | 4 | 8 | 1 | 3 | 1 | 1 |
146,513 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/tests/util.py
|
elide.tests.util.ListenableDict
|
class ListenableDict(dict, Signal):
def __init__(self):
Signal.__init__(self)
|
class ListenableDict(dict, Signal):
def __init__(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 28 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
146,514 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/server/__init__.py
|
lisien.server.LiSEHandleWebService
|
class LiSEHandleWebService(object):
exposed = True
def __init__(self, *args, **kwargs):
if "logger" in kwargs:
self.logger = kwargs["logger"]
else:
self.logger = kwargs["logger"] = logging.getLogger(__name__)
self.cmdq = kwargs["cmdq"] = Queue()
self.outq = kwargs["outq"] = Queue()
self._handle_thread = threading.Thread(
target=self._run_handle_forever,
args=args,
kwargs=kwargs,
daemon=True,
)
self._handle_thread.start()
@staticmethod
def _run_handle_forever(*args, **kwargs):
cmdq = kwargs.pop("cmdq")
outq = kwargs.pop("outq")
logger = kwargs.pop("logger")
setup = kwargs.pop("setup", None)
logq = Queue()
def log(typ, data):
if typ == "command":
(cmd, args) = data
logger.debug(
"lisien thread {}: calling {}{}".format(
threading.get_ident(), cmd, tuple(args)
)
)
else:
logger.debug(
"lisien thread {}: returning {} (of type {})".format(
threading.get_ident(), data, repr(type(data))
)
)
def get_log_forever(logq):
(level, data) = logq.get()
getattr(logger, level)(data)
engine_handle = EngineHandle(*args, logq=logq, **kwargs)
if setup:
setup(engine_handle._real)
handle_log_thread = threading.Thread(
target=get_log_forever, args=(logq,), daemon=True
)
handle_log_thread.start()
while True:
inst = cmdq.get()
if inst == "shutdown":
handle_log_thread.join()
cmdq.close()
outq.close()
return 0
cmd = inst.pop("command")
silent = inst.pop("silent", False)
log("command", (cmd, args))
response = getattr(engine_handle, cmd)(**inst)
if silent:
continue
log("result", response)
outq.put(engine_handle._real.listify(response))
@cherrypy.tools.accept(media="application/json")
@cherrypy.tools.json_out()
def GET(self):
return cherrypy.session["LiSE_response"]
@cherrypy.tools.json_out()
def POST(self, **kwargs):
silent = kwargs.get("silent", False)
self.cmdq.put(kwargs)
if silent:
return None
response = self.outq.get()
cherrypy.session["LiSE_response"] = response
return response
def PUT(self, silent=False, **kwargs):
silent = silent
self.cmdq.put(kwargs)
if not silent:
cherrypy.session["LiSE_response"] = self.outq.get()
def DELETE(self):
cherrypy.session.pop("LiSE_response", None)
|
class LiSEHandleWebService(object):
def __init__(self, *args, **kwargs):
pass
@staticmethod
def _run_handle_forever(*args, **kwargs):
pass
def log(typ, data):
pass
def get_log_forever(logq):
pass
@cherrypy.tools.accept(media="application/json")
@cherrypy.tools.json_out()
def GET(self):
pass
@cherrypy.tools.json_out()
def POST(self, **kwargs):
pass
def PUT(self, silent=False, **kwargs):
pass
def DELETE(self):
pass
| 13 | 0 | 12 | 0 | 12 | 0 | 2 | 0 | 1 | 4 | 0 | 0 | 5 | 4 | 6 | 6 | 91 | 9 | 82 | 32 | 69 | 0 | 61 | 29 | 52 | 5 | 1 | 2 | 16 |
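_run_handle_forever above is a command-queue worker: it pops command dicts off one queue, dispatches them by name on a handle object, pushes results onto another queue, and stops at a "shutdown" sentinel. A self-contained sketch of that loop follows; EchoHandle stands in for EngineHandle and is purely illustrative.

from queue import Queue
from threading import Thread


class EchoHandle:
    def echo(self, text):
        return text.upper()


def run_handle_forever(cmdq, outq):
    handle = EchoHandle()
    while True:
        inst = cmdq.get()
        if inst == "shutdown":   # sentinel ends the worker loop
            return
        cmd = inst.pop("command")
        silent = inst.pop("silent", False)
        result = getattr(handle, cmd)(**inst)
        if not silent:
            outq.put(result)


cmdq, outq = Queue(), Queue()
worker = Thread(target=run_handle_forever, args=(cmdq, outq), daemon=True)
worker.start()
cmdq.put({"command": "echo", "text": "hello"})
assert outq.get() == "HELLO"
cmdq.put("shutdown")
worker.join()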
146,515 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/rule.py
|
lisien.rule.TriggerList
|
class TriggerList(RuleFuncList):
"""A list of trigger functions for rules"""
@cached_property
def _funcstore(self):
return self.rule.engine.trigger
@cached_property
def _cache(self):
return self.rule.engine._triggers_cache
@cached_property
def _setter(self):
return self.rule.engine.query.set_rule_triggers
|
class TriggerList(RuleFuncList):
'''A list of trigger functions for rules'''
@cached_property
def _funcstore(self):
pass
@cached_property
def _cache(self):
pass
@cached_property
def _setter(self):
pass
| 7 | 1 | 2 | 0 | 2 | 0 | 1 | 0.1 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 61 | 14 | 3 | 10 | 7 | 3 | 1 | 7 | 4 | 3 | 1 | 8 | 0 | 3 |
146,516 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/rule.py
|
lisien.rule.RuleMapping
|
class RuleMapping(MutableMapping, Signal):
"""Wraps a :class:`RuleBook` so you can get its rules by name.
You can access the rules in this either dictionary-style or as
attributes. This is for convenience if you want to get at a rule's
decorators, eg. to add an Action to the rule.
Using this as a decorator will create a new rule, named for the
decorated function, and using the decorated function as the
initial Action.
Using this like a dictionary will let you create new rules,
appending them onto the underlying :class:`RuleBook`; replace one
rule with another, where the new one will have the same index in
the :class:`RuleBook` as the old one; and activate or deactivate
rules. The name of a rule may be used in place of the actual rule,
so long as the rule already exists.
:param name: If you want the rule's name to be different from the name
of its first action, supply the name here.
:param neighborhood: Optional integer; if supplied, the rule will only
be run when something's changed within this many nodes.
``neighborhood=0`` means this only runs when something's changed
*here*, or a place containing this entity.
:param big: Set to ``True`` if the rule will make many changes to the world,
so that Lisien can optimize for a big batch of changes.
:param always: If set to ``True``, the rule will run every turn.
"""
def __init__(self, engine, rulebook):
super().__init__()
self.engine = engine
self._rule_cache = self.engine.rule._cache
if isinstance(rulebook, RuleBook):
self.rulebook = rulebook
else:
self.rulebook = self.engine.rulebook[rulebook]
def __repr__(self):
return "RuleMapping({})".format([k for k in self])
def __iter__(self):
return iter(self.rulebook)
def __len__(self):
return len(self.rulebook)
def __contains__(self, k):
return k in self.rulebook
def __getitem__(self, k):
if k not in self:
raise KeyError("Rule '{}' is not in effect".format(k))
return self._rule_cache[k]
def __getattr__(self, k):
if k in self:
return self[k]
raise AttributeError
def __setitem__(self, k, v):
if k == "truth":
raise KeyError("Illegal rule name")
if isinstance(v, Hashable) and v in self.engine.rule:
v = self.engine.rule[v]
elif isinstance(v, str) and hasattr(self.engine.function, v):
v = getattr(self.engine.function, v)
if not isinstance(v, Rule) and callable(v):
# create a new rule, named k, performing action v
self.engine.rule[k] = v
v = self.engine.rule[k]
assert isinstance(v, Rule)
if isinstance(k, int):
self.rulebook[k] = v
else:
self.rulebook.append(v)
def __call__(
self,
v: Optional[callable] = None,
name: Optional[str] = None,
*,
neighborhood: Optional[int] = -1,
big: bool = False,
always: bool = False,
):
def wrap(name, v, **kwargs):
name = name if name is not None else v.__name__
if name == "truth":
raise ValueError("Illegal rule name")
self[name] = v
r = self[name]
if kwargs.get("always"):
r.always()
if "neighborhood" in kwargs:
r.neighborhood = kwargs["neighborhood"]
if "big" in kwargs:
r.big = kwargs["big"]
return r
kwargs = {"big": big}
if always:
kwargs["always"] = True
if neighborhood != -1:
kwargs["neighborhood"] = neighborhood
if v is None:
return partial(wrap, name, **kwargs)
return wrap(name, v, **kwargs)
def __delitem__(self, k):
i = self.rulebook.index(k)
del self.rulebook[i]
self.send(self, key=k, val=None)
@property
def priority(self):
return self.rulebook.priority
@priority.setter
def priority(self, v: float):
self.rulebook.priority = v
|
class RuleMapping(MutableMapping, Signal):
'''Wraps a :class:`RuleBook` so you can get its rules by name.
You can access the rules in this either dictionary-style or as
attributes. This is for convenience if you want to get at a rule's
decorators, eg. to add an Action to the rule.
Using this as a decorator will create a new rule, named for the
decorated function, and using the decorated function as the
initial Action.
Using this like a dictionary will let you create new rules,
appending them onto the underlying :class:`RuleBook`; replace one
rule with another, where the new one will have the same index in
the :class:`RuleBook` as the old one; and activate or deactivate
rules. The name of a rule may be used in place of the actual rule,
so long as the rule already exists.
:param name: If you want the rule's name to be different from the name
of its first action, supply the name here.
:param neighborhood: Optional integer; if supplied, the rule will only
be run when something's changed within this many nodes.
``neighborhood=0`` means this only runs when something's changed
*here*, or a place containing this entity.
:param big: Set to ``True`` if the rule will make many changes to the world,
so that Lisien can optimize for a big batch of changes.
:param always: If set to ``True``, the rule will run every turn.
'''
def __init__(self, engine, rulebook):
pass
def __repr__(self):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, k):
pass
def __getitem__(self, k):
pass
def __getattr__(self, k):
pass
def __setitem__(self, k, v):
pass
def __call__(
self,
v: Optional[callable] = None,
name: Optional[str] = None,
*,
neighborhood: Optional[int] = -1,
big: bool = False,
always: bool = False,
):
pass
def wrap(name, v, **kwargs):
pass
def __delitem__(self, k):
pass
@property
def priority(self):
pass
@priority.setter
def priority(self, v: float):
pass
| 16 | 1 | 7 | 0 | 7 | 0 | 2 | 0.3 | 2 | 12 | 2 | 2 | 12 | 3 | 12 | 53 | 122 | 18 | 80 | 30 | 56 | 24 | 67 | 20 | 53 | 6 | 7 | 1 | 29 |
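A hedged sketch of the decorator form the docstring describes. Everything around the decorator call is an assumption made for illustration: that the engine is importable as lisien.Engine, takes a world-directory argument, works as a context manager, has new_character, and that characters expose this RuleMapping as their .rule attribute and stats as .stat.
from lisien import Engine
with Engine(".") as eng:  # "." stands in for a real world directory
    kobold = eng.new_character("kobold")
    # calling the mapping returns a decorator; the decorated function becomes
    # the new rule's first Action, and the rule takes the function's name
    @kobold.rule(always=True)
    def wander(character):
        character.stat["restlessness"] = character.stat.get("restlessness", 0) + 1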
146,517 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/node.py
|
lisien.node.Place
|
class Place(Node):
"""The kind of node where a thing might ultimately be located.
lisien entities are truthy so long as they exist, falsy if they've
been deleted.
"""
__slots__ = (
"graph",
"db",
"node",
"_rulebook",
"_rulebooks",
"_real_rule_mapping",
)
extrakeys = {
"name",
}
def __getitem__(self, key):
if key == "name":
return self.name
return super().__getitem__(key)
def __repr__(self):
return "<{}.character[{}].place[{}]>".format(
repr(self.engine), self.character.name, self.name
)
def _validate_node_type(self):
try:
self.engine._things_cache.retrieve(
self.character.name, self.name, *self.engine._btt()
)
return False
except:
return True
def facade(self):
return FacadePlace(self.character.facade(), self)
def delete(self) -> None:
"""Remove myself from the world model immediately."""
super().delete()
self.character.place.send(
self.character.place, key=self.name, val=None
)
|
class Place(Node):
'''The kind of node where a thing might ultimately be located.
lisien entities are truthy so long as they exist, falsy if they've
been deleted.
'''
def __getitem__(self, key):
pass
def __repr__(self):
pass
def _validate_node_type(self):
pass
def facade(self):
pass
def delete(self) -> None:
'''Remove myself from the world model immediately.'''
pass
| 6 | 2 | 5 | 0 | 5 | 0 | 1 | 0.14 | 1 | 2 | 1 | 0 | 5 | 1 | 5 | 64 | 49 | 9 | 35 | 9 | 29 | 5 | 20 | 8 | 14 | 2 | 6 | 1 | 7 |
146,518 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/node.py
|
lisien.node.Portals
|
class Portals(Set):
__slots__ = ("_pn", "_pecnb")
def __init__(self, node) -> None:
name = node.name
character = node.character
engine = node.engine
self._pn = (character.portal, name)
self._pecnb = (
engine._get_edge,
engine._edges_cache,
character,
character.name,
name,
engine._btt,
)
def __contains__(self, x) -> bool:
_, edges_cache, _, charname, name, btt_f = self._pecnb
btt = btt_f()
return edges_cache.has_predecessor(
charname, name, x, *btt
) or edges_cache.has_successor(charname, name, x, *btt)
def __len__(self) -> int:
_, edges_cache, _, charname, name, btt_f = self._pecnb
btt = btt_f()
return edges_cache.count_predecessors(
charname, name, *btt
) + edges_cache.count_successors(charname, name, *btt)
def __iter__(self) -> Iterator["lisien.portal.Portal"]:
get_edge, edges_cache, character, charname, name, btt_f = self._pecnb
btt = btt_f()
for dest in edges_cache.iter_successors(charname, name, *btt):
yield get_edge(character, name, dest, 0)
for orig in edges_cache.iter_predecessors(charname, name, *btt):
yield get_edge(character, orig, name, 0)
|
class Portals(Set):
def __init__(self, node) -> None:
pass
def __contains__(self, x) -> bool:
pass
def __len__(self) -> int:
pass
def __iter__(self) -> Iterator["lisien.portal.Portal"]:
pass
| 5 | 0 | 8 | 0 | 8 | 0 | 2 | 0 | 1 | 2 | 0 | 0 | 4 | 2 | 4 | 44 | 38 | 4 | 34 | 19 | 29 | 0 | 23 | 19 | 18 | 3 | 6 | 1 | 6 |
146,519 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/node.py
|
lisien.node.Thing
|
class Thing(Node, AbstractThing):
"""The sort of item that has a particular location at any given time.
Things are always in Places or other Things, and may additionally be
travelling through a Portal.
lisien entities are truthy so long as they exist, falsy if they've
been deleted.
"""
__slots__ = (
"graph",
"db",
"node",
"_rulebook",
"_rulebooks",
"_real_rule_mapping",
)
_extra_keys = {"name", "location"}
def _getname(self):
return self.name
def _getloc(self):
ret = self.engine._things_cache._base_retrieve(
(self.character.name, self.name, *self.engine._btt())
)
if ret is None or isinstance(ret, Exception):
return None
return ret
def _validate_node_type(self):
return self._getloc() is not None
def _get_arrival_time(self):
charn = self.character.name
n = self.name
thingcache = self.engine._things_cache
for b, trn, tck in self.engine._iter_parent_btt():
try:
v = thingcache.turn_before(charn, n, b, trn)
except KeyError:
v = thingcache.turn_after(charn, n, b, trn)
if v is not None:
return v
else:
raise ValueError("Couldn't find arrival time")
def _set_loc(self, loc: Optional[Key]):
self.engine._set_thing_loc(self.character.name, self.name, loc)
_getitem_dispatch = {"name": _getname, "location": _getloc}
_setitem_dispatch = {"name": roerror, "location": _set_loc}
def __getitem__(self, key: Key):
"""Return one of my stats stored in the database, or special cases:
``name``: return the name that uniquely identifies me within
my Character
``location``: return the name of my location
"""
disp = self._getitem_dispatch
if key in disp:
return disp[key](self)
else:
return super().__getitem__(key)
def __setitem__(self, key, value):
"""Set ``key``=``value`` for the present game-time."""
try:
self._setitem_dispatch[key](self, value)
except HistoricKeyError as ex:
raise ex
except KeyError:
super().__setitem__(key, value)
def __delitem__(self, key):
"""As of now, this key isn't mine."""
if key in self._extra_keys:
raise ValueError("Can't delete {}".format(key))
super().__delitem__(key)
def __repr__(self):
return "<{}.character['{}'].thing['{}']>".format(
self.engine, self.character.name, self.name
)
def facade(self):
return FacadeThing(self.character.facade(), self)
def delete(self) -> None:
super().delete()
self._set_loc(None)
self.character.thing.send(
self.character.thing, key=self.name, val=None
)
def clear(self) -> None:
"""Unset everything."""
for k in list(self.keys()):
if k not in self._extra_keys:
del self[k]
|
class Thing(Node, AbstractThing):
'''The sort of item that has a particular location at any given time.
Things are always in Places or other Things, and may additionally be
travelling through a Portal.
lisien entities are truthy so long as they exist, falsy if they've
been deleted.
'''
def _getname(self):
pass
def _getloc(self):
pass
def _validate_node_type(self):
pass
def _get_arrival_time(self):
pass
def _set_loc(self, loc: Optional[Key]):
pass
def __getitem__(self, key: Key):
'''Return one of my stats stored in the database, or special cases:
``name``: return the name that uniquely identifies me within
my Character
``location``: return the name of my location
'''
pass
def __setitem__(self, key, value):
'''Set ``key``=``value`` for the present game-time.'''
pass
def __delitem__(self, key):
'''As of now, this key isn't mine.'''
pass
def __repr__(self):
pass
def facade(self):
pass
def delete(self) -> None:
pass
def clear(self) -> None:
'''Unset everything.'''
pass
| 13 | 5 | 6 | 0 | 5 | 1 | 2 | 0.2 | 2 | 7 | 2 | 0 | 12 | 1 | 12 | 76 | 107 | 22 | 71 | 27 | 58 | 14 | 57 | 25 | 44 | 4 | 6 | 2 | 22 |
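Thing serves its special keys through small dispatch tables (_getitem_dispatch and _setitem_dispatch) rather than if/elif chains. A self-contained illustration of that pattern, with names invented for the sketch:
class Record(dict):
    def _get_uid(self):
        return id(self)
    # plain functions stored in the class body, invoked as disp[key](self)
    _getitem_dispatch = {"uid": _get_uid}
    def __getitem__(self, key):
        disp = self._getitem_dispatch
        if key in disp:
            return disp[key](self)
        return super().__getitem__(key)
r = Record(hp=10)
assert r["hp"] == 10      # ordinary keys fall through to dict
assert r["uid"] == id(r)  # special keys go through the dispatch table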
146,520 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/node.py
|
lisien.node.UserMapping
|
class UserMapping(Mapping):
"""A mapping of the characters that have a particular node as a unit.
Getting characters from here isn't any better than getting them from
the engine direct, but with this you can do things like use the
.get() method to get a character if it's a user and otherwise
get something else; or test whether the character's name is in
the keys; and so on.
"""
__slots__ = ["node"]
def __init__(self, node) -> None:
"""Store the node"""
self.node = node
engine = getatt("node.engine")
def _user_names(self) -> Iterator[Key]:
node = self.node
engine = self.engine
charn = node.character.name
nn = node.name
seen = set()
for b, r, t in engine._iter_parent_btt():
for user in engine._unitness_cache.user_cache.iter_keys(
charn, nn, b, r, t
):
if user in seen:
continue
seen.add(user)
try:
if engine._unitness_cache.user_cache.retrieve(
charn, nn, user, b, r, t
):
yield user
except KeyError:
continue
@property
def only(self) -> "Node":
"""If there's only one unit, return it.
Otherwise, raise ``AmbiguousUserError``, a type of ``AttributeError``.
"""
if len(self) != 1:
raise AmbiguousUserError("No users, or more than one")
return next(iter(self.values()))
def __iter__(self) -> Iterator[Key]:
yield from self._user_names()
def __contains__(self, item: Key) -> bool:
return item in self.engine._unitness_cache.user_cache.retrieve(
self.node.character.name, self.node.name, *self.engine._btt()
)
def __len__(self) -> int:
return self.engine._unitness_cache.user_cache.count_keys(
self.node.character.name, self.node.name, *self.engine._btt()
)
def __bool__(self) -> bool:
return bool(
self.engine._unitness_cache.user_cache.count_keys(
self.node.character.name, self.node.name, *self.engine._btt()
)
)
def __getitem__(self, k) -> AbstractCharacter:
ret = self.engine.character[k]
node = self.node
charn = node.character.name
nn = node.name
avatar = ret.unit
if charn not in avatar or nn not in avatar[charn]:
raise KeyError(
"{} not used by {}".format(self.node.name, k),
self.engine._btt(),
)
return ret
|
class UserMapping(Mapping):
'''A mapping of the characters that have a particular node as a unit.
Getting characters from here isn't any better than getting them from
the engine direct, but with this you can do things like use the
.get() method to get a character if it's a user and otherwise
get something else; or test whether the character's name is in
the keys; and so on.
'''
def __init__(self, node) -> None:
'''Store the node'''
pass
def _user_names(self) -> Iterator[Key]:
pass
@property
def only(self) -> "Node":
'''If there's only one unit, return it.
Otherwise, raise ``AmbiguousUserError``, a type of ``AttributeError``.
'''
pass
def __iter__(self) -> Iterator[Key]:
pass
def __contains__(self, item: Key) -> bool:
pass
def __len__(self) -> int:
pass
def __bool__(self) -> bool:
pass
def __getitem__(self, k) -> AbstractCharacter:
pass
| 10 | 3 | 8 | 0 | 7 | 1 | 2 | 0.19 | 1 | 6 | 2 | 1 | 8 | 1 | 8 | 42 | 83 | 14 | 58 | 25 | 48 | 11 | 42 | 24 | 33 | 6 | 6 | 4 | 15 |
146,521 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/portal.py
|
lisien.portal.Portal
|
class Portal(Edge, RuleFollower):
"""Connection between two nodes that :class:`lisien.node.Thing` travel along
lisien entities are truthy so long as they exist, falsy if they've
been deleted.
"""
__slots__ = (
"graph",
"orig",
"dest",
"idx",
"origin",
"destination",
"_rulebook",
"_real_rule_mapping",
)
character = getatt("graph")
engine = getatt("db")
no_unwrap = True
def __init__(self, graph: AbstractCharacter, orig: Key, dest: Key):
super().__init__(graph, orig, dest, 0)
self.origin = graph.node[orig]
self.destination = graph.node[dest]
@property
def _cache(self):
return self.db._edge_val_cache[self.character.name][self.orig][
self.dest
][0]
def _rule_name_activeness(self):
rulebook_name = self._get_rulebook_name()
cache = self.engine._active_rules_cache
if rulebook_name not in cache:
return
cache = cache[rulebook_name]
for rule in cache:
for branch, turn, tick in self.engine._iter_parent_btt():
if branch not in cache[rule]:
continue
try:
yield (rule, cache[rule][branch][turn][tick])
break
except ValueError:
continue
except HistoricKeyError as ex:
if ex.deleted:
break
raise KeyError("{}->{} has no rulebook?".format(self.orig, self.dest))
def _get_rulebook_name(self):
btt = self.engine._btt()
try:
return self.engine._portals_rulebooks_cache.retrieve(
self.character.name, self.orig, self.dest, *btt
)
except KeyError:
ret = (self.character.name, self.orig, self.dest)
self.engine._portals_rulebooks_cache.store(*ret, *btt, ret)
self.engine.query.set_portal_rulebook(*ret, *btt, ret)
return ret
def _set_rulebook_name(self, rulebook):
character = self.character.name
orig = self.orig
dest = self.dest
cache = self.engine._portals_rulebooks_cache
try:
if rulebook == cache.retrieve(
character, orig, dest, *self.engine._btt()
):
return
except KeyError:
pass
branch, turn, tick = self.engine._nbtt()
cache.store(character, orig, dest, branch, turn, tick, rulebook)
self.engine.query.set_portal_rulebook(
character, orig, dest, branch, turn, tick, rulebook
)
def _get_rule_mapping(self):
return RuleMapping(self)
def __getitem__(self, key):
if key == "origin":
return self.orig
elif key == "destination":
return self.dest
elif key == "character":
return self.character.name
else:
return super().__getitem__(key)
def __setitem__(self, key, value):
if key in ("origin", "destination", "character"):
raise KeyError("Can't change " + key)
super().__setitem__(key, value)
def __repr__(self):
"""Describe character, origin, and destination"""
return "<{}.character[{}].portal[{}][{}]>".format(
repr(self.engine),
repr(self["character"]),
repr(self["origin"]),
repr(self["destination"]),
)
def __bool__(self):
"""It means something that I exist, even if I have no data."""
return (
self.orig in self.character.portal
and self.dest in self.character.portal[self.orig]
)
@property
def reciprocal(self) -> "Portal":
"""If there's another Portal connecting the same origin and
destination that I do, but going the opposite way, return
it. Else raise KeyError.
"""
try:
return self.character.portal[self.dest][self.orig]
except KeyError:
raise AttributeError("This portal has no reciprocal")
def facade(self):
face = self.character.facade()
ret = FacadePortal(face.portal[self.orig], self.dest)
face.portal._patch = {self.orig: {self.dest: ret}}
return ret
def historical(self, stat: Key) -> StatusAlias:
"""Return a reference to the values that a stat has had in the past.
You can use the reference in comparisons to make a history
query, and execute the query by calling it, or passing it to
``self.engine.ticks_when``.
"""
return StatusAlias(entity=self, stat=stat)
def update(self, e: Mapping | list[tuple[Any, Any]] = None, **f) -> None:
"""Works like regular update, but less
Only actually updates when the new value and the old value differ.
This is necessary to prevent certain infinite loops.
"""
if e is not None:
if hasattr(e, "keys") and callable(e.keys):
for k in e.keys():
if k not in self:
self[k] = e[k]
else:
v = e[k]
if self[k] != v:
self[k] = v
else:
for k, v in e:
if k not in self or self[k] != v:
self[k] = v
for k, v in f.items():
if k not in self or self[k] != v:
self[k] = v
def delete(self) -> None:
"""Remove myself from my :class:`Character`.
For symmetry with :class:`Thing` and :class:`Place`.
"""
self.clear()
self.engine._exist_edge(
self.character.name, self.orig, self.dest, exist=None
)
def unwrap(self) -> dict:
"""Return a dictionary representation of this entity"""
return {
k: v.unwrap()
if hasattr(v, "unwrap") and not hasattr(v, "no_unwrap")
else v
for (k, v) in self.items()
}
|
class Portal(Edge, RuleFollower):
'''Connection between two nodes that :class:`lisien.node.Thing` travel along
lisien entities are truthy so long as they exist, falsy if they've
been deleted.
'''
def __init__(self, graph: AbstractCharacter, orig: Key, dest: Key):
pass
@property
def _cache(self):
pass
def _rule_name_activeness(self):
pass
def _get_rulebook_name(self):
pass
def _set_rulebook_name(self, rulebook):
pass
def _get_rule_mapping(self):
pass
def __getitem__(self, key):
pass
def __setitem__(self, key, value):
pass
def __repr__(self):
'''Describe character, origin, and destination'''
pass
def __bool__(self):
'''It means something that I exist, even if I have no data.'''
pass
@property
def reciprocal(self) -> "Portal":
'''If there's another Portal connecting the same origin and
destination that I do, but going the opposite way, return
it. Else raise KeyError.
'''
pass
def facade(self):
pass
def historical(self, stat: Key) -> StatusAlias:
'''Return a reference to the values that a stat has had in the past.
You can use the reference in comparisons to make a history
query, and execute the query by calling it, or passing it to
``self.engine.ticks_when``.
'''
pass
def update(self, e: Mapping | list[tuple[Any, Any]] = None, **f) -> None:
'''Works like regular update, but less
Only actually updates when the new value and the old value differ.
This is necessary to prevent certain infinite loops.
'''
pass
def delete(self) -> None:
'''Remove myself from my :class:`Character`.
For symmetry with :class:`Thing` and :class:`Place`.
'''
pass
def unwrap(self) -> dict:
'''Return a dictionary representation of this entity'''
pass
| 19 | 8 | 9 | 0 | 8 | 1 | 3 | 0.17 | 2 | 13 | 5 | 0 | 16 | 4 | 16 | 91 | 188 | 26 | 139 | 43 | 120 | 23 | 100 | 38 | 83 | 10 | 11 | 5 | 41 |
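Portal.update only assigns a key when the incoming value actually differs, which is what prevents the infinite loops its docstring mentions. A self-contained stand-in (CountingDict and update_if_changed are invented for this sketch) showing the effect of that rule:
class CountingDict(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.writes = 0
    def __setitem__(self, key, value):
        self.writes += 1
        super().__setitem__(key, value)
def update_if_changed(d, e):
    # the same rule Portal.update applies: skip keys whose value is unchanged
    for k, v in e.items():
        if k not in d or d[k] != v:
            d[k] = v
d = CountingDict(speed=1)
update_if_changed(d, {"speed": 1, "terrain": "road"})
assert d.writes == 1  # "speed" was already 1, so only "terrain" was written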
146,522 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.ListWrapper
|
class ListWrapper(MutableWrapperDictList, MutableSequence, list):
"""A list synchronized with a serialized field.
This is meant to be used in allegedb entities (graph, node, or
edge), for when the user stores a list in them.
"""
__slots__ = ("_getter", "_setter", "_outer", "_key")
def __init__(self, getter, setter, outer, key):
self._outer = outer
self._key = key
self._getter = getter
self._setter = setter
def __eq__(self, other):
if self is other:
return True
if not isinstance(other, Sequence):
return NotImplemented
for me, you in zip_longest(self, other):
if hasattr(me, "unwrap"):
me = me.unwrap()
if hasattr(you, "unwrap"):
you = you.unwrap()
if me != you:
return False
else:
return True
def _copy(self):
return list(self._getter())
def _set(self, v):
self._setter(v)
self._outer[self._key] = v
def insert(self, i, v):
new = self._copy()
new.insert(i, v)
self._set(new)
def append(self, v):
new = self._copy()
new.append(v)
self._set(new)
def unwrap(self):
"""Deep copy myself as a list, with all contents unwrapped"""
return [
v.unwrap()
if hasattr(v, "unwrap") and not hasattr(v, "no_unwrap")
else v
for v in self
]
|
class ListWrapper(MutableWrapperDictList, MutableSequence, list):
'''A list synchronized with a serialized field.
This is meant to be used in allegedb entities (graph, node, or
edge), for when the user stores a list in them.
'''
def __init__(self, getter, setter, outer, key):
pass
def __eq__(self, other):
pass
def _copy(self):
pass
def _set(self, v):
pass
def insert(self, i, v):
pass
def append(self, v):
pass
def unwrap(self):
'''Deep copy myself as a list, with all contents unwrapped'''
pass
| 8 | 2 | 6 | 0 | 6 | 0 | 2 | 0.12 | 3 | 0 | 0 | 0 | 7 | 4 | 7 | 98 | 56 | 10 | 41 | 16 | 33 | 5 | 36 | 16 | 28 | 7 | 7 | 2 | 14 |
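ListWrapper never mutates in place: every mutation copies the underlying list, changes the copy, and writes the whole thing back through the setter. A hedged sketch that exercises it by hand against a plain dict (in practice allegedb constructs these wrappers itself):
from lisien.allegedb.wrap import ListWrapper
store = {"inventory": ["sword", "torch"]}
wrapped = ListWrapper(
    lambda: store["inventory"],                   # getter: read the stored value
    lambda v: store.__setitem__("inventory", v),  # setter: write the new value back
    store,                                        # outer mapping
    "inventory",                                  # key in the outer mapping
)
wrapped.append("rope")  # copy, mutate the copy, write it back whole
assert store["inventory"] == ["sword", "torch", "rope"]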
146,523 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.DictWrapper
|
class DictWrapper(MutableMappingWrapper, dict):
"""A dictionary synchronized with a serialized field.
This is meant to be used in allegedb entities (graph, node, or
edge), for when the user stores a dictionary in them.
"""
__slots__ = ("_getter", "_setter", "_outer", "_key")
_getter: Callable
def __init__(self, getter, setter, outer, key):
super().__init__()
self._getter = getter
self._setter = setter
self._outer = outer
self._key = key
def _copy(self):
return dict(self._getter())
def _set(self, v):
self._setter(v)
self._outer[self._key] = v
|
class DictWrapper(MutableMappingWrapper, dict):
'''A dictionary synchronized with a serialized field.
This is meant to be used in allegedb entities (graph, node, or
edge), for when the user stores a dictionary in them.
'''
def __init__(self, getter, setter, outer, key):
pass
def _copy(self):
pass
def _set(self, v):
pass
| 4 | 1 | 4 | 0 | 4 | 0 | 1 | 0.29 | 2 | 1 | 0 | 0 | 3 | 3 | 3 | 88 | 24 | 6 | 14 | 8 | 10 | 4 | 14 | 8 | 10 | 1 | 9 | 0 | 3 |
146,524 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.WindowDictSlice
|
class WindowDictSlice:
"""A slice of history in which the start is earlier than the stop"""
__slots__ = ["dic", "slic"]
dic: "WindowDict"
slic: slice
def __init__(self, dic: "WindowDict", slic: slice):
self.dic = dic
self.slic = slic
def __reversed__(self) -> Iterable[Any]:
return iter(WindowDictReverseSlice(self.dic, self.slic))
def __iter__(self):
dic = self.dic
with dic._lock:
if not dic:
return
slic = self.slic
if slic.step is not None:
for i in range(
slic.start or dic.beginning,
slic.stop or dic.end + 1,
slic.step,
):
dic._seek(i)
yield dic._past[-1][1]
return
if slic.start is None and slic.stop is None:
yield from map(get1, dic._past)
yield from map(get1, reversed(dic._future))
return
if slic.start is not None and slic.stop is not None:
if slic.stop == slic.start:
try:
yield dic[slic.stop]
except HistoricKeyError:
pass
return
past = dic._past
future = dic._future
if slic.start < slic.stop:
left, right = slic.start, slic.stop
dic._seek(right)
if not past:
return
if past[-1][0] == right:
future.append(past.pop())
cmp = lt
else:
left, right = slic.stop, slic.start
dic._seek(right)
cmp = le
if not past:
return
it = iter(past)
p0, p1 = next(it)
while cmp(p0, left):
try:
p0, p1 = next(it)
except StopIteration:
return
else:
yield p1
yield from map(get1, it)
elif slic.start is None:
stac = dic._past + list(reversed(dic._future))
while stac and stac[-1][0] >= slic.stop:
stac.pop()
yield from map(get1, stac)
return
else: # slic.stop is None
if not dic._past and not dic._future:
return
chan = chain(dic._past, reversed(dic._future))
nxt = next(chan)
while nxt[0] < slic.start:
try:
nxt = next(chan)
except StopIteration:
return
yield get1(nxt)
yield from map(get1, chan)
|
class WindowDictSlice:
'''A slice of history in which the start is earlier than the stop'''
def __init__(self, dic: "WindowDict", slic: slice):
pass
def __reversed__(self) -> Iterable[Any]:
pass
def __iter__(self):
pass
| 4 | 1 | 25 | 0 | 25 | 0 | 7 | 0.03 | 0 | 10 | 2 | 0 | 3 | 0 | 3 | 3 | 84 | 4 | 79 | 17 | 75 | 2 | 72 | 17 | 68 | 19 | 0 | 4 | 21 |
146,525 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.WindowDictReverseSlice
|
class WindowDictReverseSlice:
"""A slice of history in which the start is later than the stop"""
__slots__ = ["dict", "slice"]
def __init__(self, dict: "WindowDict", slic: slice):
self.dict = dict
self.slice = slic
def __reversed__(self):
return iter(WindowDictSlice(self.dict, self.slice))
def __iter__(self):
dic = self.dict
with dic._lock:
if not dic:
return
slic = self.slice
if slic.step is not None:
for i in range(
slic.start or dic.end,
slic.stop or dic.beginning,
slic.step,
):
dic._seek(i)
yield dic._past[-1][1]
return
if slic.start is None and slic.stop is None:
yield from map(get1, dic._future)
yield from map(get1, reversed(dic._past))
return
if slic.start is not None and slic.stop is not None:
if slic.start == slic.stop:
dic._seek(slic.stop)
yield dic._past[-1][1]
return
if slic.start < slic.stop:
left, right = slic.start, slic.stop
dic._seek(right)
it = reversed(dic._past)
next(it)
cmp = lt
else:
left, right = slic.stop, slic.start
dic._seek(right)
it = reversed(dic._past)
cmp = le
for frev, fv in it:
if cmp(frev, left):
return
yield fv
elif slic.start is None:
stac = dic._past + list(reversed(dic._future))
while stac and stac[-1][0] >= slic.stop:
stac.pop()
yield from map(get1, reversed(stac))
else: # slic.stop is None
stac = deque(dic._past)
stac.extend(reversed(dic._future))
while stac and stac[0][0] < slic.start:
stac.popleft()
yield from map(get1, reversed(stac))
|
class WindowDictReverseSlice:
'''A slice of history in which the start is later than the stop'''
def __init__(self, dict: "WindowDict", slic: slice):
pass
def __reversed__(self):
pass
def __iter__(self):
pass
| 4 | 1 | 18 | 0 | 18 | 0 | 5 | 0.04 | 0 | 6 | 1 | 0 | 3 | 2 | 3 | 3 | 62 | 4 | 57 | 15 | 53 | 2 | 50 | 15 | 46 | 13 | 0 | 4 | 15 |
146,526 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/node.py
|
lisien.node.OrigsValues
|
class OrigsValues(ValuesView):
_mapping: "Origs"
def __contains__(self, item) -> bool:
_, name = self._mapping._pn
return item.destination.name == name
|
class OrigsValues(ValuesView):
def __contains__(self, item) -> bool:
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 33 | 6 | 1 | 5 | 3 | 3 | 0 | 5 | 3 | 3 | 1 | 6 | 0 | 1 |
146,527 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.WindowDictPastView
|
class WindowDictPastView(WindowDictPastFutureView):
"""Read-only mapping of just the past of a WindowDict"""
def __iter__(self) -> Iterable[int]:
with self.lock:
stack = self.stack
return map(get0, reversed(stack))
def __getitem__(self, key: int) -> Any:
with self.lock:
stack = self.stack
if not stack or key < stack[0][0] or key > stack[-1][0]:
raise KeyError
for rev, value in stack:
if rev == key:
return value
raise KeyError
def keys(self) -> WindowDictPastFutureKeysView:
return WindowDictPastFutureKeysView(self)
def items(self) -> WindowDictPastItemsView:
return WindowDictPastItemsView(self)
def values(self) -> WindowDictPastFutureValuesView:
return WindowDictPastFutureValuesView(self)
|
class WindowDictPastView(WindowDictPastFutureView):
'''Read-only mapping of just the past of a WindowDict'''
def __iter__(self) -> Iterable[int]:
pass
def __getitem__(self, key: int) -> Any:
pass
def keys(self) -> WindowDictPastFutureKeysView:
pass
def items(self) -> WindowDictPastItemsView:
pass
def values(self) -> WindowDictPastFutureValuesView:
pass
| 6 | 1 | 4 | 0 | 4 | 0 | 2 | 0.05 | 1 | 8 | 3 | 0 | 5 | 0 | 5 | 41 | 26 | 5 | 20 | 9 | 14 | 1 | 20 | 9 | 14 | 4 | 7 | 3 | 8 |
146,528 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.WindowDictFutureView
|
class WindowDictFutureView(WindowDictPastFutureView):
"""Read-only mapping of just the future of a WindowDict"""
def __iter__(self):
with self.lock:
stack = self.stack
return map(get0, reversed(stack))
def __getitem__(self, key: int):
with self.lock:
stack = self.stack
if not stack or key < stack[-1][0] or key > stack[0][0]:
raise KeyError
for rev, value in stack:
if rev == key:
return value
raise KeyError("No such revision", key)
def keys(self) -> WindowDictPastFutureKeysView:
return WindowDictPastFutureKeysView(self)
def items(self) -> WindowDictFutureItemsView:
return WindowDictFutureItemsView(self)
def values(self) -> WindowDictPastFutureValuesView:
return WindowDictPastFutureValuesView(self)
|
class WindowDictFutureView(WindowDictPastFutureView):
'''Read-only mapping of just the future of a WindowDict'''
def __iter__(self):
pass
def __getitem__(self, key: int):
pass
def keys(self) -> WindowDictPastFutureKeysView:
pass
def items(self) -> WindowDictFutureItemsView:
pass
def values(self) -> WindowDictPastFutureValuesView:
pass
| 6 | 1 | 4 | 0 | 4 | 0 | 2 | 0.05 | 1 | 7 | 3 | 0 | 5 | 0 | 5 | 41 | 26 | 5 | 20 | 9 | 14 | 1 | 20 | 9 | 14 | 4 | 7 | 3 | 8 |
146,529 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.WindowDictFutureItemsView
|
class WindowDictFutureItemsView(WindowDictPastFutureItemsView):
"""View on a WindowDict's future items relative to last lookup"""
@staticmethod
def _out_of_range(item: tuple[int, Any], stack: list[tuple[int, Any]]):
return item[0] < stack[-1][0] or item[0] > stack[0][0]
|
class WindowDictFutureItemsView(WindowDictPastFutureItemsView):
'''View on a WindowDict's future items relative to last lookup'''
@staticmethod
def _out_of_range(item: tuple[int, Any], stack: list[tuple[int, Any]]):
pass
| 3 | 1 | 2 | 0 | 2 | 0 | 1 | 0.25 | 1 | 4 | 0 | 0 | 0 | 0 | 1 | 50 | 6 | 1 | 4 | 3 | 1 | 1 | 3 | 2 | 1 | 1 | 8 | 0 | 1 |
146,530 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.WindowDict
|
class WindowDict(MutableMapping):
"""A dict that keeps every value that a variable has had over time.
Look up a revision number in this dict, and it will give you the
effective value as of that revision. Keys should always be
revision numbers.
Optimized for the cases where you look up the same revision
repeatedly, or its neighbors.
This supports slice notation to get all values in a given
time-frame. If you do not supply a step, you'll just get the
values, with no indication of when they're from exactly --
so explicitly supply a step of 1 to get the value at each point in
the slice, or use the ``future`` and ``past`` methods to get read-only
mappings of data relative to a particular revision.
Unlike slices of eg. lists, you can slice with a start greater than the stop
even if you don't supply a step. That will get you values in reverse order.
"""
__slots__ = ("_future", "_past", "_keys", "_last", "_lock")
_past: list[tuple[int, Any]]
_future: list[tuple[int, Any]]
_keys: set[int]
_last: int | None
@property
def beginning(self) -> int | None:
with self._lock:
if not self._past:
if not self._future:
return None
return self._future[-1][0]
return self._past[0][0]
@property
def end(self) -> int | None:
with self._lock:
if not self._future:
if not self._past:
return None
return self._past[-1][0]
return self._future[0][0]
def future(self, rev: int = None) -> WindowDictFutureView:
"""Return a Mapping of items after the given revision.
Default revision is the last one looked up.
"""
if rev is not None:
with self._lock:
self._seek(rev)
return WindowDictFutureView(self._future, self._lock)
def past(self, rev: int = None) -> WindowDictPastView:
"""Return a Mapping of items at or before the given revision.
Default revision is the last one looked up.
"""
if rev is not None:
with self._lock:
self._seek(rev)
return WindowDictPastView(self._past, self._lock)
def search(self, rev: int) -> Any:
"""Alternative access for far-away revisions
This uses a binary search, which is faster in the case of random
access, but not in the case of fast-forward and rewind, which are
more common in time travel.
This arranges the cache to optimize retrieval of the same and
nearby revisions, same as normal lookups.
"""
def recurse(revs: list[tuple[int, Any]]) -> Any:
if len(revs) < 1:
raise HistoricKeyError(
"No data ever for revision", rev, deleted=False
)
elif len(revs) == 1:
if revs[0][0] <= rev:
return revs[0]
raise HistoricKeyError(
"Can't retrieve revision", rev, deleted=True
)
pivot = len(revs) // 2
before = revs[:pivot]
after = revs[pivot:]
assert before and after
if rev < after[0][0]:
if rev > before[-1][0]:
return before[-1]
return recurse(before)
elif rev == after[0][0]:
return after[0]
else:
return recurse(after)
with self._lock:
revs = self._past + list(reversed(self._future))
if len(revs) == 1:
result_rev, result = revs[0]
if rev < result_rev:
raise HistoricKeyError(
"No data ever for revision", rev, deleted=False
)
else:
result_rev, result = recurse(revs)
i = revs.index((result_rev, result)) + 1
self._past = revs[:i]
self._future = list(reversed(revs[i:]))
self._last = rev
return result
def _seek(self, rev: int) -> None:
"""Arrange the caches to help look up the given revision."""
if rev == self._last:
return
past = self._past
future = self._future
if future:
appender = past.append
popper = future.pop
future_start = future[-1][0]
while future_start <= rev:
appender(popper())
if future:
future_start = future[-1][0]
else:
break
if past:
popper = past.pop
appender = future.append
past_end = past[-1][0]
while past_end > rev:
appender(popper())
if past:
past_end = past[-1][0]
else:
break
self._last = rev
def rev_gettable(self, rev: int) -> bool:
beg = self.beginning
if beg is None:
return False
return rev >= beg
def rev_before(self, rev: int, search=False):
"""Return the latest past rev on which the value changed.
If it changed on this exact rev, return the rev.
"""
with self._lock:
if search:
self.search(rev)
else:
self._seek(rev)
if self._past:
return self._past[-1][0]
def rev_after(self, rev: int, search=False):
"""Return the earliest future rev on which the value will change."""
with self._lock:
if search:
self.search(rev)
else:
self._seek(rev)
if self._future:
return self._future[-1][0]
def initial(self) -> Any:
"""Return the earliest value we have"""
with self._lock:
if self._past:
return self._past[0][1]
if self._future:
return self._future[-1][1]
raise KeyError("No data")
def final(self) -> Any:
"""Return the latest value we have"""
with self._lock:
if self._future:
return self._future[0][1]
if self._past:
return self._past[-1][1]
raise KeyError("No data")
def truncate(
self, rev: int, direction: Direction = Direction.FORWARD, search=False
) -> set[int]:
"""Delete everything after the given revision, exclusive.
With direction='backward', delete everything before the revision,
exclusive, instead.
Return a set of keys deleted.
"""
if not isinstance(direction, Direction):
direction = Direction(direction)
deleted = set()
with self._lock:
if search:
self.search(rev)
else:
self._seek(rev)
if direction == Direction.FORWARD:
to_delete = set(map(get0, self._future))
deleted.update(to_delete)
self._keys.difference_update(to_delete)
self._future = []
elif direction == Direction.BACKWARD:
if not self._past:
return deleted
if self._past[-1][0] == rev:
to_delete = set(map(get0, self._past[:-1]))
deleted.update(to_delete)
self._keys.difference_update(to_delete)
self._past = [self._past[-1]]
else:
to_delete = set(map(get0, self._past))
deleted.update(to_delete)
self._keys.difference_update(to_delete)
self._past = []
else:
raise ValueError("Need direction 'forward' or 'backward'")
return deleted
def keys(self) -> WindowDictKeysView:
return WindowDictKeysView(self)
def items(self) -> WindowDictItemsView:
return WindowDictItemsView(self)
def values(self) -> WindowDictValuesView:
return WindowDictValuesView(self)
def __bool__(self) -> bool:
return bool(self._keys)
def copy(self):
with self._lock:
empty = WindowDict.__new__(WindowDict)
empty._past = self._past.copy()
empty._future = self._future.copy()
empty._keys = self._keys.copy()
empty._last = self._last
return empty
def __init__(
self, data: Union[list[tuple[int, Any]], dict[int, Any]] = None
) -> None:
self._lock = RLock()
with self._lock:
if not data:
self._past = []
elif isinstance(data, Mapping):
self._past = list(data.items())
else:
# assume it's an orderable sequence of pairs
self._past = list(data)
self._past.sort()
self._future = []
self._keys = set(map(get0, self._past))
self._last = None
def __iter__(self) -> Iterable[Any]:
if not self:
return
if self._past:
yield from map(get0, self._past)
if self._future:
yield from map(get0, self._future)
def __contains__(self, item: int) -> bool:
return item in self._keys
def __len__(self) -> int:
return len(self._keys)
def __getitem__(self, rev: int) -> Any:
if isinstance(rev, slice):
if None not in (rev.start, rev.stop) and rev.start > rev.stop:
return WindowDictReverseSlice(self, rev)
return WindowDictSlice(self, rev)
with self._lock:
self._seek(rev)
past = self._past
if not past:
raise HistoricKeyError(
"Revision {} is before the start of history".format(rev)
)
return past[-1][1]
def __setitem__(self, rev: int, v: Any) -> None:
self.set_item(rev, v)
def set_item(self, rev: int, v: Any, search=False) -> None:
past = self._past
with self._lock:
if past or self._future:
if search:
self.search(rev)
else:
self._seek(rev)
if past:
if past[-1][0] == rev:
past[-1] = (rev, v)
else:
past.append((rev, v))
else:
past.append((rev, v))
else:
past.append((rev, v))
self._keys.add(rev)
def __delitem__(self, rev: int) -> None:
self.del_item(rev)
def del_item(self, rev: int, search=False) -> None:
# Not checking for rev's presence at the beginning because
# to do so would likely require iterating thru history,
# which I have to do anyway in deleting.
# But handle degenerate case.
if not self:
raise HistoricKeyError("Tried to delete from an empty WindowDict")
if self.beginning is None:
if self.end is not None and rev > self.end:
raise HistoricKeyError(
"Rev outside of history: {}".format(rev)
)
elif self.end is None:
if self.beginning is not None and rev < self.beginning:
raise HistoricKeyError(
"Rev outside of history: {}".format(rev)
)
elif not self.beginning <= rev <= self.end:
raise HistoricKeyError("Rev outside of history: {}".format(rev))
with self._lock:
if search:
self.search(rev)
else:
self._seek(rev)
past = self._past
if not past or past[-1][0] != rev:
raise HistoricKeyError("Rev not present: {}".format(rev))
del past[-1]
self._keys.remove(rev)
def __repr__(self) -> str:
me = {}
if self._past:
me.update(self._past)
if self._future:
me.update(self._future)
return "{}({})".format(self.__class__.__name__, me)
|
class WindowDict(MutableMapping):
'''A dict that keeps every value that a variable has had over time.
Look up a revision number in this dict, and it will give you the
effective value as of that revision. Keys should always be
revision numbers.
Optimized for the cases where you look up the same revision
repeatedly, or its neighbors.
This supports slice notation to get all values in a given
time-frame. If you do not supply a step, you'll just get the
values, with no indication of when they're from exactly --
so explicitly supply a step of 1 to get the value at each point in
the slice, or use the ``future`` and ``past`` methods to get read-only
mappings of data relative to a particular revision.
Unlike slices of eg. lists, you can slice with a start greater than the stop
even if you don't supply a step. That will get you values in reverse order.
'''
@property
def beginning(self) -> int | None:
pass
@property
def end(self) -> int | None:
pass
def future(self, rev: int = None) -> WindowDictFutureView:
'''Return a Mapping of items after the given revision.
Default revision is the last one looked up.
'''
pass
def past(self, rev: int = None) -> WindowDictPastView:
'''Return a Mapping of items at or before the given revision.
Default revision is the last one looked up.
'''
pass
def search(self, rev: int) -> Any:
'''Alternative access for far-away revisions
This uses a binary search, which is faster in the case of random
access, but not in the case of fast-forward and rewind, which are
more common in time travel.
This arranges the cache to optimize retrieval of the same and
nearby revisions, same as normal lookups.
'''
pass
def recurse(revs: list[tuple[int, Any]]) -> Any:
pass
def _seek(self, rev: int) -> None:
'''Arrange the caches to help look up the given revision.'''
pass
def rev_gettable(self, rev: int) -> bool:
pass
def rev_before(self, rev: int, search=False):
'''Return the latest past rev on which the value changed.
If it changed on this exact rev, return the rev.
'''
pass
def rev_after(self, rev: int, search=False):
'''Return the earliest future rev on which the value will change.'''
pass
def initial(self) -> Any:
'''Return the earliest value we have'''
pass
def final(self) -> Any:
'''Return the latest value we have'''
pass
def truncate(
self, rev: int, direction: Direction = Direction.FORWARD, search=False
) -> set[int]:
'''Delete everything after the given revision, exclusive.
With direction='backward', delete everything before the revision,
exclusive, instead.
Return a set of keys deleted.
'''
pass
def keys(self) -> WindowDictKeysView:
pass
def items(self) -> WindowDictItemsView:
pass
def values(self) -> WindowDictValuesView:
pass
def __bool__(self) -> bool:
pass
def copy(self):
pass
def __init__(
self, data: Union[list[tuple[int, Any]], dict[int, Any]] = None
) -> None:
pass
def __iter__(self) -> Iterable[Any]:
pass
def __contains__(self, item: int) -> bool:
pass
def __len__(self) -> int:
pass
def __getitem__(self, rev: int) -> Any:
pass
def __setitem__(self, rev: int, v: Any) -> None:
pass
def set_item(self, rev: int, v: Any, search=False) -> None:
pass
def __delitem__(self, rev: int) -> None:
pass
def del_item(self, rev: int, search=False) -> None:
pass
def __repr__(self) -> str:
pass
| 31 | 10 | 12 | 1 | 10 | 1 | 3 | 0.16 | 1 | 22 | 9 | 3 | 27 | 1 | 27 | 68 | 366 | 48 | 273 | 57 | 238 | 45 | 234 | 51 | 205 | 9 | 7 | 4 | 86 |
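A minimal, self-contained sketch of the behaviour the WindowDict docstring describes: effective-value lookup, forward and reverse slicing, and the read-only past/future views.
from lisien.allegedb.window import WindowDict
wd = WindowDict()
wd[0] = "cold"
wd[5] = "warm"
wd[9] = "hot"
assert wd[3] == "cold"   # value in effect at revision 3
assert wd[7] == "warm"   # latest value set at or before revision 7
assert list(wd[0:10]) == ["cold", "warm", "hot"]  # plain slice yields values only
assert list(wd[10:0]) == ["hot", "warm"]          # start > stop iterates in reverse
assert sorted(wd.past(5)) == [0, 5]  # read-only mapping of revisions at or before 5
assert list(wd.future(5)) == [9]     # read-only mapping of revisions after 5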
146,531 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.TurnDict
|
class TurnDict(FuturistWindowDict):
__slots__ = ("_future", "_past")
_future: list[tuple[int, Any]]
_past: list[tuple[int, Any]]
cls = FuturistWindowDict
def __setitem__(self, turn: int, value: Any) -> None:
if type(value) is not FuturistWindowDict:
value = FuturistWindowDict(value)
FuturistWindowDict.__setitem__(self, turn, value)
|
class TurnDict(FuturistWindowDict):
def __setitem__(self, turn: int, value: Any) -> None:
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 2 | 0 | 0 | 1 | 0 | 1 | 70 | 10 | 1 | 9 | 4 | 7 | 0 | 9 | 4 | 7 | 2 | 9 | 1 | 2 |
146,532 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.SettingsTurnDict
|
class SettingsTurnDict(WindowDict):
"""A WindowDict that contains a span of time, indexed as turns and ticks
Each turn is a series of ticks. Once a value is set at some turn and tick,
it's in effect at every tick in the turn after that one, and every
further turn.
"""
__slots__ = ("_future", "_past")
_future: list[tuple[int, Any]]
_past: list[tuple[int, Any]]
cls = WindowDict
def __setitem__(self, turn: int, value: Any) -> None:
if not isinstance(value, self.cls):
value = self.cls(value)
WindowDict.__setitem__(self, turn, value)
def retrieve(self, turn: int, tick: int) -> Any:
"""Retrieve the value that was in effect at this turn and tick
Whether or not it was *set* at this turn and tick
"""
if turn in self and self[turn].rev_gettable(tick):
return self[turn][tick]
elif self.rev_gettable(turn - 1):
return self[turn - 1].final()
raise KeyError(f"Can't retrieve turn {turn}, tick {tick}")
def retrieve_exact(self, turn: int, tick: int) -> Any:
"""Retrieve the value only if it was set at this exact turn and tick"""
if turn not in self:
raise KeyError(f"No data in turn {turn}")
if tick not in self[turn]:
raise KeyError(f"No data for tick {tick} in turn {turn}")
return self[turn][tick]
def store_at(self, turn: int, tick: int, value: Any) -> None:
"""Set a value at a time, creating the turn if needed"""
if turn in self:
self[turn][tick] = value
else:
self[turn] = {tick: value}
|
class SettingsTurnDict(WindowDict):
'''A WindowDict that contains a span of time, indexed as turns and ticks
Each turn is a series of ticks. Once a value is set at some turn and tick,
it's in effect at every tick in the turn after that one, and every
further turn.
'''
def __setitem__(self, turn: int, value: Any) -> None:
pass
def retrieve(self, turn: int, tick: int) -> Any:
'''Retrieve the value that was in effect at this turn and tick
Whether or not it was *set* at this turn and tick
'''
pass
def retrieve_exact(self, turn: int, tick: int) -> Any:
'''Retrieve the value only if it was set at this exact turn and tick'''
pass
def store_at(self, turn: int, tick: int, value: Any) -> None:
'''Set a value at a time, creating the turn if needed'''
pass
| 5 | 4 | 7 | 1 | 5 | 1 | 3 | 0.38 | 1 | 3 | 0 | 1 | 4 | 0 | 4 | 72 | 45 | 9 | 26 | 7 | 21 | 10 | 24 | 7 | 19 | 3 | 8 | 1 | 10 |
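A short sketch of the turn-and-tick semantics described above.
from lisien.allegedb.window import SettingsTurnDict
std = SettingsTurnDict()
std.store_at(1, 0, "a")                 # turn 1, tick 0
std.store_at(1, 4, "b")                 # a later tick in the same turn
assert std.retrieve(1, 2) == "a"        # value in effect at turn 1, tick 2
assert std.retrieve(3, 0) == "b"        # later turns inherit turn 1's final value
assert std.retrieve_exact(1, 4) == "b"  # succeeds only at the exact turn and tick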
146,533 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.FuturistWindowDict
|
class FuturistWindowDict(WindowDict):
"""A WindowDict that does not let you rewrite the past."""
__slots__ = (
"_future",
"_past",
)
_future: list[tuple[int, Any]]
_past: list[tuple[int, Any]]
def __setitem__(self, rev: int, v: Any) -> None:
if hasattr(v, "unwrap") and not hasattr(v, "no_unwrap"):
v = v.unwrap()
with self._lock:
self._seek(rev)
past = self._past
future = self._future
if future:
raise HistoricKeyError(
"Already have some history after {}".format(rev)
)
if not past:
past.append((rev, v))
elif rev > past[-1][0]:
past.append((rev, v))
elif rev == past[-1][0]:
past[-1] = (rev, v)
else:
raise HistoricKeyError(
"Already have some history after {} "
"(and my seek function is broken?)".format(rev)
)
self._keys.add(rev)
|
class FuturistWindowDict(WindowDict):
'''A WindowDict that does not let you rewrite the past.'''
def __setitem__(self, rev: int, v: Any) -> None:
pass
| 2 | 1 | 23 | 0 | 23 | 0 | 6 | 0.03 | 1 | 3 | 1 | 1 | 1 | 0 | 1 | 69 | 33 | 2 | 30 | 5 | 28 | 1 | 19 | 5 | 17 | 6 | 8 | 2 | 6 |
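A short sketch of the rule the docstring states: writing at or after the newest revision is fine, writing before it is refused.
from lisien.allegedb.window import FuturistWindowDict
fwd = FuturistWindowDict()
fwd[3] = "later"
fwd[3] = "revised"      # overwriting the newest revision is allowed
try:
    fwd[1] = "earlier"  # writing before revision 3 is not
except KeyError:        # HistoricKeyError in the code above; assumed to subclass KeyError
    print("rejected: history already exists after revision 1")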
146,534 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.EntikeyWindowDict
|
class EntikeyWindowDict(WindowDict):
__slots__ = ("_past", "_future", "entikeys")
def __init__(
self, data: Union[list[tuple[int, Any]], dict[int, Any]] = None
) -> None:
if data:
if hasattr(data, "values") and callable(data.values):
self.entikeys = {value[:-2] for value in data.values()}
else:
self.entikeys = {value[:-2] for value in data}
else:
self.entikeys = set()
super().__init__(data)
def __setitem__(self, rev: int, v: tuple) -> None:
self.entikeys.add(v[:-2])
super().__setitem__(rev, v)
def __delitem__(self, rev: int) -> None:
entikey = self[rev][:-2]
super().__delitem__(rev)
for tup in self.values():
if tup[:-2] == entikey:
return
self.entikeys.remove(entikey)
|
class EntikeyWindowDict(WindowDict):
def __init__(
self, data: Union[list[tuple[int, Any]], dict[int, Any]] = None
) -> None:
pass
def __setitem__(self, rev: int, v: tuple) -> None:
pass
def __delitem__(self, rev: int) -> None:
pass
| 4 | 0 | 7 | 0 | 7 | 0 | 2 | 0 | 1 | 7 | 0 | 0 | 3 | 1 | 3 | 71 | 26 | 3 | 23 | 10 | 17 | 0 | 19 | 8 | 15 | 3 | 8 | 2 | 7 |
146,535 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/tests/test_all.py
|
lisien.allegedb.tests.test_all.StorageTest
|
class StorageTest(AllegedTest):
def runTest(self):
"""Test that all the graph types can store and retrieve key-value pairs
for the graph as a whole, for nodes, and for edges.
"""
for graphmaker in self.graphmakers:
g = graphmaker("testgraph")
g.add_node(0)
g.add_node(1)
g.add_edge(0, 1)
n = g.node[0]
e = g.edge[0][1]
for k, v in testdata:
g.graph[k] = v
self.assertIn(k, g.graph)
self.assertEqual(g.graph[k], v)
del g.graph[k]
self.assertNotIn(k, g.graph)
n[k] = v
self.assertIn(k, n)
self.assertEqual(n[k], v)
del n[k]
self.assertNotIn(k, n)
e[k] = v
self.assertIn(k, e)
self.assertEqual(e[k], v)
del e[k]
self.assertNotIn(k, e)
self.engine.del_graph("testgraph")
|
class StorageTest(AllegedTest):
def runTest(self):
'''Test that all the graph types can store and retrieve key-value pairs
for the graph as a whole, for nodes, and for edges.
'''
pass
| 2 | 1 | 29 | 1 | 25 | 3 | 3 | 0.12 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 74 | 30 | 1 | 26 | 7 | 24 | 3 | 26 | 7 | 24 | 3 | 3 | 2 | 3 |
146,536 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/tests/test_all.py
|
lisien.allegedb.tests.test_all.SetStorageTest
|
class SetStorageTest(AllegedTest):
"""Make sure the set wrapper works"""
def runTest(self):
for i, graphmaker in enumerate(self.graphmakers):
self.engine.turn = i
g = graphmaker("testgraph")
g.add_node(0)
g.add_node(1)
g.add_edge(0, 1)
n = g.node[0]
e = g.edge[0][1]
for entity in g.graph, n, e:
entity[0] = set(range(10))
self.engine.turn = i + 1
for entity in g.graph, n, e:
self.assertEqual(entity[0], set(range(10)))
for j in range(0, 12, 2):
entity[0].discard(j)
self.assertEqual(entity[0], {1, 3, 5, 7, 9})
self.engine.turn = i
for entity in g.graph, n, e:
self.assertEqual(entity[0], set(range(10)))
|
class SetStorageTest(AllegedTest):
'''Make sure the set wrapper works'''
def runTest(self):
pass
| 2 | 1 | 20 | 0 | 20 | 0 | 6 | 0.05 | 1 | 3 | 0 | 2 | 1 | 0 | 1 | 74 | 23 | 1 | 21 | 8 | 19 | 1 | 21 | 8 | 19 | 6 | 3 | 3 | 6 |
146,537 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/window.py
|
lisien.allegedb.window.WindowDictPastItemsView
|
class WindowDictPastItemsView(WindowDictPastFutureItemsView):
@staticmethod
def _out_of_range(item: tuple[int, Any], stack: list[tuple[int, Any]]):
return item[0] < stack[0][0] or item[0] > stack[-1][0]
|
class WindowDictPastItemsView(WindowDictPastFutureItemsView):
@staticmethod
def _out_of_range(item: tuple[int, Any], stack: list[tuple[int, Any]]):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 4 | 0 | 0 | 0 | 0 | 1 | 50 | 4 | 0 | 4 | 3 | 1 | 0 | 3 | 2 | 1 | 1 | 8 | 0 | 1 |
146,538 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/tests/test_all.py
|
lisien.allegedb.tests.test_all.ListStorageTest
|
class ListStorageTest(AllegedTest):
"""Make sure the list wrapper works"""
def runTest(self):
for i, graphmaker in enumerate(self.graphmakers):
self.engine.turn = i
g = graphmaker("testgraph")
g.add_node(0)
g.add_node(1)
g.add_edge(0, 1)
n = g.node[0]
e = g.edge[0][1]
for entity in g.graph, n, e:
entity[0] = [
"spam",
("eggs", "ham"),
{"baked beans": "delicious"},
["qux", "quux", "quuux"],
{"hats", "shirts", "pants"},
]
self.engine.turn = i + 1
for entity in g.graph, n, e:
self.assertEqual(entity[0][0], "spam")
entity[0][0] = "eggplant"
self.assertEqual(entity[0][0], "eggplant")
self.assertEqual(entity[0][1], ("eggs", "ham"))
entity[0][1] = ("ham", "eggs")
self.assertEqual(entity[0][1], ("ham", "eggs"))
self.assertEqual(entity[0][2], {"baked beans": "delicious"})
entity[0][2]["refried beans"] = "deliciouser"
self.assertEqual(
entity[0][2],
{
"baked beans": "delicious",
"refried beans": "deliciouser",
},
)
self.assertEqual(entity[0][3], ["qux", "quux", "quuux"])
entity[0][3].pop()
self.assertEqual(entity[0][3], ["qux", "quux"])
self.assertEqual(entity[0][4], {"hats", "shirts", "pants"})
entity[0][4].discard("shame")
entity[0][4].remove("pants")
entity[0][4].add("sun")
self.assertEqual(entity[0][4], {"hats", "shirts", "sun"})
self.engine.turn = i
for entity in g.graph, n, e:
self.assertEqual(entity[0][0], "spam")
self.assertEqual(entity[0][1], ("eggs", "ham"))
self.assertEqual(entity[0][2], {"baked beans": "delicious"})
self.assertEqual(entity[0][3], ["qux", "quux", "quuux"])
self.assertEqual(entity[0][4], {"hats", "shirts", "pants"})
|
class ListStorageTest(AllegedTest):
'''Make sure the list wrapper works'''
def runTest(self):
pass
| 2 | 1 | 49 | 0 | 49 | 0 | 5 | 0.02 | 1 | 1 | 0 | 2 | 1 | 0 | 1 | 74 | 52 | 1 | 50 | 7 | 48 | 1 | 38 | 7 | 36 | 5 | 3 | 2 | 5 |
146,539 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/node.py
|
lisien.node.Origs
|
class Origs(Mapping):
__slots__ = ("_pn", "_ecnb")
def __init__(self, node) -> None:
name = node.name
character = node.character
engine = node.engine
self._pn = (character.portal, name)
self._ecnb = (engine._edges_cache, character.name, name, engine._btt)
def __iter__(self) -> Iterator["Node"]:
edges_cache, charname, name, btt = self._ecnb
return edges_cache.iter_predecessors(charname, name, *btt())
def __contains__(self, item) -> bool:
edges_cache, charname, name, btt = self._ecnb
return edges_cache.has_predecessor(charname, name, item, *btt())
def __len__(self) -> int:
edges_cache, charname, name, btt = self._ecnb
return edges_cache.count_predecessors(charname, name, *btt())
def __getitem__(self, item) -> "Node":
if item not in self:
raise KeyError
portal, name = self._pn
return portal[item][name]
def values(self) -> OrigsValues:
return OrigsValues(self)
|
class Origs(Mapping):
def __init__(self, node) -> None:
pass
def __iter__(self) -> Iterator["Node"]:
pass
def __contains__(self, item) -> bool:
pass
def __len__(self) -> int:
pass
def __getitem__(self, item) -> "Node":
pass
def values(self) -> OrigsValues:
pass
| 7 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 4 | 1 | 0 | 6 | 2 | 6 | 40 | 30 | 6 | 24 | 17 | 17 | 0 | 24 | 17 | 17 | 2 | 6 | 1 | 7 |
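Origs is a lazy, read-only Mapping over the predecessors of a node: it stores just enough bound references to answer queries on demand, and collections.abc.Mapping supplies get(), keys(), items(), and the rest. Below is a minimal self-contained sketch of the same view pattern over a plain dict-of-dicts graph; PredecessorView and the toy edges are invented for the example.

from collections.abc import Mapping


class PredecessorView(Mapping):
	"""Nodes that have an edge pointing at `dest` in `edges`."""

	__slots__ = ("_edges", "_dest")

	def __init__(self, edges, dest):
		self._edges = edges  # {origin: {destination: edge_value}}
		self._dest = dest

	def __iter__(self):
		return (
			orig for orig, dests in self._edges.items() if self._dest in dests
		)

	def __len__(self):
		return sum(1 for _ in self)

	def __getitem__(self, orig):
		return self._edges[orig][self._dest]


edges = {"kitchen": {"hall": 1}, "porch": {"hall": 2}, "hall": {"porch": 3}}
into_hall = PredecessorView(edges, "hall")
assert set(into_hall) == {"kitchen", "porch"}
assert into_hall["porch"] == 2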
146,540 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/node.py
|
lisien.node.NodeContent
|
class NodeContent(Mapping):
__slots__ = ("node",)
def __init__(self, node) -> None:
self.node = node
def __iter__(self) -> Iterator[Key]:
try:
it = self.node.engine._node_contents_cache.retrieve(
self.node.character.name,
self.node.name,
*self.node.engine._btt(),
)
except KeyError:
return
yield from it
def __len__(self) -> int:
try:
return len(
self.node.engine._node_contents_cache.retrieve(
self.node.character.name,
self.node.name,
*self.node.engine._btt(),
)
)
except KeyError:
return 0
def __contains__(self, item) -> bool:
try:
return self.node.character.thing[item].location == self.node
except KeyError:
return False
def __getitem__(self, item) -> "Thing":
if item not in self:
raise KeyError
return self.node.character.thing[item]
def values(self) -> NodeContentValues:
return NodeContentValues(self)
|
class NodeContent(Mapping):
def __init__(self, node) -> None:
pass
def __iter__(self) -> Iterator[Key]:
pass
def __len__(self) -> int:
pass
def __contains__(self, item) -> bool:
pass
def __getitem__(self, item) -> "Thing":
pass
def values(self) -> NodeContentValues:
pass
| 7 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 4 | 1 | 0 | 6 | 1 | 6 | 40 | 42 | 6 | 36 | 10 | 29 | 0 | 26 | 10 | 19 | 2 | 6 | 1 | 10 |
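NodeContent treats a cache miss (the KeyError from retrieve) as "this node contains nothing" rather than as an error, both when iterating and when counting. A tiny sketch of that convention; things_at and the cache layout are stand-ins, not the real lisien cache API.

def things_at(contents_cache, graph, node):
	# A missing cache entry just means nothing was ever stored here.
	try:
		return contents_cache[graph, node]
	except KeyError:
		return frozenset()


cache = {("physical", "kitchen"): frozenset({"kettle", "spoon"})}
assert things_at(cache, "physical", "kitchen") == {"kettle", "spoon"}
assert things_at(cache, "physical", "cellar") == frozenset()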
146,541 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.EngineFacade.FacadeUniversalMapping
|
class FacadeUniversalMapping(MutableMapping):
def __init__(self, engine: AbstractEngine):
assert not isinstance(engine, EngineFacade)
self.engine = engine
self._patch = {}
def __iter__(self):
return iter(self._patch.keys() | self.engine.universal.keys())
def __len__(self):
return len(self._patch.keys() | self.engine.universal.keys())
def __contains__(self, item):
return item in self._patch or item in self.engine.universal
def __getitem__(self, item):
if item in self._patch:
ret = self._patch[item]
if ret is None:
raise KeyError("Universal key deleted", item)
return ret
elif item in self.engine.universal:
return self.engine.universal[item]
else:
raise KeyError("No universal key", item)
def __setitem__(self, key, value):
self._patch[key] = value
def __delitem__(self, key):
if key not in self.engine.universal:
raise KeyError("No key to delete", key)
self._patch[key] = None
|
class FacadeUniversalMapping(MutableMapping):
def __init__(self, engine: AbstractEngine):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
def __setitem__(self, key, value):
pass
def __delitem__(self, key):
pass
| 8 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 3 | 2 | 0 | 7 | 2 | 7 | 7 | 33 | 6 | 27 | 11 | 19 | 0 | 25 | 11 | 17 | 4 | 1 | 2 | 11 |
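FacadeUniversalMapping stages writes in a local _patch dict, marks a deletion by storing None, and falls through to the wrapped engine's universal mapping on reads, so the real engine is never touched. The following is a simplified, self-contained variant of that overlay pattern (unlike the facade class above, it also filters deleted keys out of iteration); OverlayMapping and the demo values are invented.

from collections.abc import MutableMapping


class OverlayMapping(MutableMapping):
	def __init__(self, base):
		self.base = base  # underlying mapping, never modified
		self._patch = {}  # local changes; None means "deleted"

	def __getitem__(self, key):
		if key in self._patch:
			value = self._patch[key]
			if value is None:
				raise KeyError(key)
			return value
		return self.base[key]

	def __setitem__(self, key, value):
		self._patch[key] = value

	def __delitem__(self, key):
		if key not in self:
			raise KeyError(key)
		self._patch[key] = None

	def __iter__(self):
		for key in self._patch.keys() | self.base.keys():
			if self._patch.get(key, ...) is not None:
				yield key

	def __len__(self):
		return sum(1 for _ in self)


base = {"turn_length": 1}
overlay = OverlayMapping(base)
overlay["weather"] = "rain"  # staged locally
del overlay["turn_length"]  # masked; the base dict stays untouched
assert base == {"turn_length": 1}
assert dict(overlay) == {"weather": "rain"}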
146,542 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.FacadeNode
|
class FacadeNode(FacadeEntity, Node):
class FacadeNodeUser(Mapping):
__slots__ = ("_entity",)
@property
def only(self):
if len(self) != 1:
raise AttributeError("No user, or more than one")
return self[next(iter(self))]
def __init__(self, node):
self._entity = node
def __iter__(self):
engine = self._entity.engine
charn = self._entity.character.name
return engine._unitness_cache.user_cache.iter_keys(
charn, self._entity.name, *engine._btt()
)
def __len__(self):
engine = self._entity.engine
charn = self._entity.character.name
return engine._unitness_cache.user_cache.count_keys(
charn, self._entity.name, *engine._btt()
)
def __contains__(self, item):
engine = self._entity.engine
charn = self._entity.character.name
try:
return bool(
engine._unitness_cache.user_cache.retrieve(
charn, self._entity.name, item, *engine._btt()
)
)
except KeyError:
return False
def __getitem__(self, item):
if item not in self:
raise KeyError("Not used by that character", item)
engine = self._entity.engine
return engine.character[item]
class FacadeNodeContent(Mapping):
__slots__ = ("_entity",)
def __init__(self, node):
self._entity = node
def __iter__(self):
if hasattr(self._entity.engine, "_node_contents_cache"):
# The real contents cache is wrapped by the facade engine.
try:
return self._entity.engine._node_contents_cache.retrieve(
self._entity.character.name,
self._entity.name,
*self._entity.engine._btt(),
)
except KeyError:
return
char = self._entity.character
myname = self._entity.name
for name, thing in char.thing.items():
if thing["location"] == myname:
yield name
def __len__(self):
# slow
return len(set(self))
def __contains__(self, item):
return (
item in self._entity.character.thing
and self._entity.character.thing[item]["location"]
== self._entity.name
)
def __getitem__(self, item):
if item not in self:
raise KeyError("Not contained here", item, self._entity.name)
return self._entity.character.thing[item]
@property
def portal(self):
return self.facade.portal[self["name"]]
def successors(self):
for dest in self.portal:
yield self.character.place[dest]
def contents(self):
return self.content.values()
def __init__(self, mapping, real_or_name=None, **kwargs):
self.name = self.node = getattr(real_or_name, "name", real_or_name)
super().__init__(mapping, real_or_name, **kwargs)
def __iter__(self):
seen_name = False
for k in super().__iter__():
if k == "name":
seen_name = True
yield k
if not seen_name:
yield "name"
def __getitem__(self, item):
if item == "name":
return self.name
return super().__getitem__(item)
def __eq__(self, other):
if not callable(getattr(other, "keys")):
return False
if set(self.keys()) != set(other.keys()):
return False
for key in self:
if self[key] != other[key]:
return False
return True
@property
def content(self):
return self.FacadeNodeContent(self)
@property
def user(self):
return self.FacadeNodeUser(self)
def users(self):
return self.user.values()
def _set_plan(self, k, v):
self.character.engine._planned[self.character.engine._curplan][
self.character.engine.turn
].append((self.character.name, self.name, k, v))
|
class FacadeNode(FacadeEntity, Node):
class FacadeNodeUser(Mapping):
@property
def only(self):
pass
def __init__(self, node):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
class FacadeNodeContent(Mapping):
def __init__(self, node):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
@property
def portal(self):
pass
def successors(self):
pass
def contents(self):
pass
def __init__(self, mapping, real_or_name=None, **kwargs):
pass
def __iter__(self):
pass
def __getitem__(self, item):
pass
def __eq__(self, other):
pass
@property
def content(self):
pass
@property
def user(self):
pass
def users(self):
pass
def _set_plan(self, k, v):
pass
| 29 | 0 | 5 | 0 | 5 | 0 | 2 | 0.02 | 2 | 4 | 2 | 2 | 11 | 2 | 11 | 92 | 138 | 23 | 113 | 48 | 84 | 2 | 91 | 44 | 66 | 5 | 11 | 2 | 39 |
146,543 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.FacadeNode.FacadeNodeContent
|
class FacadeNodeContent(Mapping):
__slots__ = ("_entity",)
def __init__(self, node):
self._entity = node
def __iter__(self):
if hasattr(self._entity.engine, "_node_contents_cache"):
# The real contents cache is wrapped by the facade engine.
try:
return self._entity.engine._node_contents_cache.retrieve(
self._entity.character.name,
self._entity.name,
*self._entity.engine._btt(),
)
except KeyError:
return
char = self._entity.character
myname = self._entity.name
for name, thing in char.thing.items():
if thing["location"] == myname:
yield name
def __len__(self):
# slow
return len(set(self))
def __contains__(self, item):
return (
item in self._entity.character.thing
and self._entity.character.thing[item]["location"]
== self._entity.name
)
def __getitem__(self, item):
if item not in self:
raise KeyError("Not contained here", item, self._entity.name)
return self._entity.character.thing[item]
|
class FacadeNodeContent(Mapping):
def __init__(self, node):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
| 6 | 0 | 6 | 0 | 6 | 0 | 2 | 0.06 | 1 | 2 | 0 | 0 | 5 | 1 | 5 | 5 | 38 | 5 | 31 | 11 | 25 | 2 | 23 | 11 | 17 | 5 | 1 | 2 | 10 |
146,544 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.FacadeNode.FacadeNodeUser
|
class FacadeNodeUser(Mapping):
__slots__ = ("_entity",)
@property
def only(self):
if len(self) != 1:
raise AttributeError("No user, or more than one")
return self[next(iter(self))]
def __init__(self, node):
self._entity = node
def __iter__(self):
engine = self._entity.engine
charn = self._entity.character.name
return engine._unitness_cache.user_cache.iter_keys(
charn, self._entity.name, *engine._btt()
)
def __len__(self):
engine = self._entity.engine
charn = self._entity.character.name
return engine._unitness_cache.user_cache.count_keys(
charn, self._entity.name, *engine._btt()
)
def __contains__(self, item):
engine = self._entity.engine
charn = self._entity.character.name
try:
return bool(
engine._unitness_cache.user_cache.retrieve(
charn, self._entity.name, item, *engine._btt()
)
)
except KeyError:
return False
def __getitem__(self, item):
if item not in self:
raise KeyError("Not used by that character", item)
engine = self._entity.engine
return engine.character[item]
|
class FacadeNodeUser(Mapping):
@property
def only(self):
pass
def __init__(self, node):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
| 8 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 3 | 0 | 0 | 6 | 1 | 6 | 6 | 43 | 6 | 37 | 17 | 29 | 0 | 28 | 16 | 21 | 2 | 1 | 1 | 9 |
146,545 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.FacadePlace
|
class FacadePlace(FacadeNode):
"""Lightweight analogue of Place for Facade use."""
def __init__(self, mapping, real_or_name, **kwargs):
from .node import Place
super().__init__(mapping, real_or_name, **kwargs)
if not isinstance(real_or_name, Place):
if real_or_name in mapping._patch:
real_or_name = mapping._patch[real_or_name]
else:
mapping._patch[real_or_name] = self
return
self.character.place._patch[real_or_name.name] = self
def _get_real(self, name):
return self.character.character.place[name]
def add_thing(self, name):
self.facade.add_thing(name, self.name)
def new_thing(self, name):
return self.facade.new_thing(name, self.name)
def delete(self):
del self.character.place[self.name]
|
class FacadePlace(FacadeNode):
'''Lightweight analogue of Place for Facade use.'''
def __init__(self, mapping, real_or_name, **kwargs):
pass
def _get_real(self, name):
pass
def add_thing(self, name):
pass
def new_thing(self, name):
pass
def delete(self):
pass
| 6 | 1 | 4 | 0 | 4 | 0 | 1 | 0.05 | 1 | 2 | 1 | 0 | 5 | 0 | 5 | 97 | 26 | 6 | 19 | 7 | 12 | 1 | 18 | 7 | 11 | 3 | 12 | 2 | 7 |
146,546 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.FacadePortal
|
class FacadePortal(FacadeEntity, Edge):
"""Lightweight analogue of Portal for Facade use."""
def __init__(self, mapping, other, **kwargs):
super().__init__(mapping, other, **kwargs)
if hasattr(mapping, "orig"):
self.orig = mapping.orig
self.dest = other
else:
self.dest = mapping.dest
self.orig = other
try:
self._real = self.facade.character.portal[self.orig][self.dest]
except (KeyError, AttributeError):
self._real = {}
def __getitem__(self, item):
if item == "origin":
return self.orig
if item == "destination":
return self.dest
return super().__getitem__(item)
def __setitem__(self, k, v):
if k in ("origin", "destination"):
raise TypeError("Portals have fixed origin and destination")
super().__setitem__(k, v)
@property
def origin(self):
return self.facade.node[self.orig]
@property
def destination(self):
return self.facade.node[self.dest]
def _get_real(self, name):
return self.character.character.portal[self._mapping.orig][name]
def _set_plan(self, k, v):
self.character.engine._planned[self.character.engine._curplan][
self.character.engine.turn
].append((self.character.name, self.orig, self.dest, k, v))
def delete(self):
del self.character.portal[self.orig][self.dest]
|
class FacadePortal(FacadeEntity, Edge):
'''Lightweight analogue of Portal for Facade use.'''
def __init__(self, mapping, other, **kwargs):
pass
def __getitem__(self, item):
pass
def __setitem__(self, k, v):
pass
@property
def origin(self):
pass
@property
def destination(self):
pass
def _get_real(self, name):
pass
def _set_plan(self, k, v):
pass
def delete(self):
pass
| 11 | 1 | 4 | 0 | 4 | 0 | 2 | 0.03 | 2 | 4 | 0 | 0 | 8 | 3 | 8 | 87 | 46 | 8 | 37 | 14 | 26 | 1 | 32 | 12 | 23 | 3 | 11 | 1 | 13 |
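FacadePortal answers "origin" and "destination" from its endpoint attributes and refuses writes to those two keys, while every other key behaves like an ordinary stat. A minimal sketch of that reserved-key pattern; EdgeRecord and the demo values are invented, not part of lisien.

class EdgeRecord(dict):
	_RESERVED = ("origin", "destination")

	def __init__(self, origin, destination, **stats):
		super().__init__(**stats)
		self.orig = origin
		self.dest = destination

	def __getitem__(self, key):
		# The two endpoint keys are answered from attributes.
		if key == "origin":
			return self.orig
		if key == "destination":
			return self.dest
		return super().__getitem__(key)

	def __setitem__(self, key, value):
		if key in self._RESERVED:
			raise TypeError("origin and destination are fixed")
		super().__setitem__(key, value)


door = EdgeRecord("kitchen", "hall", locked=False)
assert door["origin"] == "kitchen" and door["locked"] is False
try:
	door["destination"] = "cellar"
except TypeError:
	pass  # endpoints cannot be reassigned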
146,547 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.FacadePortalPredecessors
|
class FacadePortalPredecessors(FacadeEntityMapping):
facadecls = FacadePortal
innercls: type
def __init__(self, facade, destname):
from .portal import Portal
self.innercls = Portal
super().__init__(facade, destname)
self.dest = destname
def _make(self, k, v):
return self.facadecls(self.facade.portal[k], v)
def _get_inner_map(self):
try:
return self.facade.character.preportal[self.dest]
except AttributeError:
return {}
|
class FacadePortalPredecessors(FacadeEntityMapping):
def __init__(self, facade, destname):
pass
def _make(self, k, v):
pass
def _get_inner_map(self):
pass
| 4 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 3 | 1 | 0 | 3 | 1 | 3 | 55 | 19 | 4 | 15 | 7 | 10 | 0 | 15 | 7 | 10 | 2 | 9 | 1 | 4 |