Dataset schema (one row per class; for string columns, min and max refer to string length):

| column | dtype | min | max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string | 7 | 55 |
| file_path | string | 9 | 332 |
| class_name | string | 3 | 290 |
| human_written_code | string | 12 | 4.36M |
| class_skeleton | string | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |

The data rows follow, with `|`-separated cell values in the column order above.
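As a rough illustration of working with the metric columns defined in this schema, the sketch below filters rows with pandas. The file name `classes.csv` and the pandas workflow are assumptions for illustration, not part of the dataset; only the column names come from the schema above.

```python
import pandas as pd

# Load an exported copy of the table; "classes.csv" is a hypothetical file name.
df = pd.read_csv("classes.csv")

# Keep documented, low-complexity classes using the metric columns above.
documented = df[
    (df["total_doc_str"] > 0)
    & (df["CommentToCodeRatio"] >= 0.5)
    & (df["MaxCyclomatic"] <= 10)
]

print(documented[["repository_name", "class_name", "CountLineCode"]].head())
```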
146,348 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/card.py
|
elide.card.ScrollBarBar
|
class ScrollBarBar(ColorTextureBox):
"""Tiny tweak to :class:`ColorTextureBox` to make it work within
:class:`DeckBuilderScrollBar`
"""
def on_touch_down(self, touch):
"""Tell my parent if I've been touched"""
if self.parent is None:
return
if self.collide_point(*touch.pos):
self.parent.bar_touched(self, touch)
|
class ScrollBarBar(ColorTextureBox):
'''Tiny tweak to :class:`ColorTextureBox` to make it work within
:class:`DeckBuilderScrollBar`
'''
def on_touch_down(self, touch):
'''Tell my parent if I've been touched'''
pass
| 2 | 2 | 6 | 0 | 5 | 1 | 3 | 0.67 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 12 | 2 | 6 | 2 | 4 | 4 | 6 | 2 | 4 | 3 | 2 | 1 | 3 |
146,349 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/charmenu.py
|
elide.charmenu.CharMenu
|
class CharMenu(BoxLayout):
screen = ObjectProperty()
reciprocal_portal = BooleanProperty(True)
revarrow = ObjectProperty(None, allownone=True)
dummyplace = ObjectProperty()
dummything = ObjectProperty()
toggle_gridview = ObjectProperty()
toggle_timestream = ObjectProperty()
dummies = ReferenceListProperty(dummyplace, dummything)
@property
def app(self):
if not self.screen:
raise AttributeError("No screen, therefore no app")
return self.screen.app
@property
def engine(self):
if not self.screen or not self.screen.app:
raise AttributeError("Can't get engine from screen")
return self.screen.app.engine
def on_screen(self, *_):
if not (self.screen and self.screen.boardview and self.screen.app):
Clock.schedule_once(self.on_screen, 0)
return
self.forearrow = GraphArrowWidget(
board=self.screen.boardview.board,
origin=self.ids.emptyleft,
destination=self.ids.emptyright,
)
self.ids.portaladdbut.add_widget(self.forearrow)
self.ids.emptyleft.bind(pos=self.forearrow._trigger_repoint)
self.ids.emptyright.bind(pos=self.forearrow._trigger_repoint)
if self.reciprocal_portal:
assert self.revarrow is None
self.revarrow = GraphArrowWidget(
board=self.screen.boardview.board,
origin=self.ids.emptyright,
destination=self.ids.emptyleft,
)
self.ids.portaladdbut.add_widget(self.revarrow)
self.ids.emptyleft.bind(pos=self.revarrow._trigger_repoint)
self.ids.emptyright.bind(pos=self.revarrow._trigger_repoint)
self.bind(
reciprocal_portal=self.screen.boardview.setter("reciprocal_portal")
)
def spot_from_dummy(self, dummy):
if self.screen.boardview.parent != self.screen.mainview:
return
if dummy.collide_widget(self):
return
name = dummy.name
self.screen.boardview.spot_from_dummy(dummy)
graphboard = self.screen.graphboards[self.app.character_name]
if name not in graphboard.spot:
graphboard.add_spot(name)
gridboard = self.screen.gridboards[self.app.character_name]
if (
name not in gridboard.spot
and isinstance(name, tuple)
and len(name) == 2
):
gridboard.add_spot(name)
def pawn_from_dummy(self, dummy):
name = dummy.name
if not self.screen.mainview.children[0].pawn_from_dummy(dummy):
return
graphboard = self.screen.graphboards[self.app.character_name]
if name not in graphboard.pawn:
graphboard.add_pawn(name)
gridboard = self.screen.gridboards[self.app.character_name]
if (
name not in gridboard.pawn
and self.app.character.thing[name]["location"] in gridboard.spot
):
gridboard.add_pawn(name)
def toggle_chars_screen(self, *_):
"""Display or hide the list you use to switch between characters."""
# TODO: update the list of chars
self.app.chars.toggle()
def toggle_rules(self, *_):
"""Display or hide the view for constructing rules out of cards."""
if self.app.manager.current != "rules" and not isinstance(
self.app.selected_proxy, CharStatProxy
):
self.app.rules.entity = self.app.selected_proxy
self.app.rules.rulebook = self.app.selected_proxy.rulebook
if isinstance(self.app.selected_proxy, CharStatProxy):
self.app.charrules.character = self.app.selected_proxy
self.app.charrules.toggle()
else:
self.app.rules.toggle()
def toggle_funcs_editor(self):
"""Display or hide the text editing window for functions."""
self.app.funcs.toggle()
def toggle_strings_editor(self):
self.app.strings.toggle()
def toggle_spot_cfg(self):
"""Show the dialog where you select graphics and a name for a place,
or hide it if already showing.
"""
if self.app.manager.current == "spotcfg":
dummyplace = self.screendummyplace
self.ids.placetab.remove_widget(dummyplace)
dummyplace.clear()
if self.app.spotcfg.prefix:
dummyplace.prefix = self.app.spotcfg.prefix
dummyplace.num = (
dummynum(self.app.character, dummyplace.prefix) + 1
)
if self.app.spotcfg.imgpaths:
dummyplace.paths = self.app.spotcfg.imgpaths
else:
dummyplace.paths = ["atlas://rltiles/floor/floor-stone"]
dummyplace.center = self.ids.placetab.center
self.ids.placetab.add_widget(dummyplace)
else:
self.app.spotcfg.prefix = self.ids.dummyplace.prefix
self.app.spotcfg.toggle()
def toggle_pawn_cfg(self):
"""Show or hide the pop-over where you can configure the dummy pawn"""
if self.app.manager.current == "pawncfg":
dummything = self.app.dummything
self.ids.thingtab.remove_widget(dummything)
dummything.clear()
if self.app.pawncfg.prefix:
dummything.prefix = self.app.pawncfg.prefix
dummything.num = (
dummynum(self.app.character, dummything.prefix) + 1
)
if self.app.pawncfg.imgpaths:
dummything.paths = self.app.pawncfg.imgpaths
else:
dummything.paths = ["atlas://rltiles/base/unseen"]
self.ids.thingtab.add_widget(dummything)
else:
self.app.pawncfg.prefix = self.ids.dummything.prefix
self.app.pawncfg.toggle()
def toggle_reciprocal(self):
"""Flip my ``reciprocal_portal`` boolean, and draw (or stop drawing)
an extra arrow on the appropriate button to indicate the
fact.
"""
self.reciprocal_portal = (
self.screen.boardview.reciprocal_portal
) = not self.screen.boardview.reciprocal_portal
if self.screen.boardview.reciprocal_portal:
assert self.revarrow is None
self.revarrow = GraphArrowWidget(
board=self.screen.boardview.board,
origin=self.ids.emptyright,
destination=self.ids.emptyleft,
)
self.ids.portaladdbut.add_widget(self.revarrow)
self.ids.emptyright.bind(pos=self.revarrow._trigger_repoint)
self.ids.emptyleft.bind(pos=self.revarrow._trigger_repoint)
else:
if hasattr(self, "revarrow"):
self.ids.portaladdbut.remove_widget(self.revarrow)
self.revarrow = None
def new_character(self, but):
name = self.app.chars.ids.newname.text
try:
charn = self.app.engine.unpack(name)
except (TypeError, ValueError):
charn = name
self.app.select_character(self.app.engine.new_character(charn))
self.app.chars.ids.newname.text = ""
self.app.chars.charsview.adapter.data = list(
self.engine.character.keys()
)
Clock.schedule_once(self.toggle_chars_screen, 0.01)
def on_dummyplace(self, *_):
if not self.dummyplace.paths:
self.dummyplace.paths = ["atlas://rltiles/floor.atlas/floor-stone"]
def on_dummything(self, *_):
if not self.dummything.paths:
self.dummything.paths = ["atlas://rltiles/base.atlas/unseen"]
@trigger
def _trigger_deselect(self, *_):
if hasattr(self.app.selection, "selected"):
self.app.selection.selected = False
self.app.selection = None
|
class CharMenu(BoxLayout):
@property
def app(self):
pass
@property
def engine(self):
pass
def on_screen(self, *_):
pass
def spot_from_dummy(self, dummy):
pass
def pawn_from_dummy(self, dummy):
pass
def toggle_chars_screen(self, *_):
'''Display or hide the list you use to switch between characters.'''
pass
def toggle_rules(self, *_):
'''Display or hide the view for constructing rules out of cards.'''
pass
def toggle_funcs_editor(self):
'''Display or hide the text editing window for functions.'''
pass
def toggle_strings_editor(self):
pass
def toggle_spot_cfg(self):
'''Show the dialog where you select graphics and a name for a place,
or hide it if already showing.
'''
pass
def toggle_pawn_cfg(self):
'''Show or hide the pop-over where you can configure the dummy pawn'''
pass
def toggle_reciprocal(self):
'''Flip my ``reciprocal_portal`` boolean, and draw (or stop drawing)
an extra arrow on the appropriate button to indicate the
fact.
'''
pass
def new_character(self, but):
pass
def on_dummyplace(self, *_):
pass
def on_dummything(self, *_):
pass
@trigger
def _trigger_deselect(self, *_):
pass
| 20 | 6 | 11 | 0 | 10 | 1 | 3 | 0.07 | 1 | 7 | 2 | 0 | 16 | 1 | 16 | 16 | 199 | 18 | 169 | 39 | 149 | 12 | 129 | 36 | 112 | 5 | 1 | 2 | 41 |
146,350 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/charsview.py
|
elide.charsview.CharactersRecycleBoxLayout
|
class CharactersRecycleBoxLayout(SelectableRecycleBoxLayout):
character_name = StringProperty()
def apply_selection(self, index, view, is_selected):
super().apply_selection(index, view, is_selected)
if is_selected:
self.character_name = view.text
|
class CharactersRecycleBoxLayout(SelectableRecycleBoxLayout):
def apply_selection(self, index, view, is_selected):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 7 | 1 | 6 | 3 | 4 | 0 | 6 | 3 | 4 | 2 | 2 | 1 | 2 |
146,351 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/charsview.py
|
elide.charsview.CharactersScreen
|
class CharactersScreen(Screen):
toggle = ObjectProperty()
charsview = ObjectProperty()
character_name = StringProperty()
wallpaper_path = StringProperty()
names = ListProperty()
new_board = ObjectProperty()
push_character_name = ObjectProperty()
@property
def engine(self):
return App.get_running_app().engine
def new_character(self, name, *_):
self.engine.add_character(name)
self.ids.newname.text = ""
i = len(self.charsview.data)
self.charsview.i2name[i] = name
self.charsview.name2i[name] = i
self.charsview.data.append({"index": i, "text": name})
self.names.append(name)
self.new_board(name)
self.push_character_name(name)
def _trigger_new_character(self, name):
part = partial(self.new_character, name)
if hasattr(self, "_scheduled_new_character"):
Clock.unschedule(self._scheduled_new_character)
self._scheduled_new_character = Clock.schedule_once(part)
def _munge_names(self, names):
for i, name in enumerate(names):
self.charsview.i2name[i] = name
self.charsview.name2i[name] = i
yield {"index": i, "text": name}
def on_names(self, *_):
app = App.get_running_app()
if not app.character or not self.charsview:
Clock.schedule_once(self.on_names, 0)
return
self.charsview.data = list(self._munge_names(self.names))
charname = app.character.name
for i, name in enumerate(self.names):
if name == charname:
self.charsview.children[0].select_node(i)
return
def on_charsview(self, *_):
if not self.push_character_name:
Clock.schedule_once(self.on_charsview, 0)
return
self.charsview.bind(character_name=self.setter("character_name"))
self.bind(character_name=self.push_character_name)
|
class CharactersScreen(Screen):
@property
def engine(self):
pass
def new_character(self, name, *_):
pass
def _trigger_new_character(self, name):
pass
def _munge_names(self, names):
pass
def on_names(self, *_):
pass
def on_charsview(self, *_):
pass
| 8 | 0 | 7 | 0 | 7 | 0 | 2 | 0 | 1 | 3 | 0 | 0 | 6 | 1 | 6 | 6 | 54 | 6 | 48 | 22 | 40 | 0 | 47 | 21 | 40 | 4 | 1 | 2 | 12 |
146,352 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.AmbiguousUserError
|
class AmbiguousUserError(NonUniqueError, AttributeError):
"""A user descriptor can't decide what you want."""
|
class AmbiguousUserError(NonUniqueError, AttributeError):
'''A user descriptor can't decide what you want.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 5 | 0 | 0 |
146,353 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.CacheError
|
class CacheError(ValueError):
"""Error condition for something going wrong with a cache"""
pass
|
class CacheError(ValueError):
'''Error condition for something going wrong with a cache'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 4 | 1 | 2 | 1 | 1 | 1 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
146,354 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.NonUniqueError
|
class NonUniqueError(RuntimeError):
"""You tried to look up the only one of something but there wasn't just one"""
|
class NonUniqueError(RuntimeError):
'''You tried to look up the only one of something but there wasn't just one'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 0 | 0 | 11 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
146,355 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.PlanError
|
class PlanError(AttributeError):
"""Tried to use an attribute that shouldn't be used while planning"""
|
class PlanError(AttributeError):
'''Tried to use an attribute that shouldn't be used while planning'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
146,356 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.RedundantRuleError
|
class RedundantRuleError(RuleError):
"""Error condition for when you try to run a rule on a (branch,
turn) it's already been executed.
"""
|
class RedundantRuleError(RuleError):
'''Error condition for when you try to run a rule on a (branch,
turn) it's already been executed.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 5 | 1 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 6 | 0 | 0 |
146,357 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.WorkerProcessError
|
class WorkerProcessError(RuntimeError):
"""Something wrong to do with worker processes"""
|
class WorkerProcessError(RuntimeError):
'''Something wrong to do with worker processes'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 11 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
146,358 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.WorkerProcessReadOnlyError
|
class WorkerProcessReadOnlyError(WorkerProcessError):
"""You tried to change the state of the world in a worker process"""
|
class WorkerProcessReadOnlyError(WorkerProcessError):
'''You tried to change the state of the world in a worker process'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 5 | 0 | 0 |
146,359 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.WorldIntegrityError
|
class WorldIntegrityError(ValueError):
"""Error condition for when something breaks the world model, even if
it might be allowed by the database schema.
"""
|
class WorldIntegrityError(ValueError):
'''Error condition for when something breaks the world model, even if
it might be allowed by the database schema.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 5 | 1 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
146,360 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/proxy.py
|
lisien.proxy.ChangeSignatureError
|
class ChangeSignatureError(TypeError):
pass
|
class ChangeSignatureError(TypeError):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
146,361 |
LogicalDash/LiSE
|
LogicalDash_LiSE/lisien/lisien/query.py
|
lisien.query.CompoundQuery
|
class CompoundQuery(Query):
oper: Callable[[Any, Any], set] = lambda x, y: NotImplemented
|
class CompoundQuery(Query):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 2 | 0 | 0 |
146,362 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/dialog.py
|
elide.dialog.ScrollableLabel
|
class ScrollableLabel(ScrollView):
font_size = StringProperty("15sp")
font_name = StringProperty(DEFAULT_FONT)
color = VariableListProperty([0, 0, 0, 1])
line_spacing = NumericProperty(0)
text = StringProperty()
|
class ScrollableLabel(ScrollView):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0 | 6 | 6 | 5 | 0 | 6 | 6 | 5 | 0 | 1 | 0 | 0 |
146,363 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.EngineFacade
|
class EngineFacade(AbstractEngine):
char_cls = CharacterFacade
thing_cls = FacadeThing
place_cls = FacadePlace
portal_cls = FacadePortal
class FacadeUniversalMapping(MutableMapping):
def __init__(self, engine: AbstractEngine):
assert not isinstance(engine, EngineFacade)
self.engine = engine
self._patch = {}
def __iter__(self):
return iter(self._patch.keys() | self.engine.universal.keys())
def __len__(self):
return len(self._patch.keys() | self.engine.universal.keys())
def __contains__(self, item):
return item in self._patch or item in self.engine.universal
def __getitem__(self, item):
if item in self._patch:
ret = self._patch[item]
if ret is None:
raise KeyError("Universal key deleted", item)
return ret
elif item in self.engine.universal:
return self.engine.universal[item]
else:
raise KeyError("No universal key", item)
def __setitem__(self, key, value):
self._patch[key] = value
def __delitem__(self, key):
if key not in self.engine.universal:
raise KeyError("No key to delete", key)
self._patch[key] = None
class FacadeCharacterMapping(Mapping):
def __init__(self, engine: "EngineFacade"):
assert isinstance(engine, EngineFacade)
self.engine = engine
self._patch = {}
def __getitem__(self, key, /):
realeng = self.engine._real
if key not in realeng.character:
raise KeyError("No character", key)
if key not in self._patch:
self._patch[key] = CharacterFacade(
realeng.character[key], engine=realeng
)
return self._patch[key]
def __len__(self):
return len(self.engine.character)
def __iter__(self):
return iter(self.engine.character)
def apply(self):
for pat in self._patch.values():
pat.apply()
rando_state = self.engine._rando.getstate()
realeng = self.engine._real
if rando_state != realeng._rando.getstate():
realeng._rando.setstate(rando_state)
realeng.universal["rando_state"] = rando_state
self._patch = {}
class FacadeCache(Cache):
def __init__(self, cache, name):
self._created = cache.db._btt()
super().__init__(cache.db, name)
self._real = cache
def retrieve(self, *args, search=False):
try:
return super().retrieve(*args, search=search)
except (NotInKeyframeError, TotalKeyError):
return self._real.retrieve(*args, search=search)
def _get_keycache(
self, parentity, branch, turn, tick, forward: bool = None
):
if forward is None:
forward = self._real.db._forward
# Find the last effective keycache before the facade was created.
# Get the additions and deletions since then.
# Apply those to the keycache and return it.
kc = set(
self._real._get_keycache(
parentity, *self._created, forward=forward
)
)
added, deleted = self._get_adds_dels(
parentity, branch, turn, tick, stoptime=self._created
)
return frozenset((kc | added) - deleted)
class FacadeUnitnessCache(FacadeCache, UnitnessCache):
def __init__(self, cache):
self._created = cache.db._btt()
UnitnessCache.__init__(self, cache.db)
self.user_cache = EngineFacade.FacadeCache(
cache.user_cache, "user_cache"
)
self._real = cache
def __init__(self, real: AbstractEngine):
assert not isinstance(real, EngineFacade)
if real is not None:
for alias in (
"submit",
"load_at",
"function",
"method",
"trigger",
"prereq",
"action",
"string",
"log",
"debug",
"info",
"warning",
"error",
"critical",
):
try:
setattr(self, alias, getattr(real, alias))
except AttributeError:
print(f"{alias} not implemented on {type(real)}")
self.closed = False
self._real = real
self._planning = False
self._planned = defaultdict(lambda: defaultdict(list))
self.character = self.FacadeCharacterMapping(self)
self.universal = self.FacadeUniversalMapping(real)
self._rando = random.Random()
self.world_lock = RLock()
if real is not None:
self._rando.setstate(real._rando.getstate())
self.branch, self.turn, self.tick = real._btt()
self._branches = real._branches.copy()
self._turn_end = TurnEndDict()
self._turn_end_plan = TurnEndPlanDict()
self._turn_end.other_d = self._turn_end_plan
self._turn_end_plan.other_d = self._turn_end
if not hasattr(real, "is_proxy"):
self._turn_end.update(real._turn_end)
self._turn_end_plan.update(real._turn_end_plan)
self._nodes_cache = self.FacadeCache(
real._nodes_cache, "nodes_cache"
)
self._things_cache = self.FacadeCache(
real._things_cache, "things_cache"
)
self._unitness_cache = self.FacadeUnitnessCache(
real._unitness_cache
)
else:
self._branches = {}
self._turn_end_plan = {}
def _btt(self):
return self.branch, self.turn, self.tick
def _nbtt(self):
self.tick += 1
return self._btt()
@contextmanager
def batch(self):
self.info(
"Facades already batch all changes, so this batch does nothing"
)
yield
@contextmanager
def plan(self):
if getattr(self, "_planning", False):
raise RuntimeError("Already planning")
self._planning = True
if hasattr(self, "_curplan"):
self._curplan += 1
else:
# Will break if used in a proxy, which I want to do eventually...
self._curplan = self._real._last_plan + 1
yield self._curplan
self._planning = False
def apply(self):
realeng = self._real
self.character.apply()
if not getattr(self, "_planned", None):
return
# Do I actually need these sorts? Insertion order's preserved...
for plan_num in sorted(self._planned):
with realeng.plan(): # resets time at end of block
for turn in sorted(self._planned[plan_num]):
realeng.turn = turn
for tup in self._planned[plan_num][turn]:
if len(tup) == 3:
char, k, v = tup
realeng.character[char].stat[k] = v
elif len(tup) == 4:
char, node, k, v = tup
realchar = realeng.character[char]
if node in realchar.node:
if k is None:
realchar.remove_node(node)
else:
realchar.node[node][k] = v
elif k == "location":
realchar.add_thing(node, v)
else:
realchar.add_place(node, k=v)
elif len(tup) == 5:
char, orig, dest, k, v = tup
realchar = realeng.character[char]
if (
orig in realchar.portal
and dest in realchar.portal[orig]
):
if k is None:
realchar.remove_portal(orig, dest)
else:
realchar.portal[orig][dest][k] = v
else:
realchar.add_portal(orig, dest, k=v)
|
class EngineFacade(AbstractEngine):
class FacadeUniversalMapping(MutableMapping):
def __init__(self, engine: AbstractEngine):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
def __setitem__(self, key, value):
pass
def __delitem__(self, key):
pass
class FacadeCharacterMapping(Mapping):
def __init__(self, engine: AbstractEngine):
pass
def __getitem__(self, item):
pass
def __len__(self):
pass
def __iter__(self):
pass
def apply(self):
pass
class FacadeCache(Cache):
def __init__(self, engine: AbstractEngine):
pass
def retrieve(self, *args, search=False):
pass
def _get_keycache(
self, parentity, branch, turn, tick, forward: bool = None
):
pass
class FacadeUnitnessCache(FacadeCache, UnitnessCache):
def __init__(self, engine: AbstractEngine):
pass
def __init__(self, engine: AbstractEngine):
pass
def _btt(self):
pass
def _nbtt(self):
pass
@contextmanager
def batch(self):
pass
@contextmanager
def plan(self):
pass
def apply(self):
pass
| 29 | 0 | 9 | 0 | 9 | 0 | 2 | 0.03 | 1 | 10 | 6 | 0 | 6 | 18 | 6 | 35 | 232 | 22 | 205 | 76 | 174 | 6 | 154 | 72 | 127 | 13 | 5 | 7 | 51 |
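EngineFacade's `FacadeUniversalMapping` above layers a `_patch` dict over the real engine's `universal` mapping: writes land in the patch, `None` marks a deletion, and reads fall through to the underlying mapping. Below is a standalone sketch of that overlay pattern; it is illustrative only, not lisien's API.

```python
from collections.abc import MutableMapping


class PatchOverlay(MutableMapping):
    """Overlay pending edits on a base mapping, like FacadeUniversalMapping."""

    def __init__(self, base):
        self.base = base
        self._patch = {}  # a None value means "deleted in the overlay"

    def __getitem__(self, key):
        if key in self._patch:
            value = self._patch[key]
            if value is None:
                raise KeyError("Deleted in overlay", key)
            return value
        return self.base[key]

    def __setitem__(self, key, value):
        self._patch[key] = value

    def __delitem__(self, key):
        if key not in self._patch and key not in self.base:
            raise KeyError("No key to delete", key)
        self._patch[key] = None

    def __iter__(self):
        # Like the facade above, iteration unions patched and base keys.
        return iter(self._patch.keys() | self.base.keys())

    def __len__(self):
        return len(self._patch.keys() | self.base.keys())


# Usage: edits are visible through the overlay but never touch `base`.
base = {"season": "winter"}
overlay = PatchOverlay(base)
overlay["season"] = "spring"
assert base["season"] == "winter" and overlay["season"] == "spring"
```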
146,364 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/dialog.py
|
elide.dialog.Box
|
class Box(Widget):
padding = VariableListProperty(6)
border = VariableListProperty(4)
font_size = StringProperty("15sp")
font_name = StringProperty(DEFAULT_FONT)
background = StringProperty()
background_color = VariableListProperty([1, 1, 1, 1])
foreground_color = VariableListProperty([0, 0, 0, 1])
|
class Box(Widget):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 8 | 0 | 8 | 8 | 7 | 0 | 8 | 8 | 7 | 0 | 1 | 0 | 0 |
146,365 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/card.py
|
elide.card.DeckBuilderView
|
class DeckBuilderView(DeckBuilderLayout, StencilView):
"""Just a :class:`DeckBuilderLayout` mixed with
:class:`StencilView`.
"""
pass
|
class DeckBuilderView(DeckBuilderLayout, StencilView):
'''Just a :class:`DeckBuilderLayout` mixed with
:class:`StencilView`.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 7 | 2 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
146,366 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/card.py
|
elide.card.ColorTextureBox
|
class ColorTextureBox(Widget):
"""A box, with a background of one solid color, an outline of another
color, and possibly a texture covering the background.
"""
color = ListProperty([1, 1, 1, 1])
outline_color = ListProperty([0, 0, 0, 0])
texture = ObjectProperty(None, allownone=True)
|
class ColorTextureBox(Widget):
'''A box, with a background of one solid color, an outline of another
color, and possibly a texture covering the background.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.75 | 1 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 9 | 2 | 4 | 4 | 3 | 3 | 4 | 4 | 3 | 0 | 1 | 0 | 0 |
146,367 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/calendar.py
|
elide.calendar.CalendarMenuLayout
|
class CalendarMenuLayout(LayoutSelectionBehavior, RecycleBoxLayout):
pass
|
class CalendarMenuLayout(LayoutSelectionBehavior, RecycleBoxLayout):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
146,368 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/game.py
|
elide.game.GameApp
|
class GameApp(App):
modules = []
turn_length = NumericProperty(0.5)
branch = StringProperty("trunk")
turn = NumericProperty(0)
tick = NumericProperty(0)
prefix = StringProperty(".")
selection = ObjectProperty(allownone=True)
engine_kwargs = DictProperty({})
def wait_turns(self, turns, *, cb=None):
"""Call ``self.engine.next_turn()`` ``turns`` times, waiting ``self.turn_length`` in between
If provided, call ``cb`` when done.
:param turns: number of turns to wait
:param dt: unused, just satisfies the clock
:param cb: callback function to call when done, optional
:return: ``None``
"""
if hasattr(self, "_next_turn_thread"):
Clock.schedule_once(partial(self.wait_turns, turns, cb=cb), 0)
return
if turns == 0:
if cb:
cb()
return
self.next_turn()
turns -= 1
Clock.schedule_once(
partial(self.wait_turns, turns, cb=cb), self.turn_length
)
def wait_travel(self, character, thing, dest, cb=None):
"""Schedule a thing to travel someplace, then wait for it to finish, and call ``cb`` if provided
:param character: name of the character
:param thing: name of the thing
:param dest: name of the destination (a place)
:param cb: function to be called when I'm done
:return: ``None``
"""
self.wait_turns(
self.engine.character[character].thing[thing].travel_to(dest),
cb=cb,
)
def wait_command(self, start_func, turns=1, end_func=None):
"""Call ``start_func``, and wait to call ``end_func`` after simulating ``turns`` (default 1)
:param start_func: function to call before waiting
:param turns: number of turns to wait
:param end_func: function to call after waiting
:return: ``None``
"""
start_func()
self.wait_turns(turns, cb=end_func)
def wait_travel_command(
self, character, thing, dest, start_func, turns=1, end_func=None
):
"""Schedule a thing to travel someplace and do something, then wait for it to finish.
:param character: name of the character
:param thing: name of the thing
:param dest: name of the destination (a place)
:param start_func: function to call when the thing gets to dest
:param turns: number of turns to wait after start_func before re-enabling input
:param end_func: optional. Function to call after waiting ``turns`` after start_func
:return: ``None``
"""
self.wait_travel(
character,
thing,
dest,
cb=partial(self.wait_command, start_func, turns, end_func),
)
def _pull_time(self, *_, branch, turn, tick):
self.branch, self.turn, self.tick = branch, turn, tick
def build(self):
self.procman = lisien.proxy.EngineProcessManager()
self.engine = self.procman.start(
self.prefix,
logger=Logger,
loglevel=getattr(self, "loglevel", "debug"),
install_modules=self.modules,
**self.engine_kwargs,
)
self.branch, self.turn, self.tick = self.engine._btt()
self.engine.time.connect(self._pull_time, weak=False)
self.screen_manager = ScreenManager(transition=NoTransition())
if hasattr(self, "inspector"):
from kivy.core.window import Window
from kivy.modules import inspector
inspector.create_inspector(Window, self.screen_manager)
return self.screen_manager
def on_pause(self):
"""Sync the database with the current state of the game."""
self.engine.commit()
self.config.write()
def on_stop(self, *_):
"""Sync the database, wrap up the game, and halt."""
self.procman.shutdown()
self.config.write()
def _del_next_turn_thread(self, *_, **__):
del self._next_turn_thread
def next_turn(self, *_):
"""Smoothly advance to the next turn in the simulation
This uses a subthread to wait for lisien to finish simulating
the turn and report the changes. The interface will remain responsive.
If you're wiring up the interface, consider binding user
input to `trigger_next_turn` instead, so that the user doesn't
mistakenly go two or three turns into the future.
"""
if hasattr(self, "_next_turn_thread"):
return
self._next_turn_thread = Thread(
target=self.engine.next_turn,
kwargs={"cb": self._del_next_turn_thread},
)
self._next_turn_thread.start()
trigger_next_turn = triggered(next_turn)
|
class GameApp(App):
def wait_turns(self, turns, *, cb=None):
'''Call ``self.engine.next_turn()`` ``turns`` times, waiting ``self.turn_length`` in between
If provided, call ``cb`` when done.
:param turns: number of turns to wait
:param dt: unused, just satisfies the clock
:param cb: callback function to call when done, optional
:return: ``None``
'''
pass
def wait_travel(self, character, thing, dest, cb=None):
'''Schedule a thing to travel someplace, then wait for it to finish, and call ``cb`` if provided
:param character: name of the character
:param thing: name of the thing
:param dest: name of the destination (a place)
:param cb: function to be called when I'm done
:return: ``None``
'''
pass
def wait_command(self, start_func, turns=1, end_func=None):
'''Call ``start_func``, and wait to call ``end_func`` after simulating ``turns`` (default 1)
:param start_func: function to call before waiting
:param turns: number of turns to wait
:param end_func: function to call after waiting
:return: ``None``
'''
pass
def wait_travel_command(
self, character, thing, dest, start_func, turns=1, end_func=None
):
'''Schedule a thing to travel someplace and do something, then wait for it to finish.
:param character: name of the character
:param thing: name of the thing
:param dest: name of the destination (a place)
:param start_func: function to call when the thing gets to dest
:param turns: number of turns to wait after start_func before re-enabling input
:param end_func: optional. Function to call after waiting ``turns`` after start_func
:return: ``None``
'''
pass
def _pull_time(self, *_, branch, turn, tick):
pass
def build(self):
pass
def on_pause(self):
'''Sync the database with the current state of the game.'''
pass
def on_stop(self, *_):
'''Sync the database, wrap up the game, and halt.'''
pass
def _del_next_turn_thread(self, *_, **__):
pass
def next_turn(self, *_):
'''Smoothly advance to the next turn in the simulation
This uses a subthread to wait for lisien to finish simulating
the turn and report the changes. The interface will remain responsive.
If you're wiring up the interface, consider binding user
input to `trigger_next_turn` instead, so that the user doesn't
mistakenly go two or three turns into the future.
'''
pass
| 11 | 7 | 12 | 1 | 7 | 4 | 2 | 0.51 | 1 | 3 | 1 | 1 | 10 | 4 | 10 | 10 | 136 | 23 | 75 | 28 | 60 | 38 | 54 | 26 | 41 | 4 | 1 | 2 | 15 |
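The docstrings above spell out how the `wait_*` helpers chain work across turns via callbacks. A minimal usage sketch follows, assuming elide and lisien are installed and a lisien world exists at the app's `prefix`; the subclass name and the "physical"/"player"/"market" names are illustrative assumptions, and only the method signatures come from the code above.

```python
from elide.game import GameApp  # class shown above


class MyGame(GameApp):
    # Hypothetical subclass; "physical", "player", and "market" are
    # made-up character/thing/place names.
    def on_start(self):
        # Simulate three turns, waiting self.turn_length between each,
        # then run the callback.
        self.wait_turns(3, cb=lambda: print("three turns simulated"))

        # Travel a thing to a destination, call start_func on arrival,
        # wait one more turn, then call end_func.
        self.wait_travel_command(
            "physical", "player", "market",
            start_func=lambda: print("arrived at the market"),
            turns=1,
            end_func=lambda: print("done"),
        )


if __name__ == "__main__":
    MyGame().run()
```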
146,369 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/dialog.py
|
elide.dialog.DialogMenu
|
class DialogMenu(Box):
"""Some buttons that make the game do things.
Set ``options`` to a list of pairs of ``(text, function)`` and the
menu will be populated with buttons that say ``text`` that call
``function`` when pressed.
"""
options = ListProperty()
"""List of pairs of (button_text, callable)"""
def _set_sv_size(self, *_):
self._sv.width = self.width - self.padding[0] - self.padding[2]
self._sv.height = self.height - self.padding[1] - self.padding[3]
def _set_sv_pos(self, *_):
self._sv.x = self.x + self.padding[0]
self._sv.y = self.y + self.padding[3]
@mainthread
def on_options(self, *_):
if not hasattr(self, "_sv"):
self._sv = ScrollView(size=self.size, pos=self.pos)
self.bind(size=self._set_sv_size, pos=self._set_sv_pos)
layout = BoxLayout(orientation="vertical")
self._sv.add_widget(layout)
self.add_widget(self._sv)
else:
layout = self._sv.children[0]
layout.clear_widgets()
for txt, part in self.options:
if not callable(part):
raise TypeError("Menu options must be callable")
butn = Button(
text=txt,
on_release=part,
font_name=self.font_name,
font_size=self.font_size,
valign="center",
halign="center",
)
butn.bind(size=butn.setter("text_size"))
layout.add_widget(butn)
|
class DialogMenu(Box):
'''Some buttons that make the game do things.
Set ``options`` to a list of pairs of ``(text, function)`` and the
menu will be populated with buttons that say ``text`` that call
``function`` when pressed.
'''
def _set_sv_size(self, *_):
pass
def _set_sv_pos(self, *_):
pass
@mainthread
def on_options(self, *_):
pass
| 5 | 1 | 10 | 0 | 10 | 0 | 2 | 0.19 | 1 | 1 | 0 | 0 | 3 | 4 | 3 | 3 | 44 | 6 | 32 | 12 | 27 | 6 | 23 | 9 | 19 | 4 | 2 | 2 | 6 |
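Per DialogMenu's docstring above, setting `options` to a list of `(text, function)` pairs builds one button per pair. A minimal sketch, assuming it runs inside a Kivy app with elide installed; the callback names are made up.

```python
from elide.dialog import DialogMenu  # class shown above


def start_game(*_):
    print("starting")


def quit_game(*_):
    print("quitting")


menu = DialogMenu()
# Each (text, callable) pair becomes a Button; non-callables raise TypeError.
# Callbacks receive the released Button instance, hence the *_ signatures.
menu.options = [("Start", start_game), ("Quit", quit_game)]
```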
146,370 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.CharRuleMapping
|
class CharRuleMapping(RuleMapping):
"""Get rules by name, or make new ones by decorator
You can access the rules in this either dictionary-style or as
attributes. This is for convenience if you want to get at a rule's
decorators, eg. to add an Action to the rule.
Using this as a decorator will create a new rule, named for the
decorated function, and using the decorated function as the
initial Action.
Using this like a dictionary will let you create new rules,
appending them onto the underlying :class:`RuleBook`; replace one
rule with another, where the new one will have the same index in
the :class:`RuleBook` as the old one; and activate or deactivate
rules. The name of a rule may be used in place of the actual rule,
so long as the rule already exists.
You can also set a rule active or inactive by setting it to
``True`` or ``False``, respectively. Inactive rules are still in
the rulebook, but won't be followed.
"""
def __init__(self, character, rulebook, booktyp):
"""Initialize as usual for the ``rulebook``, mostly.
My ``character`` property will be the one passed in, and my
``_table`` will be the ``booktyp`` with ``"_rules"`` appended.
"""
super().__init__(rulebook.engine, rulebook)
self.character = character
self._table = booktyp + "_rules"
|
class CharRuleMapping(RuleMapping):
'''Get rules by name, or make new ones by decorator
You can access the rules in this either dictionary-style or as
attributes. This is for convenience if you want to get at a rule's
decorators, eg. to add an Action to the rule.
Using this as a decorator will create a new rule, named for the
decorated function, and using the decorated function as the
initial Action.
Using this like a dictionary will let you create new rules,
appending them onto the underlying :class:`RuleBook`; replace one
rule with another, where the new one will have the same index in
the :class:`RuleBook` as the old one; and activate or deactivate
rules. The name of a rule may be used in place of the actual rule,
so long as the rule already exists.
You can also set a rule active or inactive by setting it to
``True`` or ``False``, respectively. Inactive rules are still in
the rulebook, but won't be followed.
'''
def __init__(self, character, rulebook, booktyp):
'''Initialize as usual for the ``rulebook``, mostly.
My ``character`` property will be the one passed in, and my
``_table`` will be the ``booktyp`` with ``"_rules"`` appended.
'''
pass
| 2 | 2 | 10 | 2 | 4 | 4 | 1 | 4.2 | 1 | 1 | 0 | 0 | 1 | 2 | 1 | 54 | 34 | 8 | 5 | 4 | 3 | 21 | 5 | 4 | 3 | 1 | 8 | 0 | 1 |
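The CharRuleMapping docstring describes two access styles: call the mapping as a decorator to create a rule named after the function, or treat it like a dict to replace rules or toggle them active/inactive. A hedged sketch of both styles, assuming the mapping is reachable on a character as `character.rule`; that attribute name, the `engine` setup, and the "physical" character name are assumptions for illustration.

```python
def add_wander_rule(engine):
    # `engine` is assumed to be a lisien Engine with a "physical" character.
    character = engine.character["physical"]

    @character.rule
    def wander(character):
        """Becomes the new rule's initial Action; the rule is named "wander"."""
        ...

    # Dictionary-style access: deactivate the rule without removing it
    # from the rulebook.
    character.rule["wander"] = False
```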
146,371 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.UnitnessCache
|
class UnitnessCache(Cache):
"""A cache for remembering when a node is a unit of a character."""
def __init__(self, db):
super().__init__(db, "unitness_cache")
self.user_cache = Cache(db, "user_cache")
def store(
self,
character,
graph,
node,
branch,
turn,
tick,
is_unit,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
is_unit = True if is_unit else None
super().store(
character,
graph,
node,
branch,
turn,
tick,
is_unit,
planning=planning,
forward=forward,
loading=loading,
contra=contra,
)
try:
noded = self.retrieve(character, graph, branch, turn, tick).copy()
noded[node] = is_unit
except KeyError:
noded = {node: is_unit}
super().store(
character,
graph,
branch,
turn,
tick,
noded,
planning=planning,
forward=forward,
loading=loading,
contra=contra,
)
try:
users = self.user_cache.retrieve(graph, node, branch, turn, tick)
users[character] = frozenset(users[character] | {node})
except KeyError:
users = {character: frozenset([node] if is_unit else [])}
self.user_cache.store(graph, node, branch, turn, tick, users)
self.user_cache.store(
graph,
node,
character,
branch,
turn,
tick,
is_unit,
planning=planning,
forward=forward,
loading=loading,
contra=contra,
)
def set_keyframe(
self,
characters: Key,
branch: str,
turn: int,
tick: int,
keyframe,
):
super().set_keyframe(characters, branch, turn, tick, keyframe)
for graph, subkf in keyframe.items():
super().set_keyframe(
(*characters, graph), branch, turn, tick, subkf
)
if isinstance(subkf, dict):
if isinstance(characters, tuple) and len(characters) == 1:
characters = characters[0]
for unit, is_unit in subkf.items():
try:
kf = self.user_cache.get_keyframe(
(graph, unit), branch, turn, tick
)
kf[characters] = is_unit
except KeyframeError:
self.user_cache.set_keyframe(
(graph, unit),
branch,
turn,
tick,
{characters: is_unit},
)
def get_char_graph_units(self, char, graph, branch, turn, tick):
return set(self.iter_entities(char, graph, branch, turn, tick))
def get_char_only_unit(self, char, branch, turn, tick):
if self.count_entities(char, branch, turn, tick) != 1:
raise ValueError("No unit, or more than one unit")
for graph in self.iter_entities(char, branch, turn, tick):
if self.count_entities(char, graph, branch, turn, tick) != 1:
raise ValueError("No unit, or more than one unit")
return graph, next(
self.iter_entities(char, graph, branch, turn, tick)
)
def get_char_only_graph(self, char, branch, turn, tick):
if self.count_entities(char, branch, turn, tick) != 1:
raise ValueError("No unit, or more than one unit")
return next(self.iter_entities(char, branch, turn, tick))
def iter_char_graphs(self, char, branch, turn, tick):
return self.iter_entities(char, branch, turn, tick)
|
class UnitnessCache(Cache):
'''A cache for remembering when a node is a unit of a character.'''
def __init__(self, db):
pass
def store(
self,
character,
graph,
node,
branch,
turn,
tick,
is_unit,
*,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
pass
def set_keyframe(
self,
characters: Key,
branch: str,
turn: int,
tick: int,
keyframe,
):
pass
def get_char_graph_units(self, char, graph, branch, turn, tick):
pass
def get_char_only_unit(self, char, branch, turn, tick):
pass
def get_char_only_graph(self, char, branch, turn, tick):
pass
def iter_char_graphs(self, char, branch, turn, tick):
pass
| 8 | 1 | 16 | 0 | 16 | 0 | 3 | 0.01 | 1 | 11 | 1 | 1 | 7 | 1 | 7 | 34 | 124 | 7 | 116 | 36 | 87 | 1 | 48 | 15 | 40 | 6 | 1 | 4 | 20 |
146,372 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.UnitRulesHandledCache
|
class UnitRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
try:
return self.engine._units_rulebooks_cache.retrieve(
character, branch, turn, tick
)
except KeyError:
return "unit_rulebook", character
def iter_unhandled_rules(self, branch, turn, tick):
for charname in self.engine._graph_cache.iter_keys(branch, turn, tick):
rb = self.get_rulebook(charname, branch, turn, tick)
try:
rules, prio = self.engine._rulebooks_cache.retrieve(
rb, branch, turn, tick
)
except KeyError:
continue
if not rules:
continue
for graphname in self.engine._unitness_cache.iter_keys(
charname, branch, turn, tick
):
# Seems bad that I have to check twice like this.
try:
existences = self.engine._unitness_cache.retrieve(
charname, graphname, branch, turn, tick
)
except KeyError:
continue
for node, ex in existences.items():
if not ex:
continue
handled = self.get_handled_rules(
(charname, graphname), rb, branch, turn
)
for rule in rules:
if rule not in handled:
yield prio, charname, graphname, node, rb, rule
|
class UnitRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
pass
def iter_unhandled_rules(self, branch, turn, tick):
pass
| 3 | 0 | 19 | 0 | 18 | 1 | 6 | 0.03 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 12 | 39 | 1 | 37 | 11 | 34 | 1 | 27 | 11 | 24 | 10 | 1 | 5 | 12 |
146,373 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.ThingsCache
|
class ThingsCache(Cache):
def __init__(self, db):
Cache.__init__(self, db, name="things_cache")
self._make_node = db.thing_cls
def store(self, *args, planning=None, loading=False, contra=None):
character, thing, branch, turn, tick, location = args
with self._lock:
try:
oldloc = self.retrieve(character, thing, branch, turn, tick)
except KeyError:
oldloc = None
super().store(
*args, planning=planning, loading=loading, contra=contra
)
node_contents_cache = self.db._node_contents_cache
this = frozenset((thing,))
# Cache the contents of nodes
if oldloc is not None:
try:
oldconts_orig = node_contents_cache.retrieve(
character, oldloc, branch, turn, tick
)
except KeyError:
oldconts_orig = frozenset()
newconts_orig = oldconts_orig.difference(this)
node_contents_cache.store(
character,
oldloc,
branch,
turn,
tick,
newconts_orig,
contra=False,
loading=True,
)
todo = []
# update any future contents caches pertaining to the old location
if (character, oldloc) in node_contents_cache.loc_settings:
locset = node_contents_cache.loc_settings[
character, oldloc
][branch]
if turn in locset:
for future_tick in locset[turn].future(tick):
todo.append((turn, future_tick))
for future_turn, future_ticks in locset.future(
turn
).items():
for future_tick in future_ticks:
todo.append((future_turn, future_tick))
for trn, tck in todo:
node_contents_cache.store(
character,
oldloc,
branch,
trn,
tck,
node_contents_cache.retrieve(
character, oldloc, branch, trn, tck, search=True
).difference(this),
planning=False,
contra=False,
loading=True,
)
if location is not None:
todo = []
try:
oldconts_dest = node_contents_cache.retrieve(
character, location, branch, turn, tick
)
except KeyError:
oldconts_dest = frozenset()
newconts_dest = oldconts_dest.union(this)
node_contents_cache.store(
character,
location,
branch,
turn,
tick,
newconts_dest,
contra=False,
loading=True,
)
# and the new location
if (character, location) in node_contents_cache.loc_settings:
locset = node_contents_cache.loc_settings[
character, location
][branch]
if turn in locset:
for future_tick in locset[turn].future(tick):
todo.append((turn, future_tick))
for future_turn, future_ticks in locset.future(
turn
).items():
for future_tick in future_ticks:
todo.append((future_turn, future_tick))
for trn, tck in todo:
node_contents_cache.store(
character,
location,
branch,
trn,
tck,
node_contents_cache.retrieve(
character, location, branch, trn, tck, search=True
).union(this),
planning=False,
contra=False,
loading=True,
)
def turn_before(self, character, thing, branch, turn):
with self._lock:
try:
self.retrieve(character, thing, branch, turn, 0)
except KeyError:
pass
return self.keys[(character,)][thing][branch].rev_before(turn)
def turn_after(self, character, thing, branch, turn):
with self._lock:
try:
self.retrieve(character, thing, branch, turn, 0)
except KeyError:
pass
return self.keys[(character,)][thing][branch].rev_after(turn)
|
class ThingsCache(Cache):
def __init__(self, db):
pass
def store(self, *args, planning=None, loading=False, contra=None):
pass
def turn_before(self, character, thing, branch, turn):
pass
def turn_after(self, character, thing, branch, turn):
pass
| 5 | 0 | 31 | 0 | 30 | 1 | 6 | 0.03 | 1 | 3 | 0 | 0 | 4 | 1 | 4 | 31 | 126 | 3 | 120 | 19 | 115 | 3 | 64 | 19 | 59 | 18 | 1 | 5 | 23 |
146,374 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.PortalsRulebooksCache
|
class PortalsRulebooksCache(InitializedCache):
def store(
self,
*args,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
char, orig, dest, branch, turn, tick, rb = args
try:
destrbs = self.retrieve(char, orig, branch, turn, tick)
destrbs[dest] = rb
except KeyError:
destrbs = {dest: rb}
super().store(char, orig, dest, branch, turn, tick, rb)
super().store(char, orig, branch, turn, tick, destrbs)
def set_keyframe(
self,
graph_ent: tuple[Key],
branch: str,
turn: int,
tick: int,
keyframe,
):
super().set_keyframe(graph_ent, branch, turn, tick, keyframe)
for orig, dests in keyframe.items():
for dest, rulebook in dests.items():
try:
subkf = self.get_keyframe(
(*graph_ent, orig), branch, turn, tick, copy=True
)
subkf[dest] = rulebook
except KeyError:
subkf = {dest: rulebook}
super().set_keyframe(
(*graph_ent, orig), branch, turn, tick, subkf
)
|
class PortalsRulebooksCache(InitializedCache):
def store(
self,
*args,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
pass
def set_keyframe(
self,
graph_ent: tuple[Key],
branch: str,
turn: int,
tick: int,
keyframe,
):
pass
| 3 | 0 | 19 | 0 | 19 | 0 | 3 | 0 | 1 | 6 | 0 | 0 | 2 | 0 | 2 | 30 | 39 | 1 | 38 | 22 | 21 | 0 | 20 | 8 | 17 | 4 | 2 | 3 | 6 |
146,375 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.PortalRulesHandledCache
|
class PortalRulesHandledCache(RulesHandledCache):
def __init__(self, engine):
super().__init__(engine, "portal_rules_handled_cache")
def get_rulebook(self, character, orig, dest, branch, turn, tick):
try:
return self.engine._portals_rulebooks_cache.retrieve(
character, orig, dest, branch, turn, tick
)
except KeyError:
return character, orig, dest
def iter_unhandled_rules(self, branch, turn, tick):
for character_name, character in sorted(
self.engine.character.items(), key=itemgetter(0)
):
for orig_name in sort_set(
frozenset(
self.engine._portals_rulebooks_cache.iter_keys(
character_name, branch, turn, tick
)
)
):
try:
destrbs = self.engine._portals_rulebooks_cache.retrieve(
character_name, orig_name, branch, turn, tick
)
except KeyError:
# shouldn't happen, but apparently does??
# Seems to be a case of too many keys showing up in the
# iteration. Currently demonstrated by remake_college24.py
# 2025-02-07
continue
for dest_name in sort_set(destrbs.keys()):
rulebook = destrbs[dest_name]
try:
rules, prio = self.engine._rulebooks_cache.retrieve(
rulebook, branch, turn, tick
)
except KeyError:
continue
handled = self.get_handled_rules(
(character_name, orig_name, dest_name),
rulebook,
branch,
turn,
)
for rule in rules:
if rule not in handled:
yield (
prio,
character_name,
orig_name,
dest_name,
rulebook,
rule,
)
|
class PortalRulesHandledCache(RulesHandledCache):
def __init__(self, engine):
pass
def get_rulebook(self, character, orig, dest, branch, turn, tick):
pass
def iter_unhandled_rules(self, branch, turn, tick):
pass
| 4 | 0 | 18 | 0 | 17 | 1 | 4 | 0.08 | 1 | 4 | 0 | 0 | 3 | 0 | 3 | 13 | 57 | 2 | 51 | 12 | 47 | 4 | 25 | 12 | 21 | 8 | 1 | 5 | 11 |
146,376 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.NodeRulesHandledCache
|
class NodeRulesHandledCache(RulesHandledCache):
def __init__(self, engine):
super().__init__(engine, "node_rules_handled_cache")
def get_rulebook(self, character, node, branch, turn, tick):
try:
return self.engine._nodes_rulebooks_cache.retrieve(
character, node, branch, turn, tick
)
except KeyError:
return character, node
def iter_unhandled_rules(self, branch, turn, tick):
charm = self.engine.character
for character_name, character in sorted(
charm.items(), key=itemgetter(0)
):
for node_name in character.node:
rulebook = self.get_rulebook(
character_name, node_name, branch, turn, tick
)
try:
rules, prio = self.engine._rulebooks_cache.retrieve(
rulebook, branch, turn, tick
)
except KeyError:
continue
handled = self.get_handled_rules(
(character_name, node_name), rulebook, branch, turn
)
for rule in rules:
if rule not in handled:
yield prio, character_name, node_name, rulebook, rule
|
class NodeRulesHandledCache(RulesHandledCache):
def __init__(self, engine):
pass
def get_rulebook(self, character, node, branch, turn, tick):
pass
def iter_unhandled_rules(self, branch, turn, tick):
pass
| 4 | 0 | 10 | 0 | 10 | 0 | 3 | 0 | 1 | 3 | 0 | 0 | 3 | 0 | 3 | 13 | 33 | 2 | 31 | 11 | 27 | 0 | 21 | 11 | 17 | 6 | 1 | 4 | 9 |
146,377 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.NodeContentsCache
|
class NodeContentsCache(Cache):
def __init__(self, db, kfkvs=None):
super().__init__(db, "node_contents_cache", kfkvs)
self.loc_settings = StructuredDefaultDict(1, SettingsTurnDict)
def store(
self,
character: Key,
place: Key,
branch: str,
turn: int,
tick: int,
contents: frozenset,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
self.loc_settings[character, place][branch].store_at(
turn, tick, contents
)
return super().store(
character,
place,
branch,
turn,
tick,
contents,
planning=planning,
forward=forward,
loading=loading,
contra=contra,
)
def _iter_future_contradictions(
self, entity, key, turns, branch, turn, tick, value
):
return self.db._things_cache._iter_future_contradictions(
entity, key, turns, branch, turn, tick, value
)
def remove(self, branch, turn, tick):
"""Delete data on or after this tick
On the assumption that the future has been invalidated.
"""
with self._lock:
assert not self.parents # not how stuff is stored in this cache
for branchkey, branches in list(self.branches.items()):
if branch in branches:
branhc = branches[branch]
if turn in branhc:
trun = branhc[turn]
if tick in trun:
del trun[tick]
trun.truncate(tick)
if not trun:
del branhc[turn]
branhc.truncate(turn)
if not branhc:
del branches[branch]
if not branches:
del self.branches[branchkey]
for keykey, keys in list(self.keys.items()):
for key, branchs in list(keys.items()):
if branch in branchs:
branhc = branchs[branch]
if turn in branhc:
trun = branhc[turn]
if tick in trun:
del trun[tick]
trun.truncate(tick)
if not trun:
del branhc[turn]
branhc.truncate(turn)
if not branhc:
del branchs[branch]
if not branchs:
del keys[key]
if not keys:
del self.keys[keykey]
sets = self.settings[branch]
if turn in sets:
setsturn = sets[turn]
if tick in setsturn:
del setsturn[tick]
setsturn.truncate(tick)
if not setsturn:
del sets[turn]
sets.truncate(turn)
if not sets:
del self.settings[branch]
presets = self.presettings[branch]
if turn in presets:
presetsturn = presets[turn]
if tick in presetsturn:
del presetsturn[tick]
presetsturn.truncate(tick)
if not presetsturn:
del presets[turn]
presets.truncate(turn)
if not presets:
del self.presettings[branch]
for entity, brnch in list(self.keycache):
if brnch == branch:
kc = self.keycache[entity, brnch]
if turn in kc:
kcturn = kc[turn]
if tick in kcturn:
del kcturn[tick]
kcturn.truncate(tick)
if not kcturn:
del kc[turn]
kc.truncate(turn)
if not kc:
del self.keycache[entity, brnch]
self.shallowest = OrderedDict()
|
class NodeContentsCache(Cache):
def __init__(self, db, kfkvs=None):
pass
def store(
self,
character: Key,
place: Key,
branch: str,
turn: int,
tick: int,
contents: frozenset,
planning: bool = None,
forward: bool = None,
loading=False,
contra: bool = None,
):
pass
def _iter_future_contradictions(
self, entity, key, turns, branch, turn, tick, value
):
pass
def remove(self, branch, turn, tick):
'''Delete data on or after this tick
On the assumption that the future has been invalidated.
'''
pass
| 5 | 1 | 29 | 1 | 27 | 1 | 9 | 0.04 | 1 | 9 | 2 | 0 | 4 | 2 | 4 | 31 | 119 | 6 | 110 | 33 | 91 | 4 | 81 | 19 | 76 | 31 | 1 | 6 | 34 |
146,378 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.InitializedCache
|
class InitializedCache(Cache):
__slots__ = ()
def _store_journal(self, *args):
entity, key, branch, turn, tick, value = args[-6:]
parent = args[:-6]
settings_turns = self.settings[branch]
presettings_turns = self.presettings[branch]
try:
prev = self.retrieve(*args[:-1])
except KeyError:
prev = None
if prev == value:
return # not much point reporting on a non-change in a diff
if turn in settings_turns or turn in settings_turns.future():
assert (
turn in presettings_turns or turn in presettings_turns.future()
)
setticks = settings_turns[turn]
presetticks = presettings_turns[turn]
presetticks[tick] = parent + (entity, key, prev)
setticks[tick] = parent + (entity, key, value)
else:
presettings_turns[turn] = {tick: parent + (entity, key, prev)}
settings_turns[turn] = {tick: parent + (entity, key, value)}
|
class InitializedCache(Cache):
def _store_journal(self, *args):
pass
| 2 | 0 | 22 | 0 | 22 | 1 | 4 | 0.04 | 1 | 1 | 0 | 2 | 1 | 0 | 1 | 28 | 25 | 1 | 24 | 10 | 22 | 1 | 21 | 10 | 19 | 4 | 1 | 1 | 4 |
146,379 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.CharactersRulebooksCache
|
class CharactersRulebooksCache(InitializedEntitylessCache):
def set_keyframe(self, branch, turn, tick, keyframe):
super().set_keyframe(branch, turn, tick, keyframe)
for char, kf in keyframe.items():
super(EntitylessCache, self).set_keyframe(
(char,), branch, turn, tick, kf
)
|
class CharactersRulebooksCache(InitializedEntitylessCache):
def set_keyframe(self, branch, turn, tick, keyframe):
pass
| 2 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 35 | 7 | 0 | 7 | 3 | 5 | 0 | 5 | 3 | 3 | 2 | 3 | 1 | 2 |
146,380 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.CharacterThingRulesHandledCache
|
class CharacterThingRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
try:
return self.engine._characters_things_rulebooks_cache.retrieve(
character, branch, turn, tick
)
except KeyError:
return "character_thing_rulebook", character
def iter_unhandled_rules(self, branch, turn, tick):
charm = self.engine.character
for character in sort_set(charm.keys()):
rulebook = self.get_rulebook(character, branch, turn, tick)
try:
rules, prio = self.engine._rulebooks_cache.retrieve(
rulebook, branch, turn, tick
)
except KeyError:
continue
if not rules:
continue
for thing in sort_set(charm[character].thing.keys()):
handled = self.get_handled_rules(
(character, thing), rulebook, branch, turn
)
for rule in rules:
if rule not in handled:
yield prio, character, thing, rulebook, rule
|
class CharacterThingRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
pass
def iter_unhandled_rules(self, branch, turn, tick):
pass
| 3 | 0 | 13 | 0 | 13 | 0 | 5 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 12 | 28 | 1 | 27 | 10 | 24 | 0 | 21 | 10 | 18 | 7 | 1 | 4 | 9 |
146,381 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.CharacterRulesHandledCache
|
class CharacterRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
try:
return self.engine._characters_rulebooks_cache.retrieve(
character, branch, turn, tick
)
except KeyError:
return ("character_rulebook", character)
def iter_unhandled_rules(self, branch, turn, tick):
for character in self.engine.character.keys():
rb = self.get_rulebook(character, branch, turn, tick)
try:
rules, prio = self.engine._rulebooks_cache.retrieve(
rb, branch, turn, tick
)
except KeyError:
continue
if not rules:
continue
handled = self.get_handled_rules((character,), rb, branch, turn)
for rule in rules:
if rule not in handled:
yield prio, character, rb, rule
|
class CharacterRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
pass
def iter_unhandled_rules(self, branch, turn, tick):
pass
| 3 | 0 | 11 | 0 | 11 | 0 | 4 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 12 | 24 | 1 | 23 | 8 | 20 | 0 | 19 | 8 | 16 | 6 | 1 | 3 | 8 |
146,382 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.CharacterPortalRulesHandledCache
|
class CharacterPortalRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
try:
return self.engine._characters_portals_rulebooks_cache.retrieve(
character, branch, turn, tick
)
except KeyError:
return "character_portal_rulebook", character
def iter_unhandled_rules(self, branch, turn, tick):
charm = self.engine.character
for character in sort_set(charm.keys()):
rulebook = self.get_rulebook(character, branch, turn, tick)
try:
rules, prio = self.engine._rulebooks_cache.retrieve(
rulebook, branch, turn, tick
)
except KeyError:
continue
if not rules:
continue
char = charm[character]
charn = char.node
charp = char.portal
for orig in sort_set(charp.keys()):
if orig not in charn:
continue
for dest in sort_set(charp[orig].keys()):
if dest not in charn:
continue
handled = self.get_handled_rules(
(character, orig, dest), rulebook, branch, turn
)
for rule in rules:
if rule not in handled:
yield prio, character, orig, dest, rulebook, rule
|
class CharacterPortalRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
pass
def iter_unhandled_rules(self, branch, turn, tick):
pass
| 3 | 0 | 17 | 0 | 17 | 0 | 6 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 12 | 36 | 1 | 35 | 14 | 32 | 0 | 29 | 14 | 26 | 10 | 1 | 5 | 12 |
146,383 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/cache.py
|
lisien.cache.CharacterPlaceRulesHandledCache
|
class CharacterPlaceRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
try:
return self.engine._characters_places_rulebooks_cache.retrieve(
character, branch, turn, tick
)
except KeyError:
return "character_place_rulebook", character
def iter_unhandled_rules(self, branch, turn, tick):
charm = self.engine.character
for character in sort_set(charm.keys()):
rulebook = self.get_rulebook(character, branch, turn, tick)
try:
rules, prio = self.engine._rulebooks_cache.retrieve(
rulebook, branch, turn, tick
)
except KeyError:
continue
if not rules:
continue
for place in sort_set(charm[character].place.keys()):
handled = self.get_handled_rules(
(character, place), rulebook, branch, turn
)
for rule in rules:
if rule not in handled:
yield prio, character, place, rulebook, rule
|
class CharacterPlaceRulesHandledCache(RulesHandledCache):
def get_rulebook(self, character, branch, turn, tick):
pass
def iter_unhandled_rules(self, branch, turn, tick):
pass
| 3 | 0 | 13 | 0 | 13 | 0 | 5 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 12 | 28 | 1 | 27 | 10 | 24 | 0 | 21 | 10 | 18 | 7 | 1 | 4 | 9 |
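The four `*RulesHandledCache` classes above share one generator pattern: look up each entity's rulebook, fetch the rules in it, and yield only the rules that have not yet been handled this turn. A standalone sketch of that pattern with plain dicts standing in for the engine's caches (the entity and rule names below are purely illustrative):

# rulebooks maps an entity to the list of rules that apply to it;
# handled records which (entity, rule) pairs already ran this turn.
rulebooks = {
    "kobold": ["shrubsprint", "breed"],
    "physical": ["kill_kobold"],
}
handled = {("kobold", "breed")}

def iter_unhandled_rules(rulebooks, handled):
    """Yield (entity, rule) for every rule that still needs to run."""
    for entity, rules in rulebooks.items():
        if not rules:
            continue
        for rule in rules:
            if (entity, rule) not in handled:
                yield entity, rule

assert list(iter_unhandled_rules(rulebooks, handled)) == [
    ("kobold", "shrubsprint"),
    ("physical", "kill_kobold"),
]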
146,384 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.UnwrappingDict
|
class UnwrappingDict(dict):
"""Dict that stores the data from the wrapper classes
Won't store those objects themselves.
"""
def __setitem__(self, key, value):
if isinstance(value, MutableWrapper):
value = value.unwrap()
super(UnwrappingDict, self).__setitem__(key, value)
|
class UnwrappingDict(dict):
'''Dict that stores the data from the wrapper classes
Won't store those objects themselves.
'''
def __setitem__(self, key, value):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.6 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 28 | 11 | 3 | 5 | 2 | 3 | 3 | 5 | 2 | 3 | 2 | 2 | 1 | 2 |
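`UnwrappingDict` only overrides `__setitem__`, so anything that knows how to unwrap itself is stored as plain data rather than as the wrapper object. A small sketch of the same normalise-on-write idea; `MockWrapper` is a stand-in, not a lisien class, and the duck-typed `hasattr` check is a simplification of the `isinstance(value, MutableWrapper)` test used above:

class MockWrapper:
    """Pretends to be a MutableWrapper: holds a value and can unwrap it."""
    def __init__(self, value):
        self._value = value
    def unwrap(self):
        return self._value

class NormalizingDict(dict):
    """Dict that stores the unwrapped form of wrapper objects."""
    def __setitem__(self, key, value):
        if hasattr(value, "unwrap"):
            value = value.unwrap()
        super().__setitem__(key, value)

d = NormalizingDict()
d["inventory"] = MockWrapper(["sword", "lamp"])
assert d["inventory"] == ["sword", "lamp"]       # plain list, not the wrapper
assert not isinstance(d["inventory"], MockWrapper)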
146,385 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.SubSetWrapper
|
class SubSetWrapper(MutableWrapperSet, set):
__slots__ = ("_getter", "_set")
_getter: Callable
_set: Callable
def __init__(self, getter, setter):
super().__init__()
self._getter = getter
self._set = setter
def _copy(self):
return set(self._getter())
|
class SubSetWrapper(MutableWrapperSet, set):
def __init__(self, getter, setter):
pass
def _copy(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 2 | 1 | 0 | 0 | 2 | 0 | 2 | 109 | 12 | 2 | 10 | 4 | 7 | 0 | 10 | 4 | 7 | 1 | 8 | 0 | 2 |
146,386 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.SubListWrapper
|
class SubListWrapper(MutableSequenceWrapper, list):
__slots__ = ("_getter", "_set")
_getter: Callable
_set: Callable
def __init__(self, getter, setter):
super().__init__()
self._getter = getter
self._set = setter
def _copy(self):
return list(self._getter())
def insert(self, index, object):
me = self._copy()
me.insert(index, object)
self._set(me)
def append(self, object):
me = self._copy()
me.append(object)
self._set(me)
|
class SubListWrapper(MutableSequenceWrapper, list):
def __init__(self, getter, setter):
pass
def _copy(self):
pass
def insert(self, index, object):
pass
def append(self, object):
pass
| 5 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 2 | 1 | 0 | 0 | 4 | 0 | 4 | 97 | 22 | 4 | 18 | 8 | 13 | 0 | 18 | 8 | 13 | 1 | 8 | 0 | 4 |
146,387 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.SubDictWrapper
|
class SubDictWrapper(MutableMappingWrapper, dict):
__slots__ = ("_getter", "_set")
_getter: Callable
_set: Callable
def __init__(self, getter, setter):
super().__init__()
self._getter = getter
self._set = setter
def _copy(self):
return dict(self._getter())
def _subset(self, k, v):
new = dict(self._getter())
new[k] = v
self._set(new)
|
class SubDictWrapper(MutableMappingWrapper, dict):
def __init__(self, getter, setter):
pass
def _copy(self):
pass
def _subset(self, k, v):
pass
| 4 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 2 | 1 | 0 | 0 | 3 | 0 | 3 | 88 | 17 | 3 | 14 | 6 | 10 | 0 | 14 | 6 | 10 | 1 | 9 | 0 | 3 |
146,388 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.SetWrapper
|
class SetWrapper(MutableWrapperSet, set):
"""A set synchronized with a serialized field.
This is meant to be used in allegedb entities (graph, node, or
edge), for when the user stores a set in them.
"""
__slots__ = ("_getter", "_setter", "_outer", "_key")
_getter: Callable
def __init__(self, getter, setter, outer, key):
super().__init__()
self._getter = getter
self._setter = setter
self._outer = outer
self._key = key
def _set(self, v):
self._setter(v)
self._outer[self._key] = v
|
class SetWrapper(MutableWrapperSet, set):
'''A set synchronized with a serialized field.
This is meant to be used in allegedb entities (graph, node, or
edge), for when the user stores a set in them.
'''
def __init__(self, getter, setter, outer, key):
pass
def _set(self, v):
pass
| 3 | 1 | 5 | 0 | 5 | 0 | 1 | 0.33 | 2 | 1 | 0 | 0 | 2 | 3 | 2 | 109 | 21 | 5 | 12 | 7 | 9 | 4 | 12 | 7 | 9 | 1 | 8 | 0 | 2 |
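`SetWrapper` and its relatives never mutate in place: each mutator copies the current contents, changes the copy, and writes the whole thing back through a setter, which keeps the serialized backing field and the outer mapping in sync. A standalone sketch of that copy-then-set round trip, with a JSON string standing in for the serialized field (all names here are hypothetical):

import json

backing = {"tags": json.dumps(["red"])}  # pretend this field lives in a database row

def get_tags():
    return set(json.loads(backing["tags"]))

def set_tags(new):
    backing["tags"] = json.dumps(sorted(new))  # reserialize the whole set

class SyncedSet:
    """Set-like view whose every mutation rewrites the backing field."""
    def __init__(self, getter, setter):
        self._get, self._set = getter, setter
    def _copy(self):
        return set(self._get())
    def add(self, element):
        me = self._copy()
        me.add(element)
        self._set(me)
    def discard(self, element):
        me = self._copy()
        me.discard(element)
        self._set(me)
    def __contains__(self, element):
        return element in self._get()

tags = SyncedSet(get_tags, set_tags)
tags.add("blue")
assert "blue" in tags
assert json.loads(backing["tags"]) == ["blue", "red"]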
146,389 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/dialog.py
|
elide.dialog.MessageBox
|
class MessageBox(Box):
"""Looks like a TextInput but doesn't accept any input.
Does support styled text with BBcode.
"""
line_spacing = NumericProperty(0)
text = StringProperty()
|
class MessageBox(Box):
'''Looks like a TextInput but doesn't accept any input.
Does support styled text with BBcode.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 3 | 3 | 3 | 2 | 3 | 3 | 3 | 2 | 0 | 2 | 0 | 0 |
146,390 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/spritebuilder.py
|
elide.spritebuilder.PawnConfigDialog
|
class PawnConfigDialog(SpriteDialog):
pass
|
class PawnConfigDialog(SpriteDialog):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
146,391 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/wrap.py
|
lisien.allegedb.wrap.MutableWrapperSet
|
class MutableWrapperSet(MutableWrapper, MutableSet):
__slots__ = ()
_getter: Callable
_set: Callable
def _copy(self):
return set(self._getter())
def pop(self):
me = self._copy()
yours = me.pop()
self._set(me)
return yours
def discard(self, element):
me = self._copy()
me.discard(element)
self._set(me)
def remove(self, element):
me = self._copy()
me.remove(element)
self._set(me)
def add(self, element):
me = self._copy()
me.add(element)
self._set(me)
def unwrap(self):
"""Deep copy myself as a set, all contents unwrapped"""
return {
v.unwrap()
if hasattr(v, "unwrap") and not hasattr(v, "no_unwrap")
else v
for v in self
}
|
class MutableWrapperSet(MutableWrapper, MutableSet):
def _copy(self):
pass
def pop(self):
pass
def discard(self, element):
pass
def remove(self, element):
pass
def add(self, element):
pass
def unwrap(self):
'''Deep copy myself as a set, all contents unwrapped'''
pass
| 7 | 1 | 5 | 0 | 4 | 0 | 1 | 0.03 | 2 | 1 | 0 | 2 | 6 | 0 | 6 | 64 | 37 | 6 | 30 | 13 | 23 | 1 | 25 | 13 | 18 | 2 | 7 | 0 | 7 |
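`unwrap` above turns a wrapper set back into plain data by recursively unwrapping every member that can be unwrapped, unless the member is flagged `no_unwrap`. A short sketch of that convention with stand-in wrapper classes (not lisien's):

class Wrapped:
    """Stand-in wrapper: unwraps to the value it holds."""
    def __init__(self, value):
        self.value = value
    def unwrap(self):
        return self.value
    def __hash__(self):
        return hash(self.value)

class Pinned(Wrapped):
    """Same, but flagged so that unwrapping leaves it alone."""
    no_unwrap = True

def unwrap_set(s):
    return {
        v.unwrap() if hasattr(v, "unwrap") and not hasattr(v, "no_unwrap") else v
        for v in s
    }

plain = unwrap_set({Wrapped(1), Pinned(2), 3})
assert 1 in plain and 3 in plain                  # Wrapped(1) became a plain 1
assert any(isinstance(v, Pinned) for v in plain)  # the pinned wrapper survived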
146,392 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/graph/board.py
|
elide.graph.board.KvLayoutFront
|
class KvLayoutFront(FloatLayout):
"""What to show in front of the graph.
By default, shows nothing.
"""
pass
|
class KvLayoutFront(FloatLayout):
'''What to show in front of the graph.
By default, shows nothing.
'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 3 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
146,393 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade.UnitGraphMapping.UnitMapping
|
class UnitMapping(Mapping):
def __init__(self, character, graph_name):
self.character = character
self.graph_name = graph_name
def __iter__(self):
for key in self.character.engine._unitness_cache.iter_keys(
self.character.name,
self.graph_name,
*self.character.engine._btt(),
):
if key in self:
yield key
def __len__(self):
return self.character.engine._unitness_cache.count_keys(
self.character.name,
self.graph_name,
*self.character.engine._btt(),
)
def __contains__(self, item):
try:
return self.character.engine._unitness_cache.retrieve(
self.character.name,
self.graph_name,
item,
*self.character.engine._btt(),
)
except KeyError:
return False
def __getitem__(self, item):
if item not in self:
raise KeyError(
"Not a unit of this character in this graph",
item,
self.character.name,
self.graph_name,
)
return self.character.engine.character[self.graph_name].node[
item
]
|
class UnitMapping(Mapping):
def __init__(self, character, graph_name):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
| 6 | 0 | 8 | 0 | 8 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 5 | 2 | 5 | 5 | 43 | 4 | 39 | 9 | 33 | 0 | 19 | 9 | 13 | 3 | 1 | 2 | 9 |
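`UnitMapping` above implements the read-only `Mapping` protocol on top of a cache: `__contains__` does the authoritative lookup and `__iter__` filters candidate keys through it. A generic sketch of that filtered-view idea, backed by an ordinary dict of flags rather than lisien's unitness cache:

from collections.abc import Mapping

class FlagFilteredView(Mapping):
    """Read-only view of `data` restricted to keys whose flag is truthy."""
    def __init__(self, data, flags):
        self._data = data    # key -> value
        self._flags = flags  # key -> bool
    def __contains__(self, key):
        return bool(self._flags.get(key))
    def __iter__(self):
        return (k for k in self._data if k in self)
    def __len__(self):
        return sum(1 for _ in self)
    def __getitem__(self, key):
        if key not in self:
            raise KeyError(key)
        return self._data[key]

view = FlagFilteredView(
    {"alice": 1, "bob": 2, "carol": 3},
    {"alice": True, "bob": False, "carol": True},
)
assert list(view) == ["alice", "carol"]
assert len(view) == 2
assert view["carol"] == 3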
146,394 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade.UnitGraphMapping
|
class UnitGraphMapping(Mapping):
class UnitMapping(Mapping):
def __init__(self, character, graph_name):
self.character = character
self.graph_name = graph_name
def __iter__(self):
for key in self.character.engine._unitness_cache.iter_keys(
self.character.name,
self.graph_name,
*self.character.engine._btt(),
):
if key in self:
yield key
def __len__(self):
return self.character.engine._unitness_cache.count_keys(
self.character.name,
self.graph_name,
*self.character.engine._btt(),
)
def __contains__(self, item):
try:
return self.character.engine._unitness_cache.retrieve(
self.character.name,
self.graph_name,
item,
*self.character.engine._btt(),
)
except KeyError:
return False
def __getitem__(self, item):
if item not in self:
raise KeyError(
"Not a unit of this character in this graph",
item,
self.character.name,
self.graph_name,
)
return self.character.engine.character[self.graph_name].node[
item
]
def __init__(self, character):
self.character = character
def __iter__(self):
engine = self.character.engine
name = self.character.name
now = self.character.engine._btt()
for key in engine._unitness_cache.iter_keys(name, *now):
if key in self:
yield key
def __len__(self):
return self.character.engine._unitness_cache.count_keys(
self.character.name, *self.character.engine._btt()
)
def __contains__(self, item):
now = self.character.engine._btt()
name = self.character.name
engine = self.character.engine
try:
engine._unitness_cache.retrieve(name, item, *now)
return True
except KeyError:
return False
def __getitem__(self, item):
if item not in self:
raise KeyError(
"Character has no units in graph",
self.character.name,
item,
)
return self.UnitMapping(self.character, item)
|
class UnitGraphMapping(Mapping):
class UnitMapping(Mapping):
def __init__(self, character, graph_name):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
def __init__(self, character, graph_name):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
| 12 | 0 | 7 | 0 | 7 | 0 | 2 | 0 | 1 | 2 | 1 | 0 | 5 | 1 | 5 | 5 | 79 | 9 | 70 | 23 | 58 | 0 | 44 | 23 | 32 | 3 | 1 | 2 | 18 |
146,395 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade.ThingMapping
|
class ThingMapping(FacadeEntityMapping):
facadecls = FacadeThing
innercls: type
def __init__(self, facade, _=None):
from .node import Thing
self.innercls = Thing
super().__init__(facade, _)
def _get_inner_map(self):
try:
return self.facade.character.thing
except AttributeError:
return {}
def patch(self, d: dict):
places = d.keys() & self.facade.place.keys()
if places:
raise KeyError(
f"Tried to patch places on thing mapping: {places}"
)
self.facade.node.patch(d)
|
class ThingMapping(FacadeEntityMapping):
def __init__(self, facade, _=None):
pass
def _get_inner_map(self):
pass
def patch(self, d: dict):
pass
| 4 | 0 | 6 | 0 | 5 | 0 | 2 | 0 | 1 | 5 | 1 | 0 | 3 | 0 | 3 | 55 | 23 | 4 | 19 | 7 | 14 | 0 | 17 | 7 | 12 | 2 | 9 | 1 | 5 |
146,396 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade.StatMapping
|
class StatMapping(MutableMappingUnwrapper, Signal):
def __init__(self, facade):
super().__init__()
self.facade = facade
self._patch = {}
def __iter__(self):
seen = set()
if hasattr(self.facade.character, "graph"):
for k in self.facade.character.graph:
if k not in self._patch:
yield k
seen.add(k)
for k, v in self._patch.items():
if k not in seen and v is not None:
yield k
def __len__(self):
n = 0
for k in self:
n += 1
return n
def __contains__(self, k):
if k in self._patch:
return self._patch[k] is not None
if (
hasattr(self.facade.character, "graph")
and k in self.facade.character.graph
):
return True
return False
def __getitem__(self, k):
if k not in self._patch and hasattr(
self.facade.character, "graph"
):
ret = self.facade.character.graph[k]
if not hasattr(ret, "unwrap"):
return ret
self._patch[k] = ret.unwrap()
if self._patch[k] is None:
            raise KeyError(k)
return self._patch[k]
def __setitem__(self, k, v):
if self.facade.engine._planning:
self.facade.engine._planned[
self.facade.character.engine._curplan
][self.facade.engine.turn].append((self.facade.name, k, v))
return
self._patch[k] = v
def __delitem__(self, k):
self._patch[k] = None
|
class StatMapping(MutableMappingUnwrapper, Signal):
def __init__(self, facade):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, k):
pass
def __getitem__(self, k):
pass
def __setitem__(self, k, v):
pass
def __delitem__(self, k):
pass
| 8 | 0 | 7 | 0 | 7 | 0 | 3 | 0 | 2 | 3 | 0 | 0 | 7 | 2 | 7 | 50 | 55 | 6 | 49 | 16 | 41 | 0 | 42 | 16 | 34 | 6 | 8 | 3 | 19 |
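The facade's `StatMapping` layers a local `_patch` dict over the real character's stats: reads fall through to the underlying graph, writes land only in the patch, and storing `None` marks a key as deleted. A standalone sketch of that overlay idea, without the planning hooks and with hypothetical names:

from collections.abc import MutableMapping

class OverlayMapping(MutableMapping):
    """Mutable view over `base`; edits stay in `patch`, None means deleted."""
    def __init__(self, base):
        self.base = base
        self.patch = {}
    def __getitem__(self, k):
        if k in self.patch:
            if self.patch[k] is None:
                raise KeyError(k)
            return self.patch[k]
        return self.base[k]
    def __setitem__(self, k, v):
        self.patch[k] = v
    def __delitem__(self, k):
        self.patch[k] = None
    def __iter__(self):
        for k in self.base:
            if k not in self.patch or self.patch[k] is not None:
                yield k
        for k, v in self.patch.items():
            if k not in self.base and v is not None:
                yield k
    def __len__(self):
        return sum(1 for _ in self)

stats = OverlayMapping({"hp": 10, "mp": 4})
stats["hp"] = 7          # shadowed; the base dict is untouched
del stats["mp"]          # tombstoned in the patch only
stats["xp"] = 1          # new key lives in the patch
assert dict(stats) == {"hp": 7, "xp": 1}
assert stats.base == {"hp": 10, "mp": 4}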
146,397 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade.PortalSuccessorsMapping
|
class PortalSuccessorsMapping(FacadePortalMapping):
cls = FacadePortalSuccessors
def __contains__(self, item):
return item in self.facade.node
def _get_inner_map(self):
try:
return self.facade.character._adj
except AttributeError:
return {}
|
class PortalSuccessorsMapping(FacadePortalMapping):
def __contains__(self, item):
pass
def _get_inner_map(self):
pass
| 3 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 55 | 11 | 2 | 9 | 4 | 6 | 0 | 9 | 4 | 6 | 2 | 10 | 1 | 3 |
146,398 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade.PortalPredecessorsMapping
|
class PortalPredecessorsMapping(FacadePortalMapping):
cls = FacadePortalPredecessors
def __contains__(self, item):
return item in self.facade._node
def _get_inner_map(self):
try:
return self.facade.character.pred
except AttributeError:
return {}
|
class PortalPredecessorsMapping(FacadePortalMapping):
def __contains__(self, item):
pass
def _get_inner_map(self):
pass
| 3 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 55 | 11 | 2 | 9 | 4 | 6 | 0 | 9 | 4 | 6 | 2 | 10 | 1 | 3 |
146,399 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade.PlaceMapping
|
class PlaceMapping(FacadeEntityMapping):
facadecls = FacadePlace
innercls: type
def __init__(self, facade, _=None):
from .node import Place
if not isinstance(facade, CharacterFacade):
raise TypeError("Need CharacterFacade")
self.innercls = Place
super().__init__(facade, _)
def _get_inner_map(self):
if isinstance(self.facade.character, nx.Graph) and not isinstance(
self.facade.character, AbstractCharacter
):
return self.facade.character._node
try:
return self.facade.character.place
except AttributeError:
return {}
def patch(self, d: dict):
things = d.keys() & self.facade.thing.keys()
if things:
raise KeyError(
f"Tried to patch things on place mapping: {things}"
)
self.facade.node.patch(d)
|
class PlaceMapping(FacadeEntityMapping):
def __init__(self, facade, _=None):
pass
def _get_inner_map(self):
pass
def patch(self, d: dict):
pass
| 4 | 0 | 8 | 1 | 7 | 0 | 2 | 0 | 1 | 8 | 3 | 0 | 3 | 0 | 3 | 55 | 30 | 5 | 25 | 7 | 20 | 0 | 21 | 7 | 16 | 3 | 9 | 1 | 7 |
146,400 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.CharacterFacade
|
class CharacterFacade(AbstractCharacter, nx.DiGraph):
def __getstate__(self):
ports = {}
for o in self.portal:
if o not in ports:
ports[o] = {}
for d in self.portal[o]:
ports[o][d] = dict(self.portal[o][d])
things = {k: dict(v) for (k, v) in self.thing.items()}
places = {k: dict(v) for (k, v) in self.place.items()}
stats = {
k: v.unwrap() if hasattr(v, "unwrap") else v
for (k, v) in self.graph.items()
}
return things, places, ports, stats
def __setstate__(self, state):
self.character = None
self.graph = self.StatMapping(self)
(
self.thing._patch,
self.place._patch,
self.portal._patch,
self.graph._patch,
) = state
def add_places_from(self, seq, **attrs):
for place in seq:
self.add_place(place, **attrs)
def add_things_from(self, seq, **attrs):
for thing in seq:
self.add_thing(thing, **attrs)
def thing2place(self, name):
self.place[name] = self.thing.pop(name)
def place2thing(self, name, location):
it = self.place.pop(name)
it["location"] = location
self.thing[name] = it
def add_portals_from(self, seq, **attrs):
for it in seq:
self.add_portal(*it, **attrs)
def remove_unit(self, a, b=None):
if b is None:
if not isinstance(a, FacadeNode):
raise TypeError("Need a node or character")
charn = a.character.name
noden = a.name
else:
charn = a
if isinstance(b, FacadeNode):
noden = b.name
else:
noden = b
self.engine._unitness_cache.store(
self.name, charn, noden, *self.engine._btt(), False
)
def add_place(self, name, **kwargs):
self.place[name] = kwargs
def add_node(self, name, **kwargs):
"""Version of add_node that assumes it's a place"""
self.place[name] = kwargs
def remove_node(self, node):
"""Version of remove_node that handles place or thing"""
if node in self.thing:
del self.thing[node]
else:
del self.place[node]
def remove_place(self, place):
del self.place[place]
def remove_thing(self, thing):
del self.thing[thing]
def add_thing(self, name, location, **kwargs):
kwargs["location"] = location
self.thing[name] = kwargs
def add_portal(self, orig, dest, **kwargs):
self.portal[orig][dest] = kwargs
def remove_portal(self, origin, destination):
del self.portal[origin][destination]
def add_edge(self, orig, dest, **kwargs):
"""Wrapper for add_portal"""
self.add_portal(orig, dest, **kwargs)
def add_unit(self, a, b=None):
if b is None:
if not isinstance(a, FacadeNode):
raise TypeError("Need a node or character")
charn = a.character.name
noden = a.name
else:
charn = a
if isinstance(b, FacadeNode):
noden = b.name
else:
noden = b
self.engine._unitness_cache.store(
self.name, charn, noden, *self.engine._btt(), True
)
def __init__(self, character=None, engine=None):
self.character = character
self.db = EngineFacade(engine or getattr(character, "db", None))
self._stat_map = self.StatMapping(self)
self._rb_patch = {}
if character:
self.db.character._patch[character.name] = self
@property
def graph(self):
return self._stat_map
@graph.setter
def graph(self, v):
self._stat_map.clear()
self._stat_map.update(v)
class UnitGraphMapping(Mapping):
class UnitMapping(Mapping):
def __init__(self, character, graph_name):
self.character = character
self.graph_name = graph_name
def __iter__(self):
for key in self.character.engine._unitness_cache.iter_keys(
self.character.name,
self.graph_name,
*self.character.engine._btt(),
):
if key in self:
yield key
def __len__(self):
return self.character.engine._unitness_cache.count_keys(
self.character.name,
self.graph_name,
*self.character.engine._btt(),
)
def __contains__(self, item):
try:
return self.character.engine._unitness_cache.retrieve(
self.character.name,
self.graph_name,
item,
*self.character.engine._btt(),
)
except KeyError:
return False
def __getitem__(self, item):
if item not in self:
raise KeyError(
"Not a unit of this character in this graph",
item,
self.character.name,
self.graph_name,
)
return self.character.engine.character[self.graph_name].node[
item
]
def __init__(self, character):
self.character = character
def __iter__(self):
engine = self.character.engine
name = self.character.name
now = self.character.engine._btt()
for key in engine._unitness_cache.iter_keys(name, *now):
if key in self:
yield key
def __len__(self):
return self.character.engine._unitness_cache.count_keys(
self.character.name, *self.character.engine._btt()
)
def __contains__(self, item):
now = self.character.engine._btt()
name = self.character.name
engine = self.character.engine
try:
engine._unitness_cache.retrieve(name, item, *now)
return True
except KeyError:
return False
def __getitem__(self, item):
if item not in self:
raise KeyError(
"Character has no units in graph",
self.character.name,
item,
)
return self.UnitMapping(self.character, item)
class ThingMapping(FacadeEntityMapping):
facadecls = FacadeThing
innercls: type
def __init__(self, facade, _=None):
from .node import Thing
self.innercls = Thing
super().__init__(facade, _)
def _get_inner_map(self):
try:
return self.facade.character.thing
except AttributeError:
return {}
def patch(self, d: dict):
places = d.keys() & self.facade.place.keys()
if places:
raise KeyError(
f"Tried to patch places on thing mapping: {places}"
)
self.facade.node.patch(d)
class PlaceMapping(FacadeEntityMapping):
facadecls = FacadePlace
innercls: type
def __init__(self, facade, _=None):
from .node import Place
if not isinstance(facade, CharacterFacade):
raise TypeError("Need CharacterFacade")
self.innercls = Place
super().__init__(facade, _)
def _get_inner_map(self):
if isinstance(self.facade.character, nx.Graph) and not isinstance(
self.facade.character, AbstractCharacter
):
return self.facade.character._node
try:
return self.facade.character.place
except AttributeError:
return {}
def patch(self, d: dict):
things = d.keys() & self.facade.thing.keys()
if things:
raise KeyError(
f"Tried to patch things on place mapping: {things}"
)
self.facade.node.patch(d)
def ThingPlaceMapping(self, *args):
return CompositeDict(self.place, self.thing)
class PortalSuccessorsMapping(FacadePortalMapping):
cls = FacadePortalSuccessors
def __contains__(self, item):
return item in self.facade.node
def _get_inner_map(self):
try:
return self.facade.character._adj
except AttributeError:
return {}
class PortalPredecessorsMapping(FacadePortalMapping):
cls = FacadePortalPredecessors
def __contains__(self, item):
return item in self.facade._node
def _get_inner_map(self):
try:
return self.facade.character.pred
except AttributeError:
return {}
class StatMapping(MutableMappingUnwrapper, Signal):
def __init__(self, facade):
super().__init__()
self.facade = facade
self._patch = {}
def __iter__(self):
seen = set()
if hasattr(self.facade.character, "graph"):
for k in self.facade.character.graph:
if k not in self._patch:
yield k
seen.add(k)
for k, v in self._patch.items():
if k not in seen and v is not None:
yield k
def __len__(self):
n = 0
for k in self:
n += 1
return n
def __contains__(self, k):
if k in self._patch:
return self._patch[k] is not None
if (
hasattr(self.facade.character, "graph")
and k in self.facade.character.graph
):
return True
return False
def __getitem__(self, k):
if k not in self._patch and hasattr(
self.facade.character, "graph"
):
ret = self.facade.character.graph[k]
if not hasattr(ret, "unwrap"):
return ret
self._patch[k] = ret.unwrap()
if self._patch[k] is None:
                raise KeyError(k)
return self._patch[k]
def __setitem__(self, k, v):
if self.facade.engine._planning:
self.facade.engine._planned[
self.facade.character.engine._curplan
][self.facade.engine.turn].append((self.facade.name, k, v))
return
self._patch[k] = v
def __delitem__(self, k):
self._patch[k] = None
def apply(self):
"""Do all my changes for real in a batch"""
realchar = self.character
realstat = realchar.stat
realthing = realchar.thing
realplace = realchar.place
realport = realchar.portal
realeng = self.engine._real
for k, v in self.stat._patch.items():
if v is None:
del realstat[k]
else:
realstat[k] = v
self.stat._patch = {}
for k, v in self.thing._patch.items():
if v is None:
del realthing[k]
elif k not in realthing:
if isinstance(v, FacadeThing):
v = v._patch
if "name" in v:
assert v.pop("name") == k
realchar.add_thing(k, **v)
else:
v.apply()
self.thing._patch = {}
for k, v in self.place._patch.items():
if v is None:
del realplace[k]
elif k not in realplace:
realchar.add_place(k, **v)
else:
v.apply()
self.place._patch = {}
for orig, dests in self.portal._patch.items():
for dest, v in dests.items():
if v is None:
del realport[orig][dest]
elif orig not in realport or dest not in realport[orig]:
realchar.add_portal(orig, dest, **v)
else:
v.apply()
self.portal._patch = {}
|
class CharacterFacade(AbstractCharacter, nx.DiGraph):
def __getstate__(self):
pass
def __setstate__(self, state):
pass
def add_places_from(self, seq, **attrs):
pass
def add_things_from(self, seq, **attrs):
pass
def thing2place(self, name):
pass
def place2thing(self, name, location):
pass
def add_portals_from(self, seq, **attrs):
pass
def remove_unit(self, a, b=None):
pass
def add_places_from(self, seq, **attrs):
pass
def add_node(self, name, **kwargs):
'''Version of add_node that assumes it's a place'''
pass
def remove_node(self, node):
'''Version of remove_node that handles place or thing'''
pass
def remove_place(self, place):
pass
def remove_thing(self, thing):
pass
def add_things_from(self, seq, **attrs):
pass
def add_portals_from(self, seq, **attrs):
pass
def remove_portal(self, origin, destination):
pass
def add_edge(self, orig, dest, **kwargs):
'''Wrapper for add_portal'''
pass
def add_unit(self, a, b=None):
pass
def __init__(self, character=None, engine=None):
pass
@property
def graph(self):
pass
@graph.setter
def graph(self):
pass
class UnitGraphMapping(Mapping):
class UnitMapping(Mapping):
def __init__(self, character=None, engine=None):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
def __init__(self, character=None, engine=None):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
class ThingMapping(FacadeEntityMapping):
def __init__(self, character=None, engine=None):
pass
def _get_inner_map(self):
pass
def patch(self, d: dict):
pass
class PlaceMapping(FacadeEntityMapping):
def __init__(self, character=None, engine=None):
pass
def _get_inner_map(self):
pass
def patch(self, d: dict):
pass
def ThingPlaceMapping(self, *args):
pass
class PortalSuccessorsMapping(FacadePortalMapping):
def __contains__(self, item):
pass
def _get_inner_map(self):
pass
class PortalPredecessorsMapping(FacadePortalMapping):
def __contains__(self, item):
pass
def _get_inner_map(self):
pass
class StatMapping(MutableMappingUnwrapper, Signal):
def __init__(self, character=None, engine=None):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, item):
pass
def __getitem__(self, item):
pass
def __setitem__(self, k, v):
pass
def __delitem__(self, k):
pass
def apply(self):
'''Do all my changes for real in a batch'''
pass
| 60 | 4 | 6 | 0 | 6 | 0 | 2 | 0.01 | 2 | 7 | 5 | 0 | 23 | 4 | 23 | 99 | 390 | 56 | 330 | 114 | 268 | 4 | 265 | 112 | 205 | 15 | 7 | 3 | 107 |
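`CharacterFacade.apply` flushes every staged change (stats, things, places, portals) back to the real character in one batch, deleting where the staged value is `None` and creating whatever does not exist yet. A compact sketch of that stage-then-apply idea for a single dict of stats, with no engine involved (names are hypothetical):

class StagedEdits:
    """Collect edits against `real` and apply them all at once."""
    def __init__(self, real):
        self.real = real     # the authoritative dict
        self.patch = {}      # staged edits; None means delete
    def __setitem__(self, k, v):
        self.patch[k] = v
    def __delitem__(self, k):
        self.patch[k] = None
    def apply(self):
        for k, v in self.patch.items():
            if v is None:
                self.real.pop(k, None)
            else:
                self.real[k] = v
        self.patch = {}      # start a fresh batch

world = {"weather": "rain", "season": "spring"}
edits = StagedEdits(world)
edits["weather"] = "snow"
del edits["season"]
assert world == {"weather": "rain", "season": "spring"}  # nothing applied yet
edits.apply()
assert world == {"weather": "snow"}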
146,401 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/exc.py
|
lisien.exc.TravelException
|
class TravelException(Exception):
"""Exception for problems with pathfinding.
Not necessarily an error because sometimes somebody SHOULD get
confused finding a path.
"""
def __init__(
self,
message,
path=None,
followed=None,
traveller=None,
branch=None,
turn=None,
lastplace=None,
):
"""Store the message as usual, and also the optional arguments:
``path``: a list of Place names to show such a path as you found
``followed``: the portion of the path actually followed
``traveller``: the Thing doing the travelling
``branch``: branch during travel
        ``turn``: turn at time of error (might not be the turn at the
time this exception is raised)
``lastplace``: where the traveller was, when the error happened
"""
self.path = path
self.followed = followed
self.traveller = traveller
self.branch = branch
self.turn = turn
self.lastplace = lastplace
super().__init__(message)
|
class TravelException(Exception):
'''Exception for problems with pathfinding.
Not necessarily an error because sometimes somebody SHOULD get
confused finding a path.
'''
def __init__(
self,
message,
path=None,
followed=None,
traveller=None,
branch=None,
turn=None,
lastplace=None,
):
'''Store the message as usual, and also the optional arguments:
``path``: a list of Place names to show such a path as you found
``followed``: the portion of the path actually followed
``traveller``: the Thing doing the travelling
``branch``: branch during travel
        ``turn``: turn at time of error (might not be the turn at the
time this exception is raised)
``lastplace``: where the traveller was, when the error happened
'''
pass
| 2 | 2 | 33 | 7 | 17 | 9 | 1 | 0.72 | 1 | 1 | 0 | 0 | 1 | 6 | 1 | 11 | 41 | 10 | 18 | 17 | 7 | 13 | 9 | 8 | 7 | 1 | 3 | 0 | 1 |
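`TravelException` is less an error than a report: it carries the planned path, how much of it was actually followed, and where the traveller ended up, so the caller can decide what to do next. A hedged sketch of raising and consuming such a context-carrying exception; the pathfinding below is fake and `PathBlocked` is a stand-in, not lisien's class:

class PathBlocked(Exception):
    """Illustrative stand-in for TravelException: keeps trip context."""
    def __init__(self, message, path=None, followed=None, lastplace=None):
        super().__init__(message)
        self.path = path
        self.followed = followed
        self.lastplace = lastplace

def follow(path, blocked):
    followed = []
    for place in path:
        if place in blocked:
            raise PathBlocked(
                "Path blocked at {}".format(place),
                path=path,
                followed=followed,
                lastplace=followed[-1] if followed else None,
            )
        followed.append(place)
    return followed

try:
    follow(["gate", "bridge", "keep"], blocked={"bridge"})
except PathBlocked as err:
    assert err.followed == ["gate"]
    assert err.lastplace == "gate"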
146,402 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/engine.py
|
lisien.engine.NullSchema
|
class NullSchema(AbstractSchema):
"""Schema that permits all changes to the game world"""
def entity_permitted(self, entity):
return True
def stat_permitted(self, turn, entity, key, value):
return True
|
class NullSchema(AbstractSchema):
'''Schema that permits all changes to the game world'''
def entity_permitted(self, entity):
pass
def stat_permitted(self, turn, entity, key, value):
pass
| 3 | 1 | 2 | 0 | 2 | 0 | 1 | 0.2 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 25 | 8 | 2 | 5 | 3 | 2 | 1 | 5 | 3 | 2 | 1 | 5 | 0 | 2 |
146,403 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/engine.py
|
lisien.engine.NextTurn
|
class NextTurn(Signal):
"""Make time move forward in the simulation.
Calls ``advance`` repeatedly, returning a list of the rules' return values.
I am also a ``Signal``, so you can register functions to be
called when the simulation runs. Pass them to my ``connect``
method.
"""
def __init__(self, engine: Engine):
super().__init__()
self.engine = engine
def __call__(self) -> tuple[list, DeltaDict]:
engine = self.engine
for store in engine.stores:
if getattr(store, "_need_save", None):
store.save()
elif hasattr(store, "reimport"):
try:
store.reimport()
except FileNotFoundError:
# Maybe the game uses no prereqs or something.
pass
engine._call_every_subprocess("_reimport_code")
start_branch, start_turn, start_tick = engine._btt()
latest_turn = engine._get_last_completed_turn(start_branch)
if start_turn < latest_turn:
engine.turn += 1
self.send(
engine,
branch=engine.branch,
turn=engine.turn,
tick=engine.tick,
)
return [], engine._get_branch_delta(
branch=start_branch,
turn_from=start_turn,
turn_to=engine.turn,
tick_from=start_tick,
tick_to=engine.tick,
)
elif start_turn > latest_turn + 1:
raise exc.RulesEngineError(
"Can't run the rules engine on any turn but the latest"
)
if start_turn == latest_turn:
# Pre-emptively nudge the loadedness and branch tracking,
# so that lisien does not try to load an empty turn before every
# loop of the rules engine
turn0, tick0, turn1, tick1 = engine._loaded[start_branch]
engine._loaded[start_branch] = (turn0, tick0, start_turn + 1, 0)
parent, turn_from, tick_from, turn_to, tick_to = engine._branches[
start_branch
]
engine._branches[start_branch] = (
parent,
turn_from,
tick_from,
start_turn + 1,
0,
)
engine.turn += 1
if hasattr(engine, "_worker_updated_btts"):
engine._update_all_worker_process_states()
results = []
if hasattr(engine, "_rules_iter"):
it = engine._rules_iter
else:
todo = engine._eval_triggers()
it = engine._rules_iter = engine._follow_rules(todo)
with engine.advancing():
for res in it:
if isinstance(res, InnerStopIteration):
del engine._rules_iter
raise StopIteration from res
elif res:
if isinstance(res, tuple) and res[0] == "stop":
engine.universal["last_result"] = res
engine.universal["last_result_idx"] = 0
branch, turn, tick = engine._btt()
self.send(engine, branch=branch, turn=turn, tick=tick)
return list(res), engine._get_branch_delta(
branch=start_branch,
turn_from=start_turn,
turn_to=turn,
tick_from=start_tick,
tick_to=tick,
)
else:
results.extend(res)
del engine._rules_iter
engine._complete_turn(
start_branch,
engine.turn,
)
if (
engine.flush_interval is not None
and engine.turn % engine.flush_interval == 0
):
engine.query.flush()
if (
engine.commit_interval is not None
and engine.turn % engine.commit_interval == 0
):
engine.commit()
self.send(
self.engine,
branch=engine.branch,
turn=engine.turn,
tick=engine.tick,
)
delta = engine._get_branch_delta(
branch=engine.branch,
turn_from=start_turn,
turn_to=engine.turn,
tick_from=start_tick,
tick_to=engine.tick,
)
if results:
engine.universal["last_result"] = results
engine.universal["last_result_idx"] = 0
return results, delta
|
class NextTurn(Signal):
'''Make time move forward in the simulation.
Calls ``advance`` repeatedly, returning a list of the rules' return values.
I am also a ``Signal``, so you can register functions to be
called when the simulation runs. Pass them to my ``connect``
method.
'''
def __init__(self, engine: Engine):
pass
def __call__(self) -> tuple[list, DeltaDict]:
pass
| 3 | 1 | 58 | 0 | 56 | 2 | 9 | 0.09 | 1 | 8 | 3 | 0 | 2 | 1 | 2 | 2 | 127 | 5 | 112 | 16 | 109 | 10 | 59 | 16 | 56 | 17 | 1 | 4 | 18 |
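`NextTurn` plays two roles at once: it is the callable that advances the simulation, and it is a `Signal` that reports the new branch, turn, and tick to whoever connected to it. A minimal observer-style sketch of that dual role, using a hand-rolled signal rather than the `Signal` base class lisien builds on:

class MiniSignal:
    """Tiny stand-in for a Signal: receivers get keyword arguments."""
    def __init__(self):
        self._receivers = []
    def connect(self, func):
        self._receivers.append(func)
        return func
    def send(self, sender, **kwargs):
        for func in self._receivers:
            func(sender, **kwargs)

class AdvanceTurn(MiniSignal):
    """Callable that bumps the clock, then tells everyone where we are."""
    def __init__(self, clock):
        super().__init__()
        self.clock = clock  # dict with 'branch' and 'turn'
    def __call__(self):
        self.clock["turn"] += 1
        results = ["rule ran"]  # pretend the rules engine produced this
        self.send(self, branch=self.clock["branch"], turn=self.clock["turn"])
        return results

log = []
next_turn = AdvanceTurn({"branch": "trunk", "turn": 0})
next_turn.connect(lambda sender, branch, turn: log.append((branch, turn)))
assert next_turn() == ["rule ran"]
assert log == [("trunk", 1)]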
146,404 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/engine.py
|
lisien.engine.DummyEntity
|
class DummyEntity(dict):
"""Something to use in place of a node or edge"""
__slots__ = ["engine"]
def __init__(self, engine: AbstractEngine):
super().__init__()
self.engine = engine
|
class DummyEntity(dict):
'''Something to use in place of a node or edge'''
def __init__(self, engine: AbstractEngine):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 1 | 0.2 | 1 | 2 | 1 | 0 | 1 | 1 | 1 | 28 | 8 | 2 | 5 | 4 | 3 | 1 | 5 | 4 | 3 | 1 | 2 | 0 | 1 |
146,405 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.UnitGraphMapping.CharacterUnitMapping
|
class CharacterUnitMapping(Mapping):
"""Mapping of units of one Character in another Character."""
def __init__(self, outer, graphn):
"""Store this character and the name of the other one"""
self.character = character = outer.character
self.engine = engine = outer.engine
self.name = name = outer.name
self.graph = graphn
avcache = engine._unitness_cache
btt = engine._btt
self._iter_stuff = iter_stuff = (
avcache.get_char_graph_units,
name,
graphn,
btt,
)
self._contains_stuff = (
avcache._base_retrieve,
name,
graphn,
btt,
)
get_node = engine._get_node
self._getitem_stuff = iter_stuff + (
get_node,
graphn,
engine.character,
)
self._only_stuff = (get_node, engine.character, graphn)
def __iter__(self):
"""Iterate over names of unit nodes"""
get_char_graph_avs, name, graphn, btt = self._iter_stuff
return iter(get_char_graph_avs(name, graphn, *btt()))
def __contains__(self, av):
base_retrieve, name, graphn, btt = self._contains_stuff
return (
base_retrieve(
(name, graphn, av, *btt()),
store_hint=False,
retrieve_hint=False,
)
is True
)
def __len__(self):
"""Number of units of this character in that graph"""
get_char_graph_avs, name, graphn, btt = self._iter_stuff
return len(get_char_graph_avs(name, graphn, *btt()))
def __getitem__(self, av):
(
get_char_graph_avs,
name,
graphn,
btt,
get_node,
graphn,
charmap,
) = self._getitem_stuff
if av in get_char_graph_avs(name, graphn, *btt()):
return get_node(charmap[graphn], av)
raise KeyError("No unit: {}".format(av))
@property
def only(self):
"""If I have only one unit, return it; else error"""
mykey = singleton_get(self.keys())
if mykey is None:
raise AttributeError("No unit, or more than one")
get_node, charmap, graphn = self._only_stuff
return get_node(charmap[graphn], mykey)
def __repr__(self):
return "{}.character[{}].unit".format(
repr(self.engine), repr(self.name)
)
|
class CharacterUnitMapping(Mapping):
'''Mapping of units of one Character in another Character.'''
def __init__(self, outer, graphn):
'''Store this character and the name of the other one'''
pass
def __iter__(self):
'''Iterate over names of unit nodes'''
pass
def __contains__(self, av):
pass
def __len__(self):
'''Number of units of this character in that graph'''
pass
def __getitem__(self, av):
pass
@property
def only(self):
'''If I have only one unit, return it; else error'''
pass
def __repr__(self):
pass
| 9 | 5 | 10 | 0 | 9 | 1 | 1 | 0.07 | 1 | 2 | 0 | 0 | 7 | 8 | 7 | 41 | 79 | 7 | 67 | 31 | 58 | 5 | 35 | 25 | 27 | 2 | 6 | 1 | 9 |
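A recurring micro-optimization is visible above: `__init__` packs the bound methods and constant arguments that each hot method will need into a tuple such as `_iter_stuff`, so the method body is a single unpack plus a call instead of a chain of attribute lookups. A toy sketch of the pattern (every name here is illustrative):

class UnitView:
    """Read-only view whose hot paths unpack precomputed lookups."""
    def __init__(self, cache, char_name, graph_name, clock):
        # Bind everything __iter__ and __len__ will need, once, up front.
        self._iter_stuff = (cache.get_units, char_name, graph_name, clock)
    def __iter__(self):
        get_units, char, graph, clock = self._iter_stuff
        return iter(get_units(char, graph, *clock()))
    def __len__(self):
        get_units, char, graph, clock = self._iter_stuff
        return len(get_units(char, graph, *clock()))

class ToyCache:
    def __init__(self, data):
        self._data = data  # (char, graph, branch, turn) -> list of units
    def get_units(self, char, graph, branch, turn):
        return self._data.get((char, graph, branch, turn), [])

cache = ToyCache({("alice", "physical", "trunk", 3): ["a1", "a2"]})
view = UnitView(cache, "alice", "physical", lambda: ("trunk", 3))
assert list(view) == ["a1", "a2"]
assert len(view) == 2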
146,406 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.UnitGraphMapping
|
class UnitGraphMapping(Mapping, RuleFollower):
"""A mapping of other characters in which one has a unit."""
_book = "unit"
engine = getatt("character.engine")
name = getatt("character.name")
def _get_rulebook_cache(self):
return self._avrc
def __init__(self, char):
"""Remember my character."""
self.character = char
self._char_av_cache = {}
engine = char.engine
self._avrc = engine._units_rulebooks_cache
self._add_av = char.add_unit
avcache = engine._unitness_cache
get_char_graphs = avcache.iter_char_graphs
charn = char.name
btt = engine._btt
self._iter_stuff = (get_char_graphs, charn, btt)
self._len_stuff = (avcache.count_entities_or_keys, charn, btt)
self._contains_stuff = (
avcache.contains_key,
charn,
btt,
)
self._node_stuff = (
self._get_char_av_cache,
avcache.get_char_only_graph,
charn,
btt,
)
self._only_stuff = (
avcache.get_char_only_unit,
charn,
btt,
engine._get_node,
engine.character,
)
def __call__(self, av):
"""Add the unit
It must be an instance of Place or Thing.
"""
if av.__class__ not in (Place, Thing):
raise TypeError("Only Things and Places may be units")
self._add_av(av.name, av.character.name)
def __iter__(self):
"""Iterate over graphs with unit nodes in them"""
get_char_graphs, charn, btt = self._iter_stuff
return iter(get_char_graphs(charn, *btt()))
def __contains__(self, k):
retrieve, charn, btt = self._contains_stuff
got = retrieve(charn, k, *btt())
return got is not None and not isinstance(got, Exception)
def __len__(self):
"""Number of graphs in which I have a unit."""
count_char_graphs, charn, btt = self._len_stuff
return count_char_graphs(charn, *btt())
def _get_char_av_cache(self, g):
if g not in self:
raise KeyError
if g not in self._char_av_cache:
self._char_av_cache[g] = self.CharacterUnitMapping(self, g)
return self._char_av_cache[g]
def __getitem__(self, g):
return self._get_char_av_cache(g)
@property
def node(self):
"""If I have units in only one graph, return a map of them
Otherwise, raise AttributeError.
"""
get_char_av_cache: MethodType
get_char_av_cache, get_char_only_graph, charn, btt = (
self._node_stuff
)
try:
return get_char_av_cache(get_char_only_graph(charn, *btt()))
except KeyError:
raise AttributeError(
"I have no unit, or I have units in many graphs"
)
@property
def only(self):
"""If I have only one unit, this is it
Otherwise, raise AttributeError.
"""
get_char_only_av, charn, btt, get_node, charmap = self._only_stuff
try:
charn, noden = get_char_only_av(charn, *btt())
return get_node(charmap[charn], noden)
except (KeyError, TypeError):
raise AttributeError("I have no unit, or more than one unit")
class CharacterUnitMapping(Mapping):
"""Mapping of units of one Character in another Character."""
def __init__(self, outer, graphn):
"""Store this character and the name of the other one"""
self.character = character = outer.character
self.engine = engine = outer.engine
self.name = name = outer.name
self.graph = graphn
avcache = engine._unitness_cache
btt = engine._btt
self._iter_stuff = iter_stuff = (
avcache.get_char_graph_units,
name,
graphn,
btt,
)
self._contains_stuff = (
avcache._base_retrieve,
name,
graphn,
btt,
)
get_node = engine._get_node
self._getitem_stuff = iter_stuff + (
get_node,
graphn,
engine.character,
)
self._only_stuff = (get_node, engine.character, graphn)
def __iter__(self):
"""Iterate over names of unit nodes"""
get_char_graph_avs, name, graphn, btt = self._iter_stuff
return iter(get_char_graph_avs(name, graphn, *btt()))
def __contains__(self, av):
base_retrieve, name, graphn, btt = self._contains_stuff
return (
base_retrieve(
(name, graphn, av, *btt()),
store_hint=False,
retrieve_hint=False,
)
is True
)
def __len__(self):
"""Number of units of this character in that graph"""
get_char_graph_avs, name, graphn, btt = self._iter_stuff
return len(get_char_graph_avs(name, graphn, *btt()))
def __getitem__(self, av):
(
get_char_graph_avs,
name,
graphn,
btt,
get_node,
graphn,
charmap,
) = self._getitem_stuff
if av in get_char_graph_avs(name, graphn, *btt()):
return get_node(charmap[graphn], av)
raise KeyError("No unit: {}".format(av))
@property
def only(self):
"""If I have only one unit, return it; else error"""
mykey = singleton_get(self.keys())
if mykey is None:
raise AttributeError("No unit, or more than one")
get_node, charmap, graphn = self._only_stuff
return get_node(charmap[graphn], mykey)
def __repr__(self):
return "{}.character[{}].unit".format(
repr(self.engine), repr(self.name)
)
|
class UnitGraphMapping(Mapping, RuleFollower):
'''A mapping of other characters in which one has a unit.'''
def _get_rulebook_cache(self):
pass
def __init__(self, char):
'''Remember my character.'''
pass
def __call__(self, av):
'''Add the unit
It must be an instance of Place or Thing.
'''
pass
def __iter__(self):
'''Iterate over graphs with unit nodes in them'''
pass
def __contains__(self, k):
pass
def __len__(self):
'''Number of graphs in which I have a unit.'''
pass
def _get_char_av_cache(self, g):
pass
def __getitem__(self, g):
pass
@property
def node(self):
'''If I have units in only one graph, return a map of them
Otherwise, raise AttributeError.
'''
pass
@property
def only(self):
'''If I have only one unit, this is it
Otherwise, raise AttributeError.
'''
pass
class CharacterUnitMapping(Mapping):
'''Mapping of units of one Character in another Character.'''
def __init__(self, char):
'''Store this character and the name of the other one'''
pass
def __iter__(self):
'''Iterate over names of unit nodes'''
pass
def __contains__(self, k):
pass
def __len__(self):
'''Number of units of this character in that graph'''
pass
def __getitem__(self, g):
pass
@property
def only(self):
'''If I have only one unit, return it; else error'''
pass
def __repr__(self):
pass
| 22 | 12 | 9 | 0 | 8 | 1 | 1 | 0.12 | 2 | 7 | 3 | 0 | 10 | 9 | 10 | 59 | 189 | 26 | 145 | 68 | 123 | 18 | 92 | 60 | 73 | 3 | 6 | 1 | 24 |
146,407 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/screen.py
|
elide.screen.KvLayout
|
class KvLayout(FloatLayout):
pass
|
class KvLayout(FloatLayout):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
146,408 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.ThingPlaceMapping
|
class ThingPlaceMapping(GraphNodeMapping, Signal):
"""GraphNodeMapping but for Place and Thing"""
_book = "character_node"
character = getatt("graph")
engine = getatt("db")
name = getatt("character.name")
def __init__(self, character):
"""Store the character."""
super().__init__(character)
Signal.__init__(self)
engine = character.engine
charn = character.name
self._contains_stuff = contains_stuff = (
engine._node_exists,
charn,
)
self._getitem_stuff = contains_stuff + (
engine._get_node,
character,
)
self._delitem_stuff = contains_stuff + (
engine._is_thing,
character.thing,
character.place,
)
self._placemap = character.place
def __contains__(self, k):
node_exists, charn = self._contains_stuff
return node_exists(charn, k)
def __getitem__(self, k):
node_exists, charn, get_node, character = self._getitem_stuff
if not node_exists(charn, k):
raise KeyError("No such node: " + str(k))
return get_node(character, k)
def __setitem__(self, k, v):
self._placemap[k] = v
def __delitem__(self, k):
(node_exists, charn, is_thing, thingmap, placemap) = (
self._delitem_stuff
)
if not node_exists(charn, k):
raise KeyError("No such node: " + str(k))
if is_thing(charn, k):
del thingmap[k]
else:
del placemap[k]
|
class ThingPlaceMapping(GraphNodeMapping, Signal):
'''GraphNodeMapping but for Place and Thing'''
def __init__(self, character):
'''Store the character.'''
pass
def __contains__(self, k):
pass
def __getitem__(self, k):
pass
def __setitem__(self, k, v):
pass
def __delitem__(self, k):
pass
| 6 | 2 | 8 | 0 | 8 | 0 | 2 | 0.05 | 2 | 3 | 0 | 0 | 5 | 4 | 5 | 58 | 53 | 7 | 44 | 19 | 38 | 2 | 31 | 19 | 25 | 3 | 10 | 1 | 8 |
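`ThingPlaceMapping` presents things and places as one node mapping and routes deletion to whichever sub-mapping actually owns the key. A standalone sketch of that dispatch-by-membership idea over two plain dicts (hypothetical names, no engine):

from collections.abc import MutableMapping

class NodeMap(MutableMapping):
    """Unified view over `things` and `places`; new writes default to places."""
    def __init__(self, things, places):
        self.things = things
        self.places = places
    def __getitem__(self, k):
        return self.things[k] if k in self.things else self.places[k]
    def __setitem__(self, k, v):
        self.places[k] = v
    def __delitem__(self, k):
        if k in self.things:
            del self.things[k]
        else:
            del self.places[k]
    def __iter__(self):
        yield from self.things
        yield from self.places
    def __len__(self):
        return len(self.things) + len(self.places)

nodes = NodeMap({"kobold": {"location": "cave"}}, {"cave": {}})
del nodes["kobold"]        # routed to the thing mapping
nodes["meadow"] = {}       # new nodes are created as places
assert set(nodes) == {"cave", "meadow"}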
146,409 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.ThingMapping
|
class ThingMapping(MutableMappingUnwrapper, RuleFollower, Signal):
""":class:`Thing` objects that are in a :class:`Character`"""
_book = "character_thing"
engine = getatt("character.engine")
name = getatt("character.name")
def _get_rulebook_cache(self):
return self.engine._characters_things_rulebooks_cache
def __init__(self, character):
"""Store the character and initialize cache."""
super().__init__()
self.character = character
def __iter__(self):
cache = self.engine._things_cache
char = self.name
branch, turn, tick = self.engine._btt()
for key in cache.iter_keys(char, branch, turn, tick):
try:
if (
cache.retrieve(char, key, branch, turn, tick)
is not None
):
yield key
except KeyError:
continue
def __contains__(self, thing):
branch, turn, tick = self.engine._btt()
args = self.character.name, thing, branch, turn, tick
cache = self.engine._things_cache
return cache.contains_key(*args)
def __len__(self):
return self.engine._things_cache.count_keys(
self.character.name, *self.engine._btt()
)
def __getitem__(self, thing):
if thing not in self:
raise KeyError("No such thing: {}".format(thing))
return self._make_thing(thing)
def _make_thing(self, thing, val=None):
cache = self.engine._node_objs
if isinstance(val, Thing):
th = cache[self.name, thing] = val
elif (self.name, thing) in cache:
th = cache[(self.name, thing)]
if type(th) is not Thing:
th = cache[self.name, thing] = Thing(self.character, thing)
else:
th = cache[(self.name, thing)] = Thing(self.character, thing)
return th
def __setitem__(self, thing, val):
if not isinstance(val, Mapping):
raise TypeError("Things are made from Mappings")
if "location" not in val:
raise ValueError("Thing needs location")
self.engine._exist_node(self.character.name, thing)
self.engine._set_thing_loc(
self.character.name, thing, val["location"]
)
th = self._make_thing(thing, val)
th.clear()
th.update({k: v for (k, v) in val.items() if k != "location"})
def __delitem__(self, thing):
self[thing].delete()
def __repr__(self):
return "{}.character[{}].thing".format(
repr(self.engine), repr(self.name)
)
|
class ThingMapping(MutableMappingUnwrapper, RuleFollower, Signal):
''':class:`Thing` objects that are in a :class:`Character`'''
def _get_rulebook_cache(self):
pass
def __init__(self, character):
'''Store the character and initialize cache.'''
pass
def __iter__(self):
pass
def __contains__(self, thing):
pass
def __len__(self):
pass
def __getitem__(self, thing):
pass
def _make_thing(self, thing, val=None):
pass
def __setitem__(self, thing, val):
pass
def __delitem__(self, thing):
pass
def __repr__(self):
pass
| 11 | 2 | 6 | 0 | 6 | 0 | 2 | 0.03 | 3 | 5 | 1 | 0 | 10 | 1 | 10 | 68 | 78 | 12 | 64 | 25 | 53 | 2 | 53 | 25 | 42 | 4 | 8 | 3 | 19 |
146,410 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.PortalSuccessorsMapping
|
class PortalSuccessorsMapping(DiGraphSuccessorsMapping, RuleFollower):
"""Mapping of nodes that have at least one outgoing edge.
Maps them to another mapping, keyed by the destination nodes,
which maps to Portal objects.
"""
_book = "character_portal"
character = getatt("graph")
engine = getatt("graph.engine")
def __init__(self, graph):
super().__init__(graph)
engine = graph.engine
charn = graph.name
self._cporh = engine._characters_portals_rulebooks_cache
self._getitem_stuff = (engine._node_exists, charn, self._cache)
self._setitem_stuff = (self._cache, self.Successors)
def _get_rulebook_cache(self):
return self._cporh
def __getitem__(self, orig):
node_exists, charn, cache = self._getitem_stuff
if node_exists(charn, orig):
if orig not in cache:
cache[orig] = self.Successors(self, orig)
return cache[orig]
raise KeyError("No such node")
def __delitem__(self, orig):
super().__delitem__(orig)
def update(self, other, **kwargs):
"""Recursively update the stats of all portals
Input should be a dictionary of dictionaries of dictionaries
--just like networkx ``DiGraph._edge``.
This will create portals as needed, but will only delete
them if you set their value to ``None``. Likewise, stats
not specified in the input will be left untouched, if they
are already present, but you can set them to ``None`` to
delete them.
"""
engine = self.engine
planning = engine._planning
forward = engine._forward
branch, turn, start_tick = engine._btt()
exist_edge = engine.query.exist_edge
edge_val_set = engine.query.edge_val_set
store_edge = engine._edges_cache.store
store_edge_val = engine._edge_val_cache.store
iter_edge_keys = engine._edge_val_cache.iter_entity_keys
charn = self.character.name
tick = start_tick + 1
with timer(
"seconds spent updating PortalSuccessorsMapping", engine.debug
):
for orig, dests in chain(other.items(), kwargs.items()):
for dest, kvs in dests.items():
if kvs is None:
for k in iter_edge_keys(
charn,
orig,
dest,
0,
branch,
turn,
start_tick,
forward=forward,
):
store_edge_val(
charn,
orig,
dest,
0,
k,
branch,
turn,
tick,
None,
planning=planning,
forward=forward,
loading=True,
)
edge_val_set(
charn,
orig,
dest,
0,
k,
branch,
turn,
tick,
None,
)
tick += 1
store_edge(
charn,
orig,
dest,
0,
branch,
turn,
tick,
False,
planning=planning,
forward=forward,
loading=True,
)
exist_edge(
charn, orig, dest, 0, branch, turn, tick, False
)
tick += 1
else:
store_edge(
charn,
orig,
dest,
0,
branch,
turn,
tick,
True,
planning=planning,
forward=forward,
loading=True,
)
exist_edge(
charn, orig, dest, 0, branch, turn, tick, True
)
tick += 1
for k, v in kvs.items():
store_edge_val(
charn,
orig,
dest,
0,
k,
branch,
turn,
tick,
v,
planning=planning,
forward=forward,
loading=True,
)
edge_val_set(
charn,
orig,
dest,
0,
k,
branch,
turn,
tick,
v,
)
tick += 1
parent, start_turn, start_tick, end_turn, _ = (
self.engine._branches[branch]
)
self.engine._branches[branch] = (
parent,
start_turn,
start_tick,
end_turn,
tick,
)
self.engine._turn_end_plan[branch, turn] = tick
if not self.engine._planning:
self.engine._turn_end[branch, turn] = tick
self.engine.tick = tick
class Successors(DiGraphSuccessorsMapping.Successors):
"""Mapping for possible destinations from some node."""
engine = getatt("graph.engine")
def __init__(self, container, orig):
super().__init__(container, orig)
graph = self.graph
engine = graph.engine
self._getitem_stuff = (engine._get_edge, graph, orig)
self._setitem_stuff = (
engine._edge_exists,
engine._exist_edge,
graph.name,
orig,
engine._get_edge,
graph,
engine.query.edge_val_set,
engine._edge_val_cache.store,
engine._nbtt,
)
def __getitem__(self, dest):
get_edge, graph, orig = self._getitem_stuff
if dest in self:
return get_edge(graph, orig, dest, 0)
raise KeyError("No such portal: {}->{}".format(orig, dest))
def __setitem__(self, dest, value):
if value is None:
del self[dest]
return
(
edge_exists,
exist_edge,
charn,
orig,
get_edge,
graph,
db_edge_val_set,
edge_val_cache_store,
nbtt,
) = self._setitem_stuff
exist_edge(charn, orig, dest)
for k, v in value.items():
branch, turn, tick = nbtt()
db_edge_val_set(
charn, orig, dest, 0, k, branch, turn, tick, v
)
edge_val_cache_store(
charn, orig, dest, 0, k, branch, turn, tick, v
)
def __delitem__(self, dest):
if dest not in self:
raise KeyError("No portal to {}".format(dest))
self[dest].delete()
def update(self, other, **kwargs):
charn = self.graph.name
orig = self.orig
engine = self.engine
store_edge = engine._edges_cache.store
exist_edge = engine.query.exist_edge
store_edge_val = engine._edge_val_cache.store
set_edge_val = engine.query.edge_val_set
iter_edge_keys = engine._edge_val_cache.iter_entity_keys
planning = engine._planning
forward = engine._forward
branch, turn, start_tick = engine._btt()
if start_tick < engine._turn_end_plan[branch, turn]:
raise RuntimeError(
"Tried to update successors in the past"
)
tick = start_tick + 1
with self.db.world_lock:
for dest, val in chain(other.items(), kwargs.items()):
if val is None:
for k in iter_edge_keys(
charn, orig, dest, 0, branch, turn, start_tick
):
store_edge_val(
charn,
orig,
dest,
0,
k,
branch,
turn,
tick,
None,
planning=planning,
forward=forward,
loading=True,
)
set_edge_val(
charn,
orig,
dest,
0,
k,
branch,
turn,
tick,
None,
)
tick += 1
store_edge(
charn,
orig,
dest,
0,
branch,
turn,
tick,
None,
planning=planning,
forward=forward,
loading=True,
)
exist_edge(
charn, orig, dest, 0, branch, turn, tick, None
)
tick += 1
else:
store_edge(
charn,
orig,
dest,
0,
branch,
turn,
tick,
True,
planning=planning,
forward=forward,
loading=True,
)
exist_edge(
charn, orig, dest, 0, branch, turn, tick, True
)
tick += 1
for key, value in val.items():
store_edge_val(
charn,
orig,
dest,
0,
key,
branch,
turn,
tick,
value,
)
set_edge_val(
charn,
orig,
dest,
0,
key,
branch,
turn,
tick,
value,
planning=planning,
forward=forward,
loading=True,
)
tick += 1
parent, start_turn, start_tick, end_turn, _ = (
self.db._branches[branch]
)
self.db._branches[branch] = (
parent,
start_turn,
start_tick,
end_turn,
tick,
)
self.db._turn_end_plan[branch, turn] = tick
if not planning:
self.db._turn_end[branch, turn] = tick
turn_from, tick_from, turn_to, _ = self.db._loaded[branch]
self.db._loaded[branch] = (
turn_from,
tick_from,
turn_to,
tick,
)
self.db._otick = tick
|
class PortalSuccessorsMapping(DiGraphSuccessorsMapping, RuleFollower):
'''Mapping of nodes that have at least one outgoing edge.
Maps them to another mapping, keyed by the destination nodes,
which maps to Portal objects.
'''
def __init__(self, graph):
pass
def _get_rulebook_cache(self):
pass
def __getitem__(self, orig):
pass
def __delitem__(self, orig):
pass
def update(self, other, **kwargs):
'''Recursively update the stats of all portals
Input should be a dictionary of dictionaries of dictionaries
--just like networkx ``DiGraph._edge``.
This will create portals as needed, but will only delete
them if you set their value to ``None``. Likewise, stats
not specified in the input will be left untouched, if they
are already present, but you can set them to ``None`` to
delete them.
'''
pass
class Successors(DiGraphSuccessorsMapping.Successors):
'''Mapping for possible destinations from some node.'''
def __init__(self, container, orig):
pass
def __getitem__(self, dest):
pass
def __setitem__(self, dest, value):
pass
def __delitem__(self, dest):
pass
def update(self, other, **kwargs):
pass
| 12 | 3 | 34 | 0 | 33 | 1 | 3 | 0.04 | 2 | 4 | 1 | 0 | 5 | 3 | 5 | 74 | 368 | 19 | 335 | 71 | 323 | 14 | 125 | 63 | 113 | 7 | 12 | 5 | 28 |
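A minimal usage sketch for the nested-dict ``update`` documented above, assuming a lisien ``Engine`` instance ``eng`` and a character named "physical" (both names are hypothetical, not taken from the record):
physical = eng.character["physical"]
# Dict of dicts of dicts, like networkx DiGraph._edge: origin -> destination -> stats
physical.portal.update({
    "cave_mouth": {
        "tunnel": {"length": 3},   # creates the portal if needed, then sets a stat
        "old_exit": None,          # None deletes the whole portal
    },
})
physical.portal["cave_mouth"]["tunnel"]["length"] = 4  # per-portal stat access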
146,411 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/pallet.py
|
elide.pallet.PalletBox
|
class PalletBox(BoxLayout):
pallets = ListProperty()
|
class PalletBox(BoxLayout):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
146,412 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/menu.py
|
elide.menu.GeneratorButton
|
class GeneratorButton(Button):
pass
|
class GeneratorButton(Button):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
146,413 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.PortalPredecessorsMapping.Predecessors
|
class Predecessors(DiGraphPredecessorsMapping.Predecessors):
"""Mapping of possible origins from some destination."""
def __init__(self, container, dest):
super().__init__(container, dest)
graph = self.graph
self._setitem_stuff = (
graph,
graph.name,
dest,
self.db._edge_objs,
)
def __setitem__(self, orig, value):
graph, graph_name, dest, portal_objs = self._setitem_stuff
key = (graph_name, orig, dest)
if key not in portal_objs:
portal_objs[key] = Portal(graph, orig, dest)
p = portal_objs[key]
p.clear()
p.update(value)
p.engine._exist_edge(graph_name, dest, orig)
|
class Predecessors(DiGraphPredecessorsMapping.Predecessors):
'''Mapping of possible origins from some destination.'''
def __init__(self, container, dest):
pass
def __setitem__(self, orig, value):
pass
| 3 | 1 | 9 | 0 | 9 | 0 | 2 | 0.05 | 1 | 2 | 1 | 0 | 2 | 1 | 2 | 57 | 22 | 2 | 19 | 8 | 16 | 1 | 14 | 8 | 11 | 2 | 11 | 1 | 3 |
146,414 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.PortalPredecessorsMapping
|
class PortalPredecessorsMapping(DiGraphPredecessorsMapping, RuleFollower):
"""Mapping of nodes that have at least one incoming edge.
Maps to another mapping keyed by the origin nodes, which maps to
Portal objects.
"""
_book = "character_portal"
def __init__(self, graph):
super().__init__(graph)
self._cporc = graph.engine._characters_portals_rulebooks_cache
def _get_rulebook_cache(self):
return self._cporc
class Predecessors(DiGraphPredecessorsMapping.Predecessors):
"""Mapping of possible origins from some destination."""
def __init__(self, container, dest):
super().__init__(container, dest)
graph = self.graph
self._setitem_stuff = (
graph,
graph.name,
dest,
self.db._edge_objs,
)
def __setitem__(self, orig, value):
graph, graph_name, dest, portal_objs = self._setitem_stuff
key = (graph_name, orig, dest)
if key not in portal_objs:
portal_objs[key] = Portal(graph, orig, dest)
p = portal_objs[key]
p.clear()
p.update(value)
p.engine._exist_edge(graph_name, dest, orig)
|
class PortalPredecessorsMapping(DiGraphPredecessorsMapping, RuleFollower):
'''Mapping of nodes that have at least one incoming edge.
Maps to another mapping keyed by the origin nodes, which maps to
Portal objects.
'''
def __init__(self, graph):
pass
def _get_rulebook_cache(self):
pass
class Predecessors(DiGraphPredecessorsMapping.Predecessors):
'''Mapping of possible origins from some destination.'''
def __init__(self, container, dest):
pass
def __setitem__(self, orig, value):
pass
| 6 | 2 | 6 | 0 | 6 | 0 | 1 | 0.19 | 2 | 1 | 0 | 0 | 2 | 1 | 2 | 70 | 39 | 8 | 26 | 13 | 20 | 5 | 21 | 13 | 15 | 2 | 11 | 1 | 5 |
146,415 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/character.py
|
lisien.character.Character.PlaceMapping
|
class PlaceMapping(MutableMappingUnwrapper, RuleFollower, Signal):
""":class:`Place` objects that are in a :class:`Character`"""
_book = "character_place"
def _get_rulebook_cache(self):
return self.engine._characters_places_rulebooks_cache
def update(self, __m: dict, **kwargs) -> None:
self.character.node.update(__m, **kwargs)
def __init__(self, character):
"""Store the character."""
super().__init__()
self.character = character
self.engine = engine = character.engine
charn = character.name
nodes_cache = engine._nodes_cache
things_cache = engine._things_cache
iter_nodes = nodes_cache.iter_entities
nodes_contains = nodes_cache.contains_entity
things_contains = things_cache.contains_entity
btt = engine._btt
self._iter_stuff = (iter_nodes, things_contains, charn, btt)
self._len_stuff = (
nodes_cache.count_entities,
things_cache.count_entities,
charn,
btt,
)
self._contains_stuff = (
nodes_contains,
things_contains,
charn,
btt,
)
self._get_stuff = self._contains_stuff + (
engine._node_objs,
character,
)
self._set_stuff = (
engine._node_exists,
engine._exist_node,
engine._get_node,
charn,
character,
)
def __iter__(self):
iter_nodes, things_contains, charn, btt = self._iter_stuff
branch, turn, tick = btt()
for node in iter_nodes(charn, branch, turn, tick):
if not things_contains(charn, node, branch, turn, tick):
yield node
def __len__(self):
count_nodes, count_things, charn, btt = self._len_stuff
branch, turn, tick = btt()
return count_nodes(charn, branch, turn, tick) - count_things(
charn, branch, turn, tick
)
def __contains__(self, place):
nodes_contains, things_contains, charn, btt = self._contains_stuff
branch, turn, tick = btt()
return nodes_contains(
charn, place, branch, turn, tick
) and not things_contains(charn, place, branch, turn, tick)
def __getitem__(self, place):
(nodes_contains, things_contains, charn, btt, cache, character) = (
self._get_stuff
)
branch, turn, tick = btt()
if not nodes_contains(
charn, place, branch, turn, tick
) or things_contains(charn, place, branch, turn, tick):
raise KeyError("No such place: {}".format(place))
if (charn, place) not in cache or not isinstance(
cache[(charn, place)], Place
):
ret = cache[(charn, place)] = Place(character, place)
return ret
return cache[(charn, place)]
def __setitem__(self, place, v):
(node_exists, exist_node, get_node, charn, character) = (
self._set_stuff
)
exist_node(charn, place, True)
pl = get_node(character, place)
if not isinstance(pl, Place):
raise KeyError(
"{} is a {}, not a place".format(place, type(pl).__name__)
)
pl.update(v)
def __delitem__(self, place):
self[place].delete()
def __repr__(self):
return "{}.character[{}].place".format(
repr(self.character.engine), repr(self.character.name)
)
|
class PlaceMapping(MutableMappingUnwrapper, RuleFollower, Signal):
''':class:`Place` objects that are in a :class:`Character`'''
def _get_rulebook_cache(self):
pass
def update(self, __m: dict, **kwargs) -> None:
pass
def __init__(self, character):
'''Store the character.'''
pass
def __iter__(self):
pass
def __len__(self):
pass
def __contains__(self, place):
pass
def __getitem__(self, place):
pass
def __setitem__(self, place, v):
pass
def __delitem__(self, place):
pass
def __repr__(self):
pass
| 11 | 2 | 9 | 0 | 9 | 0 | 2 | 0.02 | 3 | 4 | 1 | 0 | 10 | 7 | 10 | 68 | 104 | 11 | 91 | 38 | 80 | 2 | 56 | 38 | 45 | 3 | 8 | 2 | 15 |
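A short sketch of how this place mapping is typically used, assuming a lisien ``Engine`` instance ``eng`` and a character named "physical" (hypothetical names):
char = eng.character["physical"]
char.place["cave_mouth"] = {"wet": True}   # creates the place if absent, then updates its stats
assert "cave_mouth" in char.place          # membership excludes nodes that are things
del char.place["cave_mouth"]               # delegates to Place.delete()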
146,416 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/kwlist.py
|
elide.kwlist.KeywordListModal
|
class KeywordListModal(ModalView):
data = ListProperty([])
|
class KeywordListModal(ModalView):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
146,417 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/grid/board.py
|
elide.grid.board.GridBoardView
|
class GridBoardView(BoardView):
pass
|
class GridBoardView(BoardView):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
146,418 |
LogicalDash/LiSE
|
LogicalDash_LiSE/elide/elide/graph/board.py
|
elide.graph.board.KvLayoutBack
|
class KvLayoutBack(FloatLayout):
"""What to show behind the graph.
By default, shows nothing.
"""
|
class KvLayoutBack(FloatLayout):
'''What to show behind the graph.
By default, shows nothing.
'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 2 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 1 | 0 | 0 |
146,419 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.EngineFacade.FacadeCache
|
class FacadeCache(Cache):
def __init__(self, cache, name):
self._created = cache.db._btt()
super().__init__(cache.db, name)
self._real = cache
def retrieve(self, *args, search=False):
try:
return super().retrieve(*args, search=search)
except (NotInKeyframeError, TotalKeyError):
return self._real.retrieve(*args, search=search)
def _get_keycache(
self, parentity, branch, turn, tick, forward: bool = None
):
if forward is None:
forward = self._real.db._forward
# Find the last effective keycache before the facade was created.
# Get the additions and deletions since then.
# Apply those to the keycache and return it.
kc = set(
self._real._get_keycache(
parentity, *self._created, forward=forward
)
)
added, deleted = self._get_adds_dels(
parentity, branch, turn, tick, stoptime=self._created
)
return frozenset((kc | added) - deleted)
|
class FacadeCache(Cache):
def __init__(self, cache, name):
pass
def retrieve(self, *args, search=False):
pass
def _get_keycache(
self, parentity, branch, turn, tick, forward: bool = None
):
pass
| 4 | 0 | 9 | 0 | 8 | 1 | 2 | 0.13 | 1 | 6 | 2 | 1 | 3 | 2 | 3 | 30 | 29 | 2 | 24 | 10 | 18 | 3 | 16 | 8 | 12 | 2 | 1 | 1 | 5 |
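A plain-Python illustration of the keycache reconstruction in ``_get_keycache`` above (the values are made up):
kc = {"a", "b", "c"}            # keys as of the moment the facade was created
added, deleted = {"d"}, {"b"}   # additions and deletions recorded since then
assert frozenset((kc | added) - deleted) == frozenset({"a", "c", "d"})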
146,420 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/stores.py
|
elide.stores.FuncEditor
|
class FuncEditor(Editor):
"""The editor widget for working with any particular function.
Contains a one-line field for the function's name and a multi-line
field for its code.
"""
storelist = ObjectProperty()
"""Instance of ``StoreList`` that shows all the functions you can edit"""
codeinput = ObjectProperty()
params = ListProperty(["obj"])
name = StringProperty()
_text = StringProperty()
_do_parse = True
def _get_source(self):
code = self.get_default_text(self.name)
if self._text:
code += indent(self._text, " " * 4)
else:
code += " " * 4 + "pass"
return code.rstrip(" \n\t")
def _set_source(self, v):
if not self.codeinput:
Clock.schedule_once(partial(self._set_source, v), 0)
return
self.codeinput.unbind(text=self.setter("_text"))
self.params, self.codeinput.text = munge_source(str(v))
self.codeinput.bind(text=self.setter("_text"))
source = AliasProperty(_get_source, _set_source, bind=("_text", "params"))
def get_default_text(self, name):
if not name or name == "+":
name = "a"
return "def {}({}):\n".format(name, ", ".join(self.params))
def on_codeinput(self, *args):
self._text = self.codeinput.text
self.codeinput.bind(text=self.setter("_text"))
|
class FuncEditor(Editor):
'''The editor widget for working with any particular function.
Contains a one-line field for the function's name and a multi-line
field for its code.
'''
def _get_source(self):
pass
def _set_source(self, v):
pass
def get_default_text(self, name):
pass
def on_codeinput(self, *args):
pass
| 5 | 1 | 5 | 0 | 5 | 0 | 2 | 0.17 | 1 | 2 | 0 | 0 | 4 | 0 | 4 | 6 | 42 | 8 | 29 | 13 | 24 | 5 | 28 | 13 | 23 | 2 | 2 | 1 | 7 |
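A standalone sketch of how the ``source`` property composes a function header with an indented body; the helper name ``compose`` is illustrative and not part of the widget's API:
from textwrap import indent

def compose(name, params, body):
    # Mirrors get_default_text() plus the body handling in _get_source()
    code = "def {}({}):\n".format(name or "a", ", ".join(params))
    code += indent(body, " " * 4) if body else " " * 4 + "pass"
    return code.rstrip(" \n\t")

print(compose("greet", ["obj"], "return 'hi, ' + str(obj)"))
# def greet(obj):
#     return 'hi, ' + str(obj)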
146,421 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/facade.py
|
lisien.facade.EngineFacade.FacadeUnitnessCache
|
class FacadeUnitnessCache(FacadeCache, UnitnessCache):
def __init__(self, cache):
self._created = cache.db._btt()
UnitnessCache.__init__(self, cache.db)
self.user_cache = EngineFacade.FacadeCache(
cache.user_cache, "user_cache"
)
self._real = cache
|
class FacadeUnitnessCache(FacadeCache, UnitnessCache):
def __init__(self, cache):
pass
| 2 | 0 | 7 | 0 | 7 | 0 | 1 | 0 | 2 | 1 | 1 | 0 | 1 | 3 | 1 | 38 | 8 | 0 | 8 | 5 | 6 | 0 | 6 | 5 | 4 | 1 | 2 | 0 | 1 |
146,422 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/graph/arrow.py
|
elide.graph.arrow.GraphArrowWidget
|
class GraphArrowWidget(Widget, GraphArrow):
arrowhead_size = NumericProperty(10)
arrow_width = NumericProperty(2)
bg_scale = NumericProperty(5)
bg_color = ListProperty([0.5, 0.5, 0.5, 0.5])
fg_color = ListProperty([1.0, 1.0, 1.0, 1.0])
def __init__(self, **kwargs):
self._trigger_repoint = Clock.create_trigger(self.repoint)
super().__init__(**kwargs)
def on_origin(self, *args):
if hasattr(self, "_origin_bind_uid"):
self.unbind_uid(self._origin_bind_uid)
del self._origin_bind_uid
if not self.origin:
return
self._origin_bind_uid = self.origin.fbind("pos", self._trigger_repoint)
def on_destination(self, *args):
if hasattr(self, "_destination_bind_uid"):
self.unbind_uid(self._destination_bind_uid)
del self._destination_bind_uid
if not self.destination:
return
self._destination_bind_uid = self.destination.fbind(
"pos", self._trigger_repoint
)
def on_parent(self, *args):
if not self.origin or not self.destination or not self.canvas:
Clock.schedule_once(self.on_parent, 0)
return
self.canvas.clear()
if self.parent is None:
return
shaft_points, head_points = get_points(
self.origin, self.destination, self.arrowhead_size
)
with self.canvas:
r = self.arrow_width / 2
self._instructions = get_instructions(
*shaft_points,
*head_points,
r * self.bg_scale,
r,
self.bg_color,
self.fg_color,
)
def repoint(self, *args):
shaft_points, head_points = get_points(
self.origin, self.destination, self.arrowhead_size
)
r = self.arrow_width / 2
try:
portal = self.board.character.portal[self.origin.name][
self.destination.name
]
portal_text = str(portal.get(portal.get("_label_stat", None), ""))
except (KeyError, AttributeError):
portal_text = ""
if hasattr(self, "_label"):
label = self._label
label.text = portal_text
else:
label = self._label = Label(text=portal_text)
label_size = label.render()
verts = get_quad_vertices(
*shaft_points, *head_points, r * self.bg_scale, r, *label_size
)
insts = self._instructions
insts["color0"].rgba = self.bg_color
insts["color1"].rgba = self.fg_color
insts["shaft_bg"].points = verts["shaft_bg"]
insts["left_head_bg"].points = verts["left_head_bg"]
insts["right_head_bg"].points = verts["right_head_bg"]
insts["shaft_fg"].points = verts["shaft_fg"]
insts["left_head_fg"].points = verts["left_head_fg"]
insts["right_head_fg"].points = verts["right_head_fg"]
insts["label_rect"].pos = verts["label_pos"]
insts["label_rect"].size = label_size
label.refresh()
insts["label_rect"].texture = label.texture
|
class GraphArrowWidget(Widget, GraphArrow):
def __init__(self, **kwargs):
pass
def on_origin(self, *args):
pass
def on_destination(self, *args):
pass
def on_parent(self, *args):
pass
def repoint(self, *args):
pass
| 6 | 0 | 15 | 0 | 15 | 0 | 3 | 0 | 2 | 4 | 0 | 0 | 5 | 5 | 5 | 14 | 84 | 5 | 79 | 26 | 73 | 0 | 61 | 25 | 55 | 3 | 1 | 1 | 13 |
146,423 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/graph/board.py
|
elide.graph.board.FinalLayout
|
class FinalLayout(FloatLayout):
def finalize_all(self, *args):
for child in self.children:
child.finalize()
self.bind(children=self._trigger_finalize_all)
_trigger_finalize_all = trigger(finalize_all)
|
class FinalLayout(FloatLayout):
def finalize_all(self, *args):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 7 | 1 | 6 | 4 | 4 | 0 | 6 | 4 | 4 | 2 | 1 | 1 | 2 |
146,424 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/graph/board.py
|
elide.graph.board.GraphBoard
|
class GraphBoard(RelativeLayout):
"""A graphical view onto a :class:`lisien.Character`, resembling a game
graph.
"""
app = ObjectProperty()
character = ObjectProperty()
wallpaper_path = StringProperty()
spot = DictProperty({})
pawn = DictProperty({})
arrow = DictProperty({})
pred_arrow = DictProperty({})
wallpaper = ObjectProperty()
kvlayoutback = ObjectProperty()
arrow_plane = ObjectProperty()
stack_plane = ObjectProperty()
kvlayoutfront = ObjectProperty()
wids = ReferenceListProperty(
wallpaper, kvlayoutback, arrow_plane, stack_plane, kvlayoutfront
)
spots_unposd = ListProperty([])
layout_tries = NumericProperty(5)
tracking_vel = BooleanProperty(False)
selection_candidates = ListProperty([])
selection = ObjectProperty(allownone=True)
keep_selection = ObjectProperty(False)
adding_portal = BooleanProperty(False)
reciprocal_portal = BooleanProperty(False)
grabbing = BooleanProperty(True)
grabbed = ObjectProperty(None, allownone=True)
spot_cls = ObjectProperty(Stack)
pawn_cls = ObjectProperty(Stack)
arrow_cls = GraphArrow
proto_arrow_cls = ObjectProperty(GraphArrowWidget)
_scheduled_rm_spot = DictProperty()
_scheduled_rm_arrow = DictProperty()
_scheduled_discard_pawn = DictProperty()
_scheduled_add_pawn = DictProperty()
@property
def widkwargs(self):
return {"size_hint": (None, None), "size": self.size, "pos": (0, 0)}
def on_touch_down(self, touch):
"""Check for collisions and select an appropriate entity."""
if hasattr(self, "_lasttouch") and self._lasttouch == touch:
return
touch.push()
touch.apply_transform_2d(self.to_local)
if not self.collide_point(*touch.pos):
touch.pop()
return
if self.app.selection:
if self.app.selection.collide_point(*touch.pos):
Logger.debug("Board: hit selection")
if hasattr(self.app.selection, "__self__"):
touch.grab(self.app.selection)
else:
if hasattr(self.app.selection, "selected"):
self.app.selection.selected = False
self.app.selection = None
pawns = list(self.pawns_at(*touch.pos))
if pawns:
Logger.debug("Board: hit {} pawns".format(len(pawns)))
self.selection_candidates = pawns
if self.app.selection in self.selection_candidates:
self.selection_candidates.remove(self.app.selection)
touch.pop()
return True
spots = list(self.spots_at(*touch.pos))
if spots:
Logger.debug("Board: hit {} spots".format(len(spots)))
self.selection_candidates = spots
if self.adding_portal:
self.origspot = self.selection_candidates.pop(0)
self.protodest = Dummy(
name="protodest", pos=touch.pos, size=(0, 0)
)
self.add_widget(self.protodest)
self.protodest.on_touch_down(touch)
self.protoportal = self.proto_arrow_cls(
board=self,
origin=self.origspot,
destination=self.protodest,
)
self.add_widget(self.protoportal)
if self.reciprocal_portal:
self.protoportal2 = self.proto_arrow_cls(
board=self,
destination=self.origspot,
origin=self.protodest,
)
self.add_widget(self.protoportal2)
touch.pop()
return True
edges = list(self.arrow_plane.iter_collided_edges(*touch.pos))
if edges:
Logger.debug("Board: hit {} arrows".format(len(edges)))
self.selection_candidates = [
self.arrow[orig][dest] for (orig, dest) in edges
]
if self.app.selection in self.selection_candidates:
self.selection_candidates.remove(self.app.selection)
if (
isinstance(self.app.selection, GraphArrow)
and self.app.selection.reciprocal in self.selection_candidates
):
self.selection_candidates.remove(self.app.selection.reciprocal)
touch.pop()
return True
touch.pop()
def on_touch_move(self, touch):
"""If an entity is selected, drag it."""
if hasattr(self, "_lasttouch") and self._lasttouch == touch:
return
if self.app.selection in self.selection_candidates:
self.selection_candidates.remove(self.app.selection)
if self.app.edit_locked:
return
elif self.app.selection:
if not self.selection_candidates:
self.keep_selection = True
ret = super().on_touch_move(touch)
sel = self.app.selection
if isinstance(sel, Stack):
sel.center = touch.pos
name = sel.proxy.name
touch.ud["grabbed_pawn_or_spot"] = name
for dest in self.arrow.get(name, ()):
self.arrow[name][dest].repoint()
for orig in self.pred_arrow.get(name, ()):
self.arrow[orig][name].repoint()
for thing in sel.proxy.contents():
pawn = self.pawn[thing["name"]]
pawn.pos = sel.right, sel.top # real layout needed
return ret
elif self.selection_candidates:
for cand in self.selection_candidates:
if cand.collide_point(*touch.pos):
self.app.selection = cand
cand.selected = True
if isinstance(cand, Widget):
touch.grab(cand)
ret = super().on_touch_move(touch)
return ret
if hasattr(self, "protodest"):
self.protodest.pos = touch.pos
self.protoportal._trigger_repoint()
if hasattr(self, "protoportal2"):
self.protoportal2._trigger_repoint()
def portal_touch_up(self, touch):
"""Try to create a portal between the spots the user chose."""
try:
# If the touch ended upon a spot, and there isn't
# already a portal between the origin and this
# destination, create one.
destspot = next(self.spots_at(*touch.pos))
orig = self.origspot.proxy
dest = destspot.proxy
if orig != dest and not (
orig.name in self.character.portal
and dest.name in self.character.portal[orig.name]
):
symmetrical = hasattr(self, "protoportal2") and not (
orig.name in self.character.preportal
and dest.name in self.character.preportal[orig.name]
)
port = self.character.new_portal(
orig.name, dest.name, symmetrical=symmetrical
)
self.arrow_plane.add_new_portal(self.make_arrow(port))
if orig.name not in self.arrow:
self.arrow[orig.name] = {}
if dest.name not in self.pred_arrow:
self.pred_arrow[dest.name] = {}
self.arrow[orig.name][dest.name] = self.pred_arrow[dest.name][
orig.name
] = GraphArrow(
board=self,
origin=self.spot[orig.name],
destination=self.spot[dest.name],
)
if symmetrical:
self.arrow_plane.add_new_portal(
self.make_arrow(
self.character.portal[dest.name][orig.name]
)
)
if dest.name not in self.arrow:
self.arrow[dest.name] = {}
if orig.name not in self.pred_arrow:
self.pred_arrow[orig.name] = {}
self.arrow[dest.name][orig.name] = self.pred_arrow[
orig.name
][dest.name] = GraphArrow(
board=self,
origin=self.spot[dest.name],
destination=self.spot[orig.name],
)
except StopIteration:
pass
self.remove_widget(self.protoportal)
del self.protoportal
if hasattr(self, "protoportal2"):
self.remove_widget(self.protoportal2)
del self.protoportal2
del self.protodest
def on_touch_up(self, touch):
"""Delegate touch handling if possible, else select something."""
def unsel_graph_arrow():
origspot = self.app.selection.origin
destspot = self.app.selection.destination
insts = self.arrow_plane._instructions_map[
origspot.name, destspot.name
]
fbo = self.arrow_plane._fbo
fbo.bind()
insts["color0"].rgba = self.arrow_plane.bg_color_unselected
insts["color1"].rgba = self.arrow_plane.fg_color_unselected
fbo.clear_buffer()
fbo.release()
self.arrow_plane.canvas.ask_update()
if hasattr(self, "_lasttouch") and self._lasttouch == touch:
return
self._lasttouch = touch
touch.push()
touch.apply_transform_2d(self.to_local)
if hasattr(self, "protodest"):
Logger.debug("Board: on_touch_up making a portal")
touch.ungrab(self)
ret = self.portal_touch_up(touch)
touch.pop()
return ret
if self.app.selection:
sel = self.app.selection
if isinstance(sel, Widget):
sel.dispatch("on_touch_up", touch)
elif (
isinstance(sel, Stack)
and hasattr(sel, "proxy")
and hasattr(sel.proxy, "name")
and "grabbed_pawn_or_spot" in touch.ud
and sel.proxy.name == touch.ud["grabbed_pawn_or_spot"]
and not self.app.edit_locked
):
if hasattr(sel.proxy, "location"):
for candidate in self.stack_plane.iter_collided_keys(
*touch.pos
):
if candidate in self.spot:
newloc = self.character.place[candidate]
sel.proxy.location = newloc
newspot = self.spot[candidate]
sel.pos = newspot.right, newspot.top
return
oldloc = sel.proxy.location
oldspot = self.spot[oldloc.name]
sel.pos = oldspot.right, oldspot.top
return
else:
prox = sel.proxy
prox["_x"] = sel.x / self.width
prox["_y"] = sel.y / self.height
for candidate in self.selection_candidates:
if candidate.collide_point(*touch.pos):
if isinstance(candidate, GraphArrow):
portal = self.character.portal[candidate.origin.name][
candidate.destination.name
]
insts = self.arrow_plane._instructions_map[
portal["origin"], portal["destination"]
]
fbo = self.arrow_plane._fbo
fbo.bind()
insts["color0"].rgba = self.arrow_plane.bg_color_selected
insts["color1"].rgba = self.arrow_plane.fg_color_selected
fbo.clear_buffer()
fbo.release()
self.arrow_plane.canvas.ask_update()
if hasattr(candidate, "selected"):
candidate.selected = True
if (
hasattr(self.app.selection, "selected")
and self.app.selection != candidate
):
self.app.selection.selected = False
if isinstance(self.app.selection, GraphArrow):
unsel_graph_arrow()
self.app.selection = candidate
self.keep_selection = True
break
if not self.keep_selection:
Logger.debug("Board: deselecting " + repr(self.app.selection))
if hasattr(self.app.selection, "selected"):
self.app.selection.selected = False
if isinstance(self.app.selection, GraphArrow):
unsel_graph_arrow()
self.app.selection = None
self.keep_selection = False
touch.ungrab(self)
touch.pop()
return
def _pull_size(self, *args):
if self.wallpaper.texture is None:
Clock.schedule_once(self._pull_size, 0.001)
return
self.size = self.wallpaper.size = self.wallpaper.texture.size
def _pull_image(self, *args):
self.wallpaper.source = self.wallpaper_path
self._pull_size()
def on_parent(self, *args):
"""Create some subwidgets and trigger the first update."""
if not self.parent or hasattr(self, "_parented"):
return
if not self.wallpaper_path:
Logger.debug("Board: waiting for wallpaper_path")
Clock.schedule_once(self.on_parent, 0)
return
self._parented = True
self.wallpaper = Image(source=self.wallpaper_path)
self.bind(wallpaper_path=self._pull_image)
self._pull_size()
self.kvlayoutback = KvLayoutBack(**self.widkwargs)
self.arrow_plane = ArrowPlane(**self.widkwargs)
self.stack_plane = TextureStackPlane(**self.widkwargs)
self.kvlayoutfront = KvLayoutFront(**self.widkwargs)
for wid in self.wids:
self.add_widget(wid)
wid.pos = 0, 0
wid.size = self.size
if wid is not self.wallpaper:
self.bind(size=wid.setter("size"))
self.update()
def on_character(self, *args):
if self.character is None or self.character.engine.closed:
return
if self.parent is None:
Clock.schedule_once(self.on_character, 0)
return
wallpaper_path = self.character.stat.setdefault(
"wallpaper", "parchmentBasic.png"
)
self.engine = getattr(self.character, "engine", None)
if (
"_control" not in self.character.stat
or "wallpaper" not in self.character.stat["_control"]
):
control = self.character.stat.get("_control", {})
control["wallpaper"] = "textinput"
self.wallpaper_path = wallpaper_path
self.trigger_update()
def update_from_stat(self, sender, *, k, v):
if k == "wallpaper":
self.wallpaper_path = v
def _trigger_pull_wallpaper(self, *args, **kwargs):
if kwargs["key"] != "wallpaper":
return
if hasattr(self, "_scheduled_pull_wallpaper"):
Clock.unschedule(self._scheduled_pull_wallpaper)
self._scheduled_pull_wallpaper = Clock.schedule_once(
self.pull_wallpaper, 0
)
@trigger
def kv_updated(self, *args):
self.unbind(wallpaper_path=self.kvlayoutback.setter("wallpaper_path"))
for wid in self.wids:
self.remove_widget(wid)
self.kvlayoutback = KvLayoutBack(
pos=(0, 0), wallpaper_path=self.wallpaper_path
)
self.bind(wallpaper_path=self.kvlayoutback.setter("wallpaper_path"))
self.kvlayoutfront = KvLayoutFront(**self.widkwargs)
self.size = self.kvlayoutback.size
self.kvlayoutback.bind(size=self.setter("size"))
for wid in self.wids:
self.add_widget(wid)
def make_pawn(self, thing):
"""Make a :class:`Pawn` to represent a :class:`Thing`, store it, and
return a dict suitable for `StackPlane.add_datum`
"""
if thing["name"] in self.pawn:
raise KeyError("Already have a Pawn for this Thing")
r = self.pawn_cls(board=self, proxy=thing)
self.pawn[thing["name"]] = r
locspot = self.spot[thing["location"]]
if "_image_paths" in thing:
texs = list(thing["_image_paths"])
else:
texs = list(Pawn.default_image_paths)
width = height = 0.0
for tex in texs:
wide, high = Image(source=tex).texture_size
if wide > width:
width = wide
if high > height:
height = high
return { # need to lay out multiple pawns per spot properly
"x": int(locspot.right),
"y": int(locspot.top),
"width": width,
"height": height,
"name": thing["name"],
"textures": texs,
}
def make_spot(self, place):
"""Make a :class:`Spot` to represent a :class:`Place`, store it, and
return a dict suitable for `StackPlane.add_datum`
"""
if place["name"] in self.spot:
raise KeyError("Already have a Spot for this Place")
self.spot[place["name"]] = self.spot_cls(board=self, proxy=place)
if "_image_paths" in place:
texs = list(place["_image_paths"])
else:
texs = list(GraphSpot.default_image_paths)
width = height = 0.0
for tex in texs:
wide, high = Image(source=tex).texture_size
if wide > width:
width = wide
if high > height:
height = high
return {
"x": place.get("_x", 0.5),
"y": place.get("_y", 0.5),
"width": width,
"height": height,
"name": place["name"],
"textures": texs,
}
def make_arrow(self, portal):
if (
portal["origin"] not in self.spot
or portal["destination"] not in self.spot
):
raise ValueError(
"An :class:`Arrow` should only be made after "
"the :class:`Spot`s it connects"
)
if (
portal["origin"] in self.arrow
and portal["destination"] in self.arrow[portal["origin"]]
):
raise KeyError("Already have an Arrow for this Portal")
return self._core_make_arrow(
portal,
self.spot[portal["origin"]],
self.spot[portal["destination"]],
)
def _core_make_arrow(self, portal, origspot, destspot, points=None):
r = {
"board": self,
"portal": portal,
"origspot": origspot,
"destspot": destspot,
"label_kwargs": get_label_kwargs_from_portal(portal),
}
if points is not None:
r["points"] = points
return r
def rm_arrows_to_and_from(self, name):
if name in self.arrow.keys():
for dest in list(self.arrow[name].keys()):
self.rm_arrow(name, dest)
if name in self.pred_arrow.keys():
for orig in list(self.pred_arrow[name].keys()):
self.rm_arrow(orig, name)
def rm_pawn(self, name, *args):
"""Remove the :class:`Pawn` by the given name."""
if name not in self.pawn:
raise KeyError("No Pawn named {}".format(name))
# Currently there's no way to connect Pawns with Arrows but I
# think there will be, so, insurance
self.rm_arrows_to_and_from(name)
pwn = self.pawn.pop(name)
if pwn in self.selection_candidates:
self.selection_candidates.remove(pwn)
self.stack_plane.remove(name)
def _trigger_rm_pawn(self, name):
Clock.schedule_once(partial(self.rm_pawn, name), 0)
def rm_spot(self, name, *args):
"""Remove the :class:`Spot` by the given name."""
if name not in self.spot:
raise KeyError("No Spot named {}".format(name))
spot = self.spot.pop(name)
if spot in self.selection_candidates:
self.selection_candidates.remove(spot)
pawns_here = []
for thing in spot.proxy.contents():
pawns_here.append(self.pawn[thing.name])
self.rm_arrows_to_and_from(name)
self.stack_plane.remove(name)
for pawn in pawns_here:
self.rm_pawn(pawn.name)
if name in self._scheduled_rm_spot:
del self._scheduled_rm_spot[name]
def _trigger_rm_spot(self, name):
part = partial(self.rm_spot, name)
if name in self._scheduled_rm_spot:
Clock.unschedule(self._scheduled_rm_spot[name])
self._scheduled_rm_spot[name] = Clock.schedule_once(part, 0)
def rm_arrow(self, orig, dest, *args):
"""Remove the :class:`Arrow` that goes from ``orig`` to ``dest``."""
if orig not in self.arrow or dest not in self.arrow[orig]:
raise KeyError("No Arrow from {} to {}".format(orig, dest))
arr = self.arrow[orig].pop(dest)
if arr in self.selection_candidates:
self.selection_candidates.remove(arr)
self.arrow_plane.remove_edge(orig, dest)
if (orig, dest) in self._scheduled_rm_arrow:
del self._scheduled_rm_arrow[orig, dest]
def _trigger_rm_arrow(self, orig, dest):
part = partial(self.rm_arrow, orig, dest)
if (orig, dest) in self._scheduled_rm_arrow:
Clock.unschedule(self._scheduled_rm_arrow[orig, dest])
self._scheduled_rm_arrow[orig, dest] = Clock.schedule_once(part, 0)
def graph_layout(self, graph):
from networkx.drawing.layout import spring_layout
return normalize_layout(spring_layout(graph))
def discard_pawn(self, thingn, *args):
if thingn in self.pawn:
self.rm_pawn(thingn)
if thingn in self._scheduled_discard_pawn:
del self._scheduled_discard_pawn[thingn]
def _trigger_discard_pawn(self, thing):
part = partial(self.discard_pawn, thing)
if thing in self._scheduled_discard_pawn:
Clock.unschedule(self._scheduled_discard_pawn[thing])
self._scheduled_discard_pawn[thing] = Clock.schedule_once(part, 0)
def _remove_absent_pawns(self, *args):
for pawn_name in list(self.pawn.keys()):
if pawn_name not in self.character.thing:
self.rm_pawn(pawn_name)
def discard_spot(self, placen, *args):
if placen in self.spot:
self.rm_spot(placen)
def _trigger_discard_spot(self, place):
Clock.schedule_once(partial(self.discard_spot, place), 0)
def _remove_absent_spots(self, *args):
for spot_name in list(self.spot.keys()):
if spot_name not in self.character.place:
self.rm_spot(spot_name)
def discard_arrow(self, orign, destn, *args):
if orign in self.arrow and destn in self.arrow[orign]:
self.rm_arrow(orign, destn)
def _trigger_discard_arrow(self, orig, dest):
Clock.schedule_once(partial(self.discard_arrow, orig, dest), 0)
def _remove_absent_arrows(self, *args):
for arrow_origin in list(self.arrow.keys()):
for arrow_destination in list(self.arrow[arrow_origin].keys()):
if (
arrow_origin not in self.character.portal
or arrow_destination
not in self.character.portal[arrow_origin]
):
self.rm_arrow(arrow_origin, arrow_destination)
def add_spot(self, placen, *args):
if placen in self.character.place and placen not in self.spot:
spotten = self.make_spot(self.character.place[placen])
self.stack_plane.add_datum(spotten)
self.spot[placen].pos = (
spotten["x"] * self.width,
spotten["y"] * self.height,
)
def _trigger_add_spot(self, placen):
Clock.schedule_once(partial(self.add_spot, placen), 0)
def _add_new_spots(self, *args):
start_ts = monotonic()
places2add = []
spots_unposd = []
nodes_patch = {}
placemap = self.character.place
spotmap = self.spot
default_image_paths = GraphSpot.default_image_paths
for place_name, place in placemap.items():
if place_name not in spotmap:
place = placemap[place_name]
places2add.append(place)
make_spot = self.make_spot
spots_posd = []
stack_idx = self.stack_plane._stack_index
for place in places2add:
spot = make_spot(place)
if "_x" not in place or "_y" not in place:
spots_unposd.append(spot)
elif spot["name"] not in stack_idx:
spots_posd.append(spot)
if spots_unposd:
try:
nodes_patch_2 = self.grid_layout(spots_unposd)
except (TypeError, ValueError):
nodes_patch_2 = self.nx_layout(spots_unposd)
for k, v in nodes_patch_2.items():
if k in nodes_patch:
nodes_patch[k].update(v)
else:
nodes_patch[k] = v
if nodes_patch:
self.engine.handle(
command="update_nodes",
char=self.character.name,
patch=nodes_patch,
)
if spots_posd:
self.stack_plane.unbind_uid(
"data", self.stack_plane._redraw_bind_uid
)
self.stack_plane.data.extend(spots_posd)
self.stack_plane.redraw()
self.stack_plane._redraw_bind_uid = self.stack_plane.fbind(
"data", self.stack_plane._trigger_redraw
)
def add_arrow(self, orign, destn, *args):
if not (
orign in self.character.portal
and destn in self.character.portal[orign]
):
raise ValueError("No portal for arrow {}->{}".format(orign, destn))
portal = self.character.portal[orign][destn]
if not (orign in self.arrow and destn in self.arrow[orign]):
self.arrow_plane.add_new_portal(self.make_arrow(portal))
the_arrow = GraphArrow(
board=self,
origin=self.spot[orign],
destination=self.spot[destn],
)
if orign not in self.arrow:
self.arrow[orign] = {}
self.arrow[orign][destn] = the_arrow
if destn not in self.pred_arrow:
self.pred_arrow[destn] = {}
self.pred_arrow[destn][orign] = the_arrow
def _add_new_arrows(self, *args):
portmap = self.character.portal
arrowmap = self.arrow
pred_arrowmap = self.pred_arrow
spotmap = self.spot
append_to_arrow_plane = self.arrow_plane.data.append
core_make_arrow = self._core_make_arrow
todo = []
for arrow_orig, arrow_dests in portmap.items():
for arrow_dest, portal in arrow_dests.items():
if (
arrow_orig not in arrowmap
or arrow_dest not in arrowmap[arrow_orig]
):
todo.append(
(portal, spotmap[arrow_orig], spotmap[arrow_dest])
)
the_arr = GraphArrow(
board=self,
origin=spotmap[arrow_orig],
destination=spotmap[arrow_dest],
)
if arrow_orig not in arrowmap:
arrowmap[arrow_orig] = {}
if arrow_dest not in arrowmap[arrow_orig]:
arrowmap[arrow_orig][arrow_dest] = the_arr
if arrow_dest not in pred_arrowmap:
pred_arrowmap[arrow_dest] = {}
if arrow_orig not in pred_arrowmap[arrow_dest]:
pred_arrowmap[arrow_dest][arrow_orig] = the_arr
points = get_points_multi(
(origspot, destspot, 10) for (portal, origspot, destspot) in todo
)
for portal, origspot, destspot in todo:
append_to_arrow_plane(
core_make_arrow(
portal,
origspot,
destspot,
points[origspot.name, destspot.name],
)
)
def update_arrow_labels(self, *args):
portmap = self.character.portal
arrow_plane = self.arrow_plane
for datum in arrow_plane.data:
portal = portmap[datum["origspot"].name][datum["destspot"].name]
new_kwargs = get_label_kwargs_from_portal(portal)
if new_kwargs != datum["label_kwargs"]:
arrow_plane.update_portal(
datum["origspot"].name, datum["destspot"].name, new_kwargs
)
def add_pawn(self, thingn, *args):
if thingn not in self.character.thing:
raise KeyError(f"No Thing for pawn: {thingn}")
if thingn in self.pawn:
raise KeyError(f"Already have pawn for Thing: {thingn}")
pwn = self.make_pawn(self.character.thing[thingn])
stacp = self.stack_plane
stacp.add_datum(pwn)
self.pawn[thingn].pos = pwn["x"], pwn["y"]
if thingn in self._scheduled_add_pawn:
del self._scheduled_add_pawn[thingn]
def _trigger_add_pawn(self, thingn):
part = partial(self.add_pawn, thingn)
if thingn in self._scheduled_add_pawn:
Clock.unschedule(self._scheduled_add_pawn[thingn])
self._scheduled_add_pawn[thingn] = Clock.schedule_once(part, 0)
def _add_new_pawns(self, *args):
nodes_patch = {}
things2add = []
pawns_added = []
pawnmap = self.pawn
for thing_name, thing in self.character.thing.items():
if thing_name not in pawnmap:
things2add.append(thing)
make_pawn = self.make_pawn
for thing in things2add:
pwn = make_pawn(thing)
if "_image_paths" not in thing:
nodes_patch[thing["name"]] = {
"_image_paths": list(
pwn.get("textures", Pawn.default_image_paths)
)
}
pawns_added.append(pwn)
if nodes_patch:
self.character.node.patch(nodes_patch)
self.stack_plane.unbind_uid("data", self.stack_plane._redraw_bind_uid)
self.stack_plane.data.extend(pawns_added)
self.stack_plane.redraw()
self.stack_plane._redraw_bind_uid = self.stack_plane.fbind(
"data", self.stack_plane._trigger_redraw
)
def update(self, *args):
"""Force an update to match the current state of my character.
This polls every element of the character, and therefore
causes me to sync with the lisien core for a long time. Avoid
when possible.
"""
if not hasattr(self, "engine") or getattr(
self.engine, "closed", False
):
Logger.warning(
"Board: tried to update without a connection to a lisien core"
)
return
if not self.stack_plane or not self.arrow_plane:
self.trigger_update()
return
# remove widgets that don't represent anything anymore
Logger.debug("GraphBoard: updating")
start_ts = monotonic()
self._disconnect_proxy_objects()
self._remove_absent_pawns()
self._remove_absent_spots()
self._remove_absent_arrows()
# add widgets to represent new stuff
self._add_new_spots()
self._update_spot_display()
if self.arrow_cls:
self._add_new_arrows()
self._update_arrow_display()
self._add_new_pawns()
self._update_pawn_display()
self._connect_proxy_objects()
Logger.debug(
f"GraphBoard: updated, took {monotonic() - start_ts:,.2f} seconds"
)
trigger_update = trigger(update)
def _disconnect_proxy_objects(self):
char = self.character
char.stat.disconnect(self.update_from_character_stat)
char.node.disconnect(self.update_from_character_node)
char.portal.disconnect(self.update_from_character_edge)
def _connect_proxy_objects(self):
char = self.character
char.stat.connect(self.update_from_character_stat)
char.node.connect(self.update_from_character_node)
char.portal.connect(self.update_from_character_edge)
def update_from_character_stat(self, character, key, value):
pass
@mainthread
def update_from_character_node(self, node, key, value):
if hasattr(node, "location"):
if not node:
self.rm_pawn(node.name)
elif node.name not in self.pawn:
self.add_pawn(node.name)
elif key == "location":
loc = self.spot[value]
thing = self.pawn[node.name]
thing.pos = loc.right, loc.top
elif key == "_image_paths":
self.pawn[node.name].paths = value
elif node is self.character.node or node is self.character.place:
if value and key not in self.spot:
self.add_spot(key)
elif not value and key in self.spot:
self.rm_spot(key)
elif node is self.character.thing:
if value and key not in self.pawn:
self.add_pawn(key)
elif not value and key not in self.pawn:
self.rm_pawn(key)
else:
if not node:
self.rm_spot(node.name)
elif node.name not in self.spot:
self.add_spot(node.name)
elif key == "_image_paths":
self.spot[node.name].paths = value
def _update_spot_display(self):
"""Change spot graphics to match the state of their place"""
def _update_pawn_display(self):
"""Change pawn graphics to match the state of their thing"""
def update_from_character_edge(self, edge, key, value):
if edge:
if not self.arrow_plane.have_arrow(
edge._origin, edge._destination
):
label_kwargs = DEFAULT_ARROW_LABEL_KWARGS.copy()
if "_label_stat" in edge:
label_kwargs["text"] = str(
edge.get(edge["label_stat"], "")
)
self.arrow_plane.add_new_portal(
{
"origspot": self.spot[edge.origin.name],
"destspot": self.spot[edge.destination.name],
"label_kwargs": label_kwargs,
}
)
if key == edge.get("_label_stat"):
self.arrow_plane.update_portal_label(
edge._origin, edge._destination, str(value)
)
else:
self.arrow_plane.remove_edge(edge._origin, edge._destination)
def _update_arrow_display(self):
"""Change arrow graphics to match the state of their portal"""
def _apply_node_layout(self, l, spot, *args):
if self.width == 1 or self.height == 1:
Clock.schedule_once(
partial(self._apply_node_layout, l, spot), 0.01
)
return
if not isinstance(spot, dict):
spot = {spt["name"]: spt for spt in spot}
node_upd = {}
newspots = []
for name, (x, y) in l.items():
assert 0 <= x <= 0.99, "{} has invalid x: {}".format(name, x)
assert 0 <= y <= 0.99, "{} has invalid y: {}".format(name, y)
assert self.stack_plane.width == self.width
assert self.stack_plane.height == self.height
node_upd[name] = {"_x": x, "_y": y}
spot[name]["x"] = x
spot[name]["y"] = y
newspots.append(spot[name])
if newspots:
self.stack_plane.unbind_uid(
"data", self.stack_plane._redraw_bind_uid
)
self.stack_plane.data.extend(newspots)
self.stack_plane.redraw()
self.stack_plane._redraw_bind_uid = self.stack_plane.fbind(
"data", self.stack_plane._trigger_redraw
)
self.spots_unposd = []
return node_upd
def grid_layout(self, spots, *args):
return self._apply_node_layout(
normalize_layout({spot["name"]: spot["name"] for spot in spots}),
spots,
)
def nx_layout(self, spots, *args):
spots_only = self.character.facade()
for thing in list(spots_only.thing.keys()):
del spots_only.thing[thing]
for place in spots_only.place.keys() - set(
spot["name"] for spot in spots
):
del spots_only.place[place]
return self._apply_node_layout(self.graph_layout(spots_only), spots)
def arrows(self):
"""Iterate over all my arrows."""
for o in self.arrow.values():
for arro in o.values():
yield arro
def pawns_at(self, x, y):
"""Iterate over pawns that collide the given point."""
for name in self.pawn.keys() & set(
self.stack_plane.iter_collided_keys(x, y)
):
yield self.pawn[name]
def spots_at(self, x, y):
"""Iterate over spots that collide the given point."""
for name in self.spot.keys() & set(
self.stack_plane.iter_collided_keys(x, y)
):
yield self.spot[name]
def arrows_at(self, x, y):
"""Iterate over arrows that collide the given point."""
for orig, dest in self.arrow_plane.iter_collided_edges(x, y):
yield self.arrow[orig][dest]
|
class GraphBoard(RelativeLayout):
'''A graphical view onto a :class:`lisien.Character`, resembling a game
graph.
'''
@property
def widkwargs(self):
pass
def on_touch_down(self, touch):
'''Check for collisions and select an appropriate entity.'''
pass
def on_touch_move(self, touch):
'''If an entity is selected, drag it.'''
pass
def portal_touch_up(self, touch):
'''Try to create a portal between the spots the user chose.'''
pass
def on_touch_up(self, touch):
'''Delegate touch handling if possible, else select something.'''
pass
def unsel_graph_arrow():
pass
def _pull_size(self, *args):
pass
def _pull_image(self, *args):
pass
def on_parent(self, *args):
'''Create some subwidgets and trigger the first update.'''
pass
def on_character(self, *args):
pass
def update_from_stat(self, sender, *, k, v):
pass
def _trigger_pull_wallpaper(self, *args, **kwargs):
pass
@trigger
def kv_updated(self, *args):
pass
def make_pawn(self, thing):
'''Make a :class:`Pawn` to represent a :class:`Thing`, store it, and
return a dict suitable for `StackPlane.add_datum`
'''
pass
def make_spot(self, place):
'''Make a :class:`Spot` to represent a :class:`Place`, store it, and
return a dict suitable for `StackPlane.add_datum`
'''
pass
def make_arrow(self, portal):
pass
def _core_make_arrow(self, portal, origspot, destspot, points=None):
pass
def rm_arrows_to_and_from(self, name):
pass
def rm_pawn(self, name, *args):
'''Remove the :class:`Pawn` by the given name.'''
pass
def _trigger_rm_pawn(self, name):
pass
def rm_spot(self, name, *args):
'''Remove the :class:`Spot` by the given name.'''
pass
def _trigger_rm_spot(self, name):
pass
def rm_arrow(self, orig, dest, *args):
'''Remove the :class:`Arrow` that goes from ``orig`` to ``dest``.'''
pass
def _trigger_rm_arrow(self, orig, dest):
pass
def graph_layout(self, graph):
pass
def discard_pawn(self, thingn, *args):
pass
def _trigger_discard_pawn(self, thing):
pass
def _remove_absent_pawns(self, *args):
pass
def discard_spot(self, placen, *args):
pass
def _trigger_discard_spot(self, place):
pass
def _remove_absent_spots(self, *args):
pass
def discard_arrow(self, orign, destn, *args):
pass
def _trigger_discard_arrow(self, orig, dest):
pass
def _remove_absent_arrows(self, *args):
pass
def add_spot(self, placen, *args):
pass
def _trigger_add_spot(self, placen):
pass
def _add_new_spots(self, *args):
pass
def add_arrow(self, orign, destn, *args):
pass
def _add_new_arrows(self, *args):
pass
def update_arrow_labels(self, *args):
pass
def add_pawn(self, thingn, *args):
pass
def _trigger_add_pawn(self, thingn):
pass
def _add_new_pawns(self, *args):
pass
def update(self, *args):
'''Force an update to match the current state of my character.
This polls every element of the character, and therefore
causes me to sync with the lisien core for a long time. Avoid
when possible.
'''
pass
def _disconnect_proxy_objects(self):
pass
def _connect_proxy_objects(self):
pass
def update_from_character_stat(self, character, key, value):
pass
@mainthread
def update_from_character_node(self, node, key, value):
pass
def _update_spot_display(self):
'''Change spot graphics to match the state of their place'''
pass
def _update_pawn_display(self):
'''Change pawn graphics to match the state of their thing'''
pass
def update_from_character_edge(self, edge, key, value):
pass
def _update_arrow_display(self):
'''Change arrow graphics to match the state of their portal'''
pass
def _apply_node_layout(self, l, spot, *args):
pass
def grid_layout(self, spots, *args):
pass
def nx_layout(self, spots, *args):
pass
def arrows(self):
'''Iterate over all my arrows.'''
pass
def pawns_at(self, x, y):
'''Iterate over pawns that collide the given point.'''
pass
def spots_at(self, x, y):
'''Iterate over spots that collide the given point.'''
pass
def arrows_at(self, x, y):
'''Iterate over arrows that collide the given point.'''
pass
| 63 | 19 | 15 | 0 | 14 | 1 | 4 | 0.04 | 1 | 20 | 9 | 0 | 58 | 9 | 58 | 58 | 965 | 70 | 859 | 226 | 795 | 38 | 654 | 223 | 593 | 18 | 1 | 5 | 234 |
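A hedged sketch of the layout step used by ``graph_layout`` above: ``spring_layout`` yields coordinates in arbitrary units, and a normalization pass (standing in for ``normalize_layout``, whose exact behavior is assumed here) rescales them into the range that ``_apply_node_layout`` asserts:
import networkx as nx

g = nx.path_graph(4)
raw = nx.spring_layout(g)                  # {node: array([x, y])} in arbitrary units
xs = [float(x) for x, _ in raw.values()]
ys = [float(y) for _, y in raw.values()]
span_x = (max(xs) - min(xs)) or 1.0
span_y = (max(ys) - min(ys)) or 1.0
layout = {
    n: ((float(x) - min(xs)) / span_x * 0.98, (float(y) - min(ys)) / span_y * 0.98)
    for n, (x, y) in raw.items()
}
assert all(0 <= x <= 0.99 and 0 <= y <= 0.99 for x, y in layout.values())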
146,425 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/util.py
|
lisien.util.fake_submit.FakeFuture
|
class FakeFuture(Future):
def __init__(self, func, *args, **kwargs):
super().__init__()
self.set_result(func(*args, **kwargs))
|
class FakeFuture(Future):
def __init__(self, func, *args, **kwargs):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 15 | 4 | 0 | 4 | 2 | 2 | 0 | 4 | 2 | 2 | 1 | 2 | 0 | 1 |
146,426 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/util.py
|
lisien.util.get_rando
|
class get_rando:
"""Attribute getter for randomization functions
Aliases functions of a randomizer, wrapped so that they won't run in
planning mode, and will save the randomizer's state after every call.
"""
__slots__ = ("_getter", "_wrapfun", "_instance")
_getter: Callable
def __init__(self, attr, *attrs):
self._getter = attrgetter(attr, *attrs)
def __get__(self, instance, owner) -> Callable:
if hasattr(self, "_wrapfun") and self._instance is instance:
return self._wrapfun
retfun = self._getter(instance)
@wraps(retfun)
def remembering_rando_state(*args, **kwargs):
if instance._planning:
raise exc.PlanError("Don't use randomization in a plan")
ret = retfun(*args, **kwargs)
instance.universal["rando_state"] = instance._rando.getstate()
return ret
self._wrapfun = remembering_rando_state
self._instance = instance
return remembering_rando_state
|
class get_rando:
'''Attribute getter for randomization functions
Aliases functions of a randomizer, wrapped so that they won't run in
planning mode, and will save the randomizer's state after every call.
'''
def __init__(self, attr, *attrs):
pass
def __get__(self, instance, owner) -> Callable:
pass
@wraps(retfun)
def remembering_rando_state(*args, **kwargs):
pass
| 5 | 1 | 8 | 1 | 7 | 0 | 2 | 0.21 | 0 | 2 | 1 | 0 | 2 | 2 | 2 | 2 | 30 | 7 | 19 | 10 | 14 | 4 | 18 | 9 | 14 | 2 | 0 | 1 | 5 |
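A standalone sketch of the ``attrgetter``-plus-``wraps`` descriptor pattern shown above; the class and attribute names here are illustrative, not lisien's:
from functools import wraps
from operator import attrgetter
import random

class logged:
    def __init__(self, attr):
        self._getter = attrgetter(attr)
    def __get__(self, instance, owner=None):
        fn = self._getter(instance)
        @wraps(fn)
        def wrapper(*args, **kwargs):
            instance.log.append(fn.__name__)   # side effect on every call, like saving the rando state
            return fn(*args, **kwargs)
        return wrapper

class Roller:
    randint = logged("_rng.randint")
    def __init__(self, rng):
        self._rng = rng
        self.log = []

r = Roller(random.Random(0))
r.randint(1, 6)
assert r.log == ["randint"]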
146,427 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.AbstractLanguageDescriptor
|
class AbstractLanguageDescriptor(Signal):
def __get__(self, instance, owner=None):
if not hasattr(self, "lang"):
self.lang = Language(self, self._get_language(instance))
return self.lang
def __set__(self, inst, val):
self._set_language(inst, val)
self.lang = Language(self, val)
self.send(inst, language=val)
def __str__(self):
return self.lang
|
class AbstractLanguageDescriptor(Signal):
def __get__(self, instance, owner=None):
pass
def __set__(self, inst, val):
pass
def __str__(self):
pass
| 4 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 1 | 2 | 3 | 1 | 3 | 3 | 13 | 2 | 11 | 5 | 7 | 0 | 11 | 5 | 7 | 2 | 1 | 1 | 4 |
146,428 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
|
lisien.allegedb.graph.DiGraphSuccessorsMapping.Successors
|
class Successors(AbstractSuccessors):
__slots__ = ("graph", "container", "orig", "_cache")
def _order_nodes(self, dest):
return (self.orig, dest)
|
class Successors(AbstractSuccessors):
def _order_nodes(self, dest):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 1 | 59 | 5 | 1 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 11 | 0 | 1 |
146,429 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
|
lisien.allegedb.graph.DiGraphSuccessorsMapping
|
class DiGraphSuccessorsMapping(GraphSuccessorsMapping):
__slots__ = ("graph",)
class Successors(AbstractSuccessors):
__slots__ = ("graph", "container", "orig", "_cache")
def _order_nodes(self, dest):
return (self.orig, dest)
|
class DiGraphSuccessorsMapping(GraphSuccessorsMapping):
class Successors(AbstractSuccessors):
def _order_nodes(self, dest):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 54 | 8 | 2 | 6 | 5 | 3 | 0 | 6 | 5 | 3 | 1 | 11 | 0 | 1 |
146,430 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
|
lisien.allegedb.graph.DiGraphPredecessorsMapping.Predecessors
|
class Predecessors(GraphEdgeMapping):
"""Mapping of Edges that end at a particular node"""
__slots__ = ("graph", "container", "dest")
def __init__(self, container, dest):
"""Store container and node ID"""
super().__init__(container.graph)
self.container = container
self.dest = dest
def __iter__(self):
"""Iterate over the edges that exist at the present (branch, rev)"""
return self.db._edges_cache.iter_predecessors(
self.graph.name, self.dest, *self.db._btt()
)
def __contains__(self, orig):
"""Is there an edge from ``orig`` at the moment?"""
return self.db._edges_cache.has_predecessor(
self.graph.name, self.dest, orig, *self.db._btt()
)
def __len__(self):
"""How many edges exist at this rev of this branch?"""
return self.db._edges_cache.count_predecessors(
self.graph.name, self.dest, *self.db._btt()
)
def _make_edge(self, orig):
return Edge(self.graph, orig, self.dest)
def __getitem__(self, orig):
"""Get the edge from the given node to mine"""
return self.graph.adj[orig][self.dest]
def __setitem__(self, orig, value):
"""Use ``value`` as a mapping of edge attributes, set an edge from the
given node to mine.
"""
branch, turn, tick = self.db._nbtt()
try:
e = self[orig]
e.clear()
except KeyError:
self.db.query.exist_edge(
self.graph.name,
orig,
self.dest,
0,
branch,
turn,
tick,
True,
)
e = self._make_edge(orig)
e.update(value)
self.db._edges_cache.store(
self.graph.name, orig, self.dest, 0, branch, turn, tick, True
)
def __delitem__(self, orig):
"""Unset the existence of the edge from the given node to mine"""
branch, turn, tick = self.db._nbtt()
if "Multi" in self.graph.__class__.__name__:
for idx in self[orig]:
self.db.query.exist_edge(
self.graph.name,
orig,
self.dest,
idx,
branch,
turn,
tick,
False,
)
self.db._edges_cache.store(
self.graph.name,
orig,
self.dest,
idx,
branch,
turn,
tick,
False,
)
return
else:
raise KeyError("No edges from {}".format(orig))
self.db.query.exist_edge(
self.graph.name, orig, self.dest, 0, branch, turn, tick, False
)
self.db._edges_cache.store(
self.graph.name, orig, self.dest, 0, branch, turn, tick, None
)
|
class Predecessors(GraphEdgeMapping):
'''Mapping of Edges that end at a particular node'''
def __init__(self, container, dest):
'''Store container and node ID'''
pass
def __iter__(self):
'''Iterate over the edges that exist at the present (branch, rev)'''
pass
def __contains__(self, orig):
'''Is there an edge from ``orig`` at the moment?'''
pass
def __len__(self):
'''How many edges exist at this rev of this branch?'''
pass
def _make_edge(self, orig):
pass
def __getitem__(self, orig):
'''Get the edge from the given node to mine'''
pass
def __setitem__(self, orig, value):
'''Use ``value`` as a mapping of edge attributes, set an edge from the
given node to mine.
'''
pass
def __delitem__(self, orig):
'''Unset the existence of the edge from the given node to mine'''
pass
| 9 | 8 | 11 | 0 | 9 | 1 | 1 | 0.13 | 1 | 3 | 1 | 1 | 8 | 2 | 8 | 55 | 96 | 10 | 76 | 16 | 67 | 10 | 37 | 16 | 28 | 2 | 10 | 2 | 10 |
146,431 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
|
lisien.allegedb.graph.DiGraphPredecessorsMapping
|
class DiGraphPredecessorsMapping(GraphEdgeMapping):
"""Mapping for Predecessors instances, which map to Edges that end at
the dest provided to this
"""
__slots__ = ("graph",)
def __contains__(self, dest):
return dest in self.graph.node
def __getitem__(self, dest):
"""Return a Predecessors instance for edges ending at the given
node
"""
if dest not in self:
raise KeyError("No edges available")
if dest not in self._cache:
self._cache[dest] = self.Predecessors(self, dest)
return self._cache[dest]
def __setitem__(self, key, val):
"""Interpret ``val`` as a mapping of edges that end at ``dest``"""
created = key not in self
if key not in self._cache:
self._cache[key] = self.Predecessors(self, key)
preds = self._cache[key]
preds.clear()
preds.update(val)
def __delitem__(self, key):
"""Delete all edges ending at ``dest``"""
it = self[key]
it.clear()
del self._cache[key]
def __iter__(self):
return iter(self.graph.node)
def __len__(self):
return len(self.graph.node)
class Predecessors(GraphEdgeMapping):
"""Mapping of Edges that end at a particular node"""
__slots__ = ("graph", "container", "dest")
def __init__(self, container, dest):
"""Store container and node ID"""
super().__init__(container.graph)
self.container = container
self.dest = dest
def __iter__(self):
"""Iterate over the edges that exist at the present (branch, rev)"""
return self.db._edges_cache.iter_predecessors(
self.graph.name, self.dest, *self.db._btt()
)
def __contains__(self, orig):
"""Is there an edge from ``orig`` at the moment?"""
return self.db._edges_cache.has_predecessor(
self.graph.name, self.dest, orig, *self.db._btt()
)
def __len__(self):
"""How many edges exist at this rev of this branch?"""
return self.db._edges_cache.count_predecessors(
self.graph.name, self.dest, *self.db._btt()
)
def _make_edge(self, orig):
return Edge(self.graph, orig, self.dest)
def __getitem__(self, orig):
"""Get the edge from the given node to mine"""
return self.graph.adj[orig][self.dest]
def __setitem__(self, orig, value):
"""Use ``value`` as a mapping of edge attributes, set an edge from the
given node to mine.
"""
branch, turn, tick = self.db._nbtt()
try:
e = self[orig]
e.clear()
except KeyError:
self.db.query.exist_edge(
self.graph.name,
orig,
self.dest,
0,
branch,
turn,
tick,
True,
)
e = self._make_edge(orig)
e.update(value)
self.db._edges_cache.store(
self.graph.name, orig, self.dest, 0, branch, turn, tick, True
)
def __delitem__(self, orig):
"""Unset the existence of the edge from the given node to mine"""
branch, turn, tick = self.db._nbtt()
if "Multi" in self.graph.__class__.__name__:
for idx in self[orig]:
self.db.query.exist_edge(
self.graph.name,
orig,
self.dest,
idx,
branch,
turn,
tick,
False,
)
self.db._edges_cache.store(
self.graph.name,
orig,
self.dest,
idx,
branch,
turn,
tick,
False,
)
return
else:
raise KeyError("No edges from {}".format(orig))
self.db.query.exist_edge(
self.graph.name, orig, self.dest, 0, branch, turn, tick, False
)
self.db._edges_cache.store(
self.graph.name, orig, self.dest, 0, branch, turn, tick, None
)
|
class DiGraphPredecessorsMapping(GraphEdgeMapping):
'''Mapping for Predecessors instances, which map to Edges that end at
the dest provided to this
'''
def __contains__(self, dest):
pass
def __getitem__(self, dest):
'''Return a Predecessors instance for edges ending at the given
node
'''
pass
def __setitem__(self, key, val):
'''Interpret ``val`` as a mapping of edges that end at ``dest``'''
pass
def __delitem__(self, key):
'''Delete all edges ending at ``dest``'''
pass
def __iter__(self):
pass
def __len__(self):
pass
class Predecessors(GraphEdgeMapping):
'''Mapping of Edges that end at a particular node'''
def __init__(self, container, dest):
'''Store container and node ID'''
pass
def __iter__(self):
'''Iterate over the edges that exist at the present (branch, rev)'''
pass
def __contains__(self, orig):
'''Is there an edge from ``orig`` at the moment?'''
pass
def __len__(self):
'''How many edges exist at this rev of this branch?'''
pass
def _make_edge(self, orig):
pass
def __getitem__(self, orig):
'''Get the edge from the given node to mine'''
pass
def __setitem__(self, orig, value):
'''Use ``value`` as a mapping of edge attributes, set an edge from the
given node to mine.
'''
pass
def __delitem__(self, orig):
'''Unset the existence of the edge from the given node to mine'''
pass
| 16 | 12 | 8 | 0 | 7 | 1 | 1 | 0.18 | 1 | 2 | 1 | 1 | 6 | 0 | 6 | 53 | 139 | 20 | 101 | 27 | 85 | 18 | 62 | 27 | 46 | 3 | 10 | 2 | 19 |
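The two records above implement the predecessor-side view of a directed allegedb graph. A minimal sketch of how that view behaves, assuming `g` is an already-created allegedb DiGraph that exposes this mapping the way networkx exposes `pred` (that attribute name is an assumption, not shown in this record):

# `g` is assumed to be an allegedb DiGraph with networkx-compatible methods.
g.add_edge("cave", "forest")              # one directed edge cave -> forest

assert "forest" in g.pred                 # outer keys are destination nodes
incoming = g.pred["forest"]               # a Predecessors mapping for "forest"
assert "cave" in incoming                 # an edge arrives from "cave"
edge = incoming["cave"]                   # same Edge as g.adj["cave"]["forest"]
edge["traversal_cost"] = 2                # hypothetical stat stored on the edge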
146,432 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/core.py
|
lisien.allegedb.core.TimeSignalDescriptor
|
class TimeSignalDescriptor:
__doc__ = TimeSignal.__doc__
def __get__(self, inst, cls):
if not hasattr(inst, "_time_signal"):
inst._time_signal = TimeSignal(inst)
return inst._time_signal
def __set__(self, inst: "ORM", val: tuple[str, int]):
if not hasattr(inst, "_time_signal"):
inst._time_signal = TimeSignal(inst)
sig = inst._time_signal
branch_then, turn_then, tick_then = inst._btt()
branch_now, turn_now = val
if (branch_then, turn_then) == (branch_now, turn_now):
return
e = inst
# enforce the arrow of time, if it's in effect
if e._forward and not e._planning:
if branch_now != branch_then:
raise TimeError("Can't change branches in a forward context")
if turn_now < turn_then:
raise TimeError(
"Can't time travel backward in a forward context"
)
if turn_now > turn_then + 1:
raise TimeError("Can't skip turns in a forward context")
# make sure I'll end up within the revision range of the
# destination branch
branches = e._branches
if branch_now in branches:
tick_now = e._turn_end_plan.setdefault(
(branch_now, turn_now), tick_then
)
parent, turn_start, tick_start, turn_end, tick_end = branches[
branch_now
]
if turn_now < turn_start:
raise OutOfTimelineError(
"The turn number {} "
"occurs before the start of "
"the branch {}".format(turn_now, branch_now),
branch_then,
turn_then,
tick_then,
branch_now,
turn_now,
tick_now,
)
if turn_now == turn_start and tick_now < tick_start:
raise OutOfTimelineError(
"The tick number {}"
"on turn {} "
"occurs before the start of "
"the branch {}".format(tick_now, turn_now, branch_now),
branch_then,
turn_then,
tick_then,
branch_now,
turn_now,
tick_now,
)
if not e._planning and (
turn_now > turn_end
or (turn_now == turn_end and tick_now > tick_end)
):
branches[branch_now] = (
parent,
turn_start,
tick_start,
turn_now,
tick_now,
)
else:
tick_now = tick_then
branches[branch_now] = (
branch_then,
turn_now,
tick_now,
turn_now,
tick_now,
)
inst._turn_end_plan[branch_now, turn_now] = max(
(inst._turn_end_plan[branch_now, turn_now], tick_now)
)
if not inst._planning:
inst._branch_end[branch_now] = max(
(inst._branch_end[branch_now], turn_now)
)
inst._turn_end[branch_now, turn_now] = max(
(inst._turn_end[branch_now, turn_now], tick_now)
)
e.query.new_branch(branch_now, branch_then, turn_now, tick_now)
e._obranch, e._oturn = val
if not e._time_is_loaded(*val, tick_now):
e._load_at(*val, tick_now)
if not e._planning:
if tick_now > e._turn_end[val]:
e._turn_end[val] = tick_now
e._otick = e._turn_end_plan[val] = tick_now
sig.send(
e,
branch_then=branch_then,
turn_then=turn_then,
tick_then=tick_then,
branch_now=branch_now,
turn_now=turn_now,
tick_now=tick_now,
)
|
class TimeSignalDescriptor:
def __get__(self, inst, cls):
pass
def __set__(self, inst: "ORM", val: tuple[str, int]):
pass
| 3 | 0 | 54 | 1 | 51 | 2 | 9 | 0.03 | 0 | 6 | 3 | 0 | 2 | 0 | 2 | 2 | 111 | 4 | 104 | 11 | 101 | 3 | 47 | 11 | 44 | 15 | 0 | 2 | 17 |
146,433 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/core.py
|
lisien.allegedb.core.TimeSignal
|
class TimeSignal(Signal):
"""Acts like a tuple of ``(branch, turn)`` for the most part.
This is a ``Signal``. To set a function to be called whenever the
branch or turn changes, pass it to my ``connect`` method.
"""
def __init__(self, engine: "ORM"):
super().__init__()
self.engine = engine
self.branch = self.engine.branch
self.turn = self.engine.turn
def __iter__(self):
yield self.branch
yield self.turn
def __len__(self):
return 2
def __getitem__(self, i: str | int) -> str | int:
if i in ("branch", 0):
return self.branch
if i in ("turn", 1):
return self.turn
raise IndexError(i)
def __setitem__(self, i: str | int, v: str | int) -> None:
if i in ("branch", 0):
self.engine.branch = v
elif i in ("turn", 1):
self.engine.turn = v
else:
raise KeyError(
"Can only set branch or turn. Set `Engine.tick` directly if you really want that."
)
def __str__(self):
return str(tuple(self))
def __eq__(self, other):
return tuple(self) == other
def __ne__(self, other):
return tuple(self) != other
def __gt__(self, other):
return tuple(self) > other
def __ge__(self, other):
return tuple(self) >= other
def __lt__(self, other):
return tuple(self) < other
def __le__(self, other):
return tuple(self) <= other
|
class TimeSignal(Signal):
'''Acts like a tuple of ``(branch, turn)`` for the most part.
This is a ``Signal``. To set a function to be called whenever the
branch or turn changes, pass it to my ``connect`` method.
'''
def __init__(self, engine: "ORM"):
pass
def __iter__(self):
pass
def __len__(self):
pass
def __getitem__(self, i: str | int) -> str | int:
pass
def __setitem__(self, i: str | int, v: str | int) -> None:
pass
def __str__(self):
pass
def __eq__(self, other):
pass
def __ne__(self, other):
pass
def __gt__(self, other):
pass
def __ge__(self, other):
pass
def __lt__(self, other):
pass
def __le__(self, other):
pass
| 13 | 1 | 3 | 0 | 3 | 0 | 1 | 0.1 | 1 | 6 | 0 | 0 | 12 | 3 | 12 | 12 | 58 | 14 | 40 | 16 | 27 | 4 | 36 | 16 | 23 | 3 | 1 | 1 | 16 |
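The TimeSignal above behaves like a `(branch, turn)` tuple and doubles as a blinker-style signal, and the TimeSignalDescriptor record before it shows the keyword arguments sent when time changes. A minimal sketch, assuming `eng` is a lisien engine that exposes this signal as `eng.time` (an attribute name assumed here, not shown in these records):

# The callback's keyword arguments mirror the kwargs passed to sig.send(...)
# in TimeSignalDescriptor.__set__ above.
def on_time_change(engine, *, branch_then, turn_then, tick_then,
                   branch_now, turn_now, tick_now):
    print(f"time moved from {branch_then}@{turn_then}.{tick_then} "
          f"to {branch_now}@{turn_now}.{tick_now}")

eng.time.connect(on_time_change)     # Signal.connect, as the docstring says

# Tuple-like reads, backed by __iter__, __getitem__, and the comparison methods:
branch, turn = eng.time
assert eng.time["branch"] == branch and eng.time[1] == turn
assert eng.time == (branch, turn)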
146,434 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/core.py
|
lisien.allegedb.core.PlanningContext
|
class PlanningContext(ContextDecorator):
"""A context manager for 'hypothetical' edits.
Start a block of code like::
with orm.plan():
...
and any changes you make to the world state within that block will be
'plans,' meaning that they are used as defaults. The world will
obey your plan unless you make changes to the same entities outside
the plan, in which case the world will obey those, and cancel any
future plan.
Plans are *not* canceled when concerned entities are deleted, although
they are unlikely to be followed.
New branches cannot be started within plans. The ``with orm.forward():``
optimization is disabled within a ``with orm.plan():`` block, so
consider another approach instead of making a very large plan.
With ``reset=True`` (the default), when the plan block closes,
the time will reset to when it began.
"""
__slots__ = ["orm", "id", "forward", "reset"]
def __init__(self, orm: "ORM", reset=True):
self.orm = orm
if reset:
self.reset = orm._btt()
else:
self.reset = None
def __enter__(self):
orm = self.orm
if orm._planning:
raise ValueError("Already planning")
orm._planning = True
branch, turn, tick = orm._btt()
self.id = myid = orm._last_plan = orm._last_plan + 1
self.forward = orm._forward
if orm._forward:
orm._forward = False
orm._plans[myid] = branch, turn, tick
orm._plans_uncommitted.append((myid, branch, turn, tick))
orm._branches_plans[branch].add(myid)
return myid
def __exit__(self, exc_type, exc_val, exc_tb):
self.orm._planning = False
if self.reset is not None:
self.orm._set_btt(*self.reset)
if self.forward:
self.orm._forward = True
|
class PlanningContext(ContextDecorator):
'''A context manager for 'hypothetical' edits.
Start a block of code like::
with orm.plan():
...
and any changes you make to the world state within that block will be
'plans,' meaning that they are used as defaults. The world will
obey your plan unless you make changes to the same entities outside
the plan, in which case the world will obey those, and cancel any
future plan.
Plans are *not* canceled when concerned entities are deleted, although
they are unlikely to be followed.
New branches cannot be started within plans. The ``with orm.forward():``
optimization is disabled within a ``with orm.plan():`` block, so
consider another approach instead of making a very large plan.
With ``reset=True`` (the default), when the plan block closes,
the time will reset to when it began.
'''
def __init__(self, orm: "ORM", reset=True):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
| 4 | 1 | 9 | 0 | 9 | 0 | 3 | 0.61 | 1 | 1 | 0 | 0 | 3 | 4 | 3 | 5 | 57 | 12 | 28 | 11 | 24 | 17 | 27 | 11 | 23 | 3 | 2 | 1 | 8 |
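The docstring above shows the intended entry point, ``with orm.plan():``. A minimal sketch of that pattern; `eng` (an engine exposing ``plan()``) and `phys` (one of its characters) are hypothetical names:

with eng.plan() as plan_id:      # __enter__ returns the new plan's id
    # Edits made here are recorded as a plan: defaults for the future that
    # are cancelled if the same entities are changed outside the plan.
    phys.stat["weather"] = "rain"

# With the default reset=True, leaving the block restores the time at which
# the plan was started.
print("recorded plan", plan_id)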
146,435 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/cache.py
|
lisien.allegedb.cache.StructuredDefaultDict
|
class StructuredDefaultDict(dict):
"""A `defaultdict`-like class with values stored at a specific depth.
Requires an integer to tell it how many layers deep to go.
The innermost layer will be ``PickyDefaultDict``, which will take the
``type``, ``args_munger``, and ``kwargs_munger`` arguments supplied
to my constructor.
"""
__slots__ = (
"layer",
"type",
"args_munger",
"kwargs_munger",
"parent",
"key",
"_stuff",
"_lock",
"gettest",
"settest",
)
def __init__(
self,
layers: int,
type: type = None,
args_munger: callable = _default_args_munger,
kwargs_munger: callable = _default_kwargs_munger,
gettest=lambda k: None,
settest=lambda k, v: None,
):
if layers < 1:
raise ValueError("Not enough layers")
self._lock = RLock()
self.layer = layers
self.type = type
self.args_munger = args_munger
self.kwargs_munger = kwargs_munger
self._stuff = (layers, type, args_munger, kwargs_munger)
self.gettest = gettest
self.settest = settest
def __getitem__(self, k):
with self._lock:
self.gettest(k)
if k in self:
return dict.__getitem__(self, k)
layer, typ, args_munger, kwargs_munger = self._stuff
if layer == 1:
if typ is None:
ret = {}
else:
ret = PickyDefaultDict(typ, args_munger, kwargs_munger)
ret.parent = self
ret.key = k
elif layer < 1:
raise ValueError("Invalid layer")
else:
ret = StructuredDefaultDict(
layer - 1, typ, args_munger, kwargs_munger
)
ret.parent = self
ret.key = k
dict.__setitem__(self, k, ret)
return ret
def __setitem__(self, k, v):
with self._lock:
self.settest(k, v)
if type(v) is StructuredDefaultDict:
layer, typ, args_munger, kwargs_munger = self._stuff
if (
v.layer == layer - 1
and (typ is None or v.type is typ)
and v.args_munger is args_munger
and v.kwargs_munger is kwargs_munger
):
super().__setitem__(k, v)
return
elif type(v) is PickyDefaultDict:
layer, typ, args_munger, kwargs_munger = self._stuff
if (
layer == 1
and v.type is typ
and v.args_munger is args_munger
and v.kwargs_munger is kwargs_munger
):
super().__setitem__(k, v)
return
raise TypeError("Can't set layer {}".format(self.layer))
|
class StructuredDefaultDict(dict):
'''A `defaultdict`-like class with values stored at a specific depth.
Requires an integer to tell it how many layers deep to go.
The innermost layer will be ``PickyDefaultDict``, which will take the
``type``, ``args_munger``, and ``kwargs_munger`` arguments supplied
to my constructor.
'''
def __init__(
self,
layers: int,
type: type = None,
args_munger: callable = _default_args_munger,
kwargs_munger: callable = _default_kwargs_munger,
gettest=lambda k: None,
settest=lambda k, v: None,
):
pass
def __getitem__(self, k):
pass
def __setitem__(self, k, v):
pass
| 4 | 1 | 22 | 0 | 22 | 0 | 4 | 0.08 | 1 | 6 | 1 | 0 | 3 | 8 | 3 | 30 | 91 | 6 | 79 | 24 | 67 | 6 | 44 | 16 | 40 | 5 | 2 | 3 | 12 |
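A short sketch of the layered defaulting the docstring above describes, assuming lisien is importable under the module path shown in this record and that the default arg/kwarg mungers pass no constructor arguments (those defaults are not shown here):

from lisien.allegedb.cache import PickyDefaultDict, StructuredDefaultDict

sdd = StructuredDefaultDict(2, list)    # two layers deep; innermost values are lists

layer1 = sdd["branch"]                  # missing key -> a StructuredDefaultDict one layer shallower
assert isinstance(layer1, StructuredDefaultDict)

layer0 = layer1["turn"]                 # the innermost layer is a PickyDefaultDict(list)
assert isinstance(layer0, PickyDefaultDict)

assert layer0["tick"] == []             # built on demand by calling list()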
146,436 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/cache.py
|
lisien.allegedb.cache.PickyDefaultDict
|
class PickyDefaultDict(dict):
"""A ``defaultdict`` alternative that requires values of a specific type.
Pass some type object (such as a class) to the constructor to
specify what type to use by default, which is the only type I will
accept.
Default values are constructed with no arguments by default;
supply ``args_munger`` and/or ``kwargs_munger`` to override this.
They take arguments ``self`` and the unused key being looked up.
"""
__slots__ = [
"type",
"args_munger",
"kwargs_munger",
"parent",
"key",
"_lock",
]
def __init__(
self,
type: type,
args_munger: callable = _default_args_munger,
kwargs_munger: callable = _default_kwargs_munger,
):
self._lock = RLock()
self.type = type
self.args_munger = args_munger
self.kwargs_munger = kwargs_munger
def __getitem__(self, k):
with self._lock:
if k in self:
return super(PickyDefaultDict, self).__getitem__(k)
try:
ret = self[k] = self.type(
*self.args_munger(self, k), **self.kwargs_munger(self, k)
)
except TypeError:
raise KeyError(k)
return ret
def _create(self, v):
return self.type(v)
def __setitem__(self, k, v):
with self._lock:
if not isinstance(v, self.type):
v = self._create(v)
super(PickyDefaultDict, self).__setitem__(k, v)
|
class PickyDefaultDict(dict):
'''A ``defaultdict`` alternative that requires values of a specific type.
Pass some type object (such as a class) to the constructor to
specify what type to use by default, which is the only type I will
accept.
Default values are constructed with no arguments by default;
supply ``args_munger`` and/or ``kwargs_munger`` to override this.
They take arguments ``self`` and the unused key being looked up.
'''
def __init__(
self,
type: type,
args_munger: callable = _default_args_munger,
kwargs_munger: callable = _default_kwargs_munger,
):
pass
def __getitem__(self, k):
pass
def _create(self, v):
pass
def __setitem__(self, k, v):
pass
| 5 | 1 | 7 | 0 | 7 | 0 | 2 | 0.22 | 1 | 3 | 0 | 0 | 4 | 4 | 4 | 31 | 53 | 8 | 37 | 16 | 27 | 8 | 23 | 11 | 18 | 3 | 2 | 2 | 7 |
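A short sketch of the type enforcement described above, under the same assumptions (lisien importable, default mungers supply no arguments):

from lisien.allegedb.cache import PickyDefaultDict

d = PickyDefaultDict(list)

xs = d["xs"]                 # missing key -> list() is built and stored
assert xs == [] and d["xs"] is xs

d["ys"] = (1, 2, 3)          # not a list, so __setitem__ coerces it via list(...)
assert d["ys"] == [1, 2, 3]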
146,437 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.CharacterMapping
|
class CharacterMapping(GraphsMapping, Signal):
"""A mapping by which to access :class:`Character` objects.
If a character already exists, you can always get its name here to
get the :class:`Character` object. Deleting an item here will
delete the character from the world, even if there are still
:class:`Character` objects referring to it; those won't do
anything useful anymore.
"""
engine = getatt("orm")
def __init__(self, orm):
GraphsMapping.__init__(self, orm)
Signal.__init__(self)
def __getitem__(self, name):
"""Return the named character, if it's been created.
Try to use the cache if possible.
"""
from .character import Character
if name not in self:
raise KeyError("No such character", name)
cache = self.engine._graph_objs
if name not in cache:
cache[name] = Character(
self.engine, name, init_rulebooks=name not in self
)
ret = cache[name]
if not isinstance(ret, Character):
raise TypeError("""Tried to get a graph that isn't a Character.
This should never happen. It probably indicates
a bug in allegedb.""")
return ret
def __setitem__(self, name, value):
"""Make a new character by the given name, and initialize its data to
the given value.
"""
self.engine._init_graph(name, "DiGraph", value)
self.send(self, key=name, val=self.engine._graph_objs[name])
def __delitem__(self, name):
self.engine.del_graph(name)
self.send(self, key=name, val=None)
|
class CharacterMapping(GraphsMapping, Signal):
'''A mapping by which to access :class:`Character` objects.
If a character already exists, you can always get its name here to
get the :class:`Character` object. Deleting an item here will
delete the character from the world, even if there are still
:class:`Character` objects referring to it; those won't do
anything useful anymore.
'''
def __init__(self, orm):
pass
def __getitem__(self, name):
'''Return the named character, if it's been created.
Try to use the cache if possible.
'''
pass
def __setitem__(self, name, value):
'''Make a new character by the given name, and initialize its data to
the given value.
'''
pass
def __delitem__(self, name):
pass
| 5 | 3 | 9 | 1 | 6 | 2 | 2 | 0.5 | 2 | 3 | 1 | 0 | 4 | 0 | 4 | 52 | 50 | 11 | 26 | 9 | 20 | 13 | 22 | 9 | 16 | 4 | 8 | 1 | 7 |
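A minimal sketch of the mapping semantics above; it assumes `eng` is a lisien engine and that the engine exposes this mapping as ``eng.character`` (that attribute name is not confirmed by this record):

eng.character["kobold_cave"] = {}          # __setitem__ creates a new DiGraph character
cave = eng.character["kobold_cave"]        # __getitem__ returns the cached Character
assert "kobold_cave" in eng.character

del eng.character["kobold_cave"]           # __delitem__ deletes the character outright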
146,438 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.CompositeDict
|
class CompositeDict(MutableMapping, Signal):
"""Combine two dictionaries into one"""
def __init__(self, d1, d2):
"""Store dictionaries"""
super().__init__()
self.d1 = d1
self.d2 = d2
def __iter__(self):
"""Iterate over both dictionaries' keys"""
for k in self.d1:
yield k
for k in self.d2:
yield k
def __len__(self):
"""Sum the lengths of both dictionaries"""
return len(self.d1) + len(self.d2)
def __contains__(self, item):
return item in self.d1 or item in self.d2
def __getitem__(self, k):
"""Get an item from ``d1`` if possible, then ``d2``"""
try:
return self.d1[k]
except KeyError:
return self.d2[k]
def __setitem__(self, key, value):
self.d1[key] = value
self.send(self, key=key, value=value)
def __delitem__(self, key):
deleted = False
if key in self.d2:
deleted = True
del self.d2[key]
if key in self.d1:
deleted = True
del self.d1[key]
if not deleted:
raise KeyError("{} is in neither of my wrapped dicts".format(key))
self.send(self, key=key, value=None)
def patch(self, d):
"""Recursive update"""
for k, v in d.items():
if k in self:
self[k].update(v)
else:
self[k] = deepcopy(v)
|
class CompositeDict(MutableMapping, Signal):
'''Combine two dictionaries into one'''
def __init__(self, d1, d2):
'''Store dictionaries'''
pass
def __iter__(self):
'''Iterate over both dictionaries' keys'''
pass
def __len__(self):
'''Sum the lengths of both dictionaries'''
pass
def __contains__(self, item):
pass
def __getitem__(self, k):
'''Get an item from ``d1`` if possible, then ``d2``'''
pass
def __setitem__(self, key, value):
pass
def __delitem__(self, key):
pass
def patch(self, d):
'''Recursive update'''
pass
| 9 | 6 | 5 | 0 | 5 | 1 | 2 | 0.15 | 2 | 2 | 0 | 0 | 8 | 2 | 8 | 49 | 53 | 8 | 39 | 14 | 30 | 6 | 38 | 14 | 29 | 4 | 7 | 2 | 16 |
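The behaviour of CompositeDict follows directly from the methods above; a small sketch, assuming lisien is importable under the module path shown in this record:

from lisien.xcollections import CompositeDict

d1 = {"hp": 10}
d2 = {"hp": 3, "mp": 5}
combo = CompositeDict(d1, d2)

assert combo["hp"] == 10      # d1 is consulted first
assert combo["mp"] == 5       # then d2
assert len(combo) == 3        # lengths are summed, so duplicate keys count twice

combo["xp"] = 1               # writes always land in d1
assert d1["xp"] == 1

del combo["hp"]               # deletes the key from whichever wrapped dicts hold it
assert "hp" not in d1 and "hp" not in d2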
146,439 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.FunctionStore
|
class FunctionStore(Signal):
"""A module-like object that lets you alter its code and save your changes.
Instantiate it with a path to a file that you want to keep the code in.
Assign functions to its attributes, then call its ``save()`` method,
and they'll be unparsed and written to the file.
This is a ``Signal``, so you can pass a function to its ``connect`` method,
and it will be called when a function is added, changed, or deleted.
The keyword arguments will be ``attr``, the name of the function, and ``val``,
the function itself.
"""
def __init__(self, filename):
if not filename.endswith(".py"):
raise ValueError(
"FunctionStore can only work with pure Python source code"
)
super().__init__()
self._filename = os.path.abspath(os.path.realpath(filename))
try:
self.reimport()
except (FileNotFoundError, ModuleNotFoundError) as ex:
self._module = None
self._ast = Module(body=[])
self._ast_idx = {}
self._need_save = False
self._locl = {}
def __getattr__(self, k):
if k in self._locl:
return self._locl[k]
elif self._need_save:
self.save()
return getattr(self._module, k)
elif self._module:
return getattr(self._module, k)
else:
raise AttributeError("No attribute " + repr(k))
def __setattr__(self, k, v):
if not callable(v):
super().__setattr__(k, v)
return
source = getsource(v)
outdented = dedent_source(source)
expr = Expr(parse(outdented))
expr.value.body[0].name = k
if k in self._ast_idx:
self._ast.body[self._ast_idx[k]] = expr
else:
self._ast_idx[k] = len(self._ast.body)
self._ast.body.append(expr)
self._need_save = True
self.send(self, attr=k, val=v)
def __call__(self, v):
setattr(self, v.__name__, v)
return v
def __delattr__(self, k):
del self._locl[k]
del self._ast.body[self._ast_idx[k]]
del self._ast_idx[k]
for name in list(self._ast_idx):
if name > k:
self._ast_idx[name] -= 1
self._need_save = True
self.send(self, attr=k, val=None)
def save(self, reimport=True):
with open(self._filename, "w", encoding="utf-8") as outf:
outf.write("# encoding: utf-8")
Unparser(self._ast, outf)
self._need_save = False
if reimport:
self.reimport()
def reimport(self):
importlib.invalidate_caches()
path, filename = os.path.split(self._filename)
modname = filename[:-3]
if modname in sys.modules:
del sys.modules[modname]
modname = filename[:-3]
spec = importlib.util.spec_from_file_location(modname, self._filename)
self._module = importlib.util.module_from_spec(spec)
sys.modules[modname] = self._module
spec.loader.exec_module(self._module)
self._ast = parse(self._module.__loader__.get_data(self._filename))
self._ast_idx = {}
for i, node in enumerate(self._ast.body):
if hasattr(node, "name"):
self._ast_idx[node.name] = i
elif hasattr(node, "__name__"):
self._ast_idx[node.__name__] = i
self.send(self, attr=None, val=None)
def iterplain(self):
for name, idx in self._ast_idx.items():
yield name, unparse(self._ast.body[idx])
def store_source(self, v, name=None):
self._need_save = True
outdented = dedent_source(v)
mod = parse(outdented)
expr = Expr(mod)
if len(expr.value.body) != 1:
raise ValueError("Tried to store more than one function")
if name is None:
name = expr.value.body[0].name
else:
expr.value.body[0].name = name
if name in self._ast_idx:
self._ast.body[self._ast_idx[name]] = expr
else:
self._ast_idx[name] = len(self._ast.body)
self._ast.body.append(expr)
locl = {}
exec(compile(mod, self._filename, "exec"), {}, locl)
self._locl.update(locl)
self.send(self, attr=name, val=locl[name])
def get_source(self, name):
if name == "truth":
return "def truth(*args):\n\treturn True"
return unparse(self._ast.body[self._ast_idx[name]])
@staticmethod
def truth(*args):
return True
|
class FunctionStore(Signal):
'''A module-like object that lets you alter its code and save your changes.
Instantiate it with a path to a file that you want to keep the code in.
Assign functions to its attributes, then call its ``save()`` method,
and they'll be unparsed and written to the file.
This is a ``Signal``, so you can pass a function to its ``connect`` method,
and it will be called when a function is added, changed, or deleted.
The keyword arguments will be ``attr``, the name of the function, and ``val``,
the function itself.
'''
def __init__(self, filename):
pass
def __getattr__(self, k):
pass
def __setattr__(self, k, v):
pass
def __call__(self, v):
pass
def __delattr__(self, k):
pass
def save(self, reimport=True):
pass
def reimport(self):
pass
def iterplain(self):
pass
def store_source(self, v, name=None):
pass
def get_source(self, name):
pass
@staticmethod
def truth(*args):
pass
| 13 | 1 | 10 | 0 | 10 | 0 | 3 | 0.09 | 1 | 6 | 0 | 1 | 10 | 6 | 11 | 11 | 132 | 14 | 109 | 34 | 96 | 10 | 99 | 31 | 87 | 5 | 1 | 2 | 30 |
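A usage sketch for the FunctionStore above, assuming lisien is importable and that the process may write a scratch ``triggers.py`` in the working directory (both the filename and the trigger function are hypothetical):

from lisien.xcollections import FunctionStore

store = FunctionStore("triggers.py")       # must point at a .py file

@store                                      # __call__ stores the function under its own name
def kobold_alive(character):
    return "kobold" in character.thing      # hypothetical rule trigger

store.save()                                # unparses the stored AST into triggers.py
print(store.get_source("kobold_alive"))     # round-trips the source text
print(dict(store.iterplain()))              # {name: source} for everything stored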
146,440 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/xcollections.py
|
lisien.xcollections.Language
|
class Language(str):
sigs = {}
def __new__(cls, sig, v):
me = str.__new__(cls, v)
cls.sigs[me] = sig
return me
def connect(self, *args, **kwargs):
self.sigs[self].connect(*args, **kwargs)
|
class Language(str):
def __new__(cls, sig, v):
pass
def connect(self, *args, **kwargs):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 68 | 10 | 2 | 8 | 5 | 5 | 0 | 8 | 5 | 5 | 1 | 2 | 0 | 2 |
146,441 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/graph/arrow.py
|
elide.graph.arrow.GraphArrow
|
class GraphArrow:
@property
def slope(self) -> float:
"""Return a float of the increase in y divided by the increase in x,
both from left to right.
Returns ``None`` when vertical.
"""
orig = self.origin
dest = self.destination
ox = orig.x
oy = orig.y
dx = dest.x
dy = dest.y
if oy == dy:
return 0.0
elif ox == dx:
return None
else:
rise = dy - oy
run = dx - ox
return rise / run
@property
def y_intercept(self) -> Optional[float]:
"""Return my Y-intercept.
I probably don't really hit the left edge of the window, but
this is where I would, if I were long enough.
"""
orig = self.origin
dest = self.destination
(ox, oy) = orig.pos
(dx, dy) = dest.pos
denominator = dx - ox
x_numerator = (dy - oy) * ox
y_numerator = denominator * oy
return (y_numerator - x_numerator), denominator
@property
def reciprocal(self) -> Optional["GraphArrow"]:
"""The arrow connecting the same spots in the opposite direction"""
if self.destination.name not in self.board.pred_arrow:
return
if (
self.origin.name
not in self.board.pred_arrow[self.destination.name]
):
return
return self.board.pred_arrow[self.destination.name][self.origin.name]
@property
def selected(self) -> bool:
return self is self.board.app.selection
@selected.setter
def selected(self, b: bool):
self.board.app.selection = self
self.repoint(b)
def __init__(self, *, board, origin, destination):
self.board = board
self.origin = origin
self.destination = destination
def collide_point(self, x: float, y: float) -> bool:
od = (self.origin.name, self.destination.name)
if od not in self.board.arrow_plane._colliders_map:
if od not in self.board.arrow_plane._instructions_map:
shaft_points, head_points = get_points(
self.origin,
self.destination,
self.board.arrow_plane.arrowhead_size,
)
r = self.board.arrow_plane.arrow_width / 2
bg_points = get_quad_vertices(
*shaft_points,
*head_points,
r * self.board.arrow_plane.bg_scale_unselected,
r,
0,
0,
)["shaft_bg"]
else:
bg_points = self.board.arrow_plane._instructions_map[od][
"shaft_bg"
].points
self.board.arrow_plane._colliders_map[od] = Collide2DPoly(
points=bg_points, cache=True
)
return (x, y) in self.board.arrow_plane._colliders_map[
self.origin.name, self.destination.name
]
def pos_along(self, pct: float) -> Tuple[float, float]:
"""Return coordinates for where a Pawn should be if it has travelled
along ``pct`` of my length (between 0 and 1).
Might get complex when I switch over to using beziers for
arrows, but for now this is quite simple, using distance along
a line segment.
"""
if pct < 0 or pct > 1:
raise ValueError("Invalid portion")
(ox, oy) = self.origin.center
(dx, dy) = self.destination.center
xdist = (dx - ox) * pct
ydist = (dy - oy) * pct
return ox + xdist, oy + ydist
@mainthread
def repoint(self, selected: bool = None) -> None:
arrow_plane = self.board.arrow_plane
fbo = arrow_plane._fbo
fbo.bind()
fbo.clear_buffer()
shaft_points, head_points = get_points(
self.origin, self.destination, arrow_plane.arrowhead_size
)
r = arrow_plane.arrow_width / 2
if selected or self.selected:
bg_scale = arrow_plane.bg_scale_selected
bg_color = arrow_plane.bg_color_selected
fg_color = arrow_plane.fg_color_selected
else:
bg_scale = arrow_plane.bg_scale_unselected
bg_color = arrow_plane.bg_color_unselected
fg_color = arrow_plane.fg_color_unselected
plane = self.board.arrow_plane
portal = self.board.character.portal[self.origin.name][
self.destination.name
]
portal_text = str(portal.get(portal.get("_label_stat", None), ""))
label_kwargs = dict(portal.get("label_kwargs", ()))
if portal_text is not None:
label_kwargs["text"] = portal_text
try:
label = self.board.arrow_plane._labels[self.origin.name][
self.destination.name
]
label.text = portal_text
except KeyError:
label = self.board.arrow_plane.labels[self.origin.name][
self.destination.name
] = Label(**DEFAULT_ARROW_LABEL_KWARGS, **label_kwargs)
if (
self.origin.name,
self.destination.name,
) in plane._instructions_map:
verts = get_quad_vertices(
*shaft_points, *head_points, r * bg_scale, r, *label.render()
)
insts = self.board.arrow_plane._instructions_map[
self.origin.name, self.destination.name
]
insts["color0"].rgba = bg_color
insts["color1"].rgba = fg_color
insts["shaft_bg"].points = verts["shaft_bg"]
insts["left_head_bg"].points = verts["left_head_bg"]
insts["right_head_bg"].points = verts["right_head_bg"]
insts["shaft_fg"].points = verts["shaft_fg"]
insts["left_head_fg"].points = verts["left_head_fg"]
insts["right_head_fg"].points = verts["right_head_fg"]
insts["label_rect"].pos = verts["label_pos"]
insts["label_rect"].size = label.render()
label.refresh()
insts["label_rect"].texture = label.texture
plane._colliders_map[self.origin.name, self.destination.name] = (
Collide2DPoly(points=verts["shaft_bg"])
)
else:
plane._instructions_map[
self.origin.name, self.destination.name
] = insts = get_instructions(
*shaft_points,
*head_points,
bg_color,
fg_color,
*label.render(),
label,
)
plane._colliders_map[
self.origin.name, self.destination.name
].points = Collide2DPoly(points=insts["shaft_bg"].points)
myidx = plane._port_index[self.origin.name, self.destination.name]
(ox, oy, dx, dy) = shaft_points
plane._bot_left_corner_xs[myidx] = min((ox, dx))
plane._bot_left_corner_ys[myidx] = min((oy, dy))
plane._top_right_corner_xs[myidx] = max((ox, dx))
plane._top_right_corner_ys[myidx] = max((oy, dy))
fbo.release()
fbo.ask_update()
arrow_plane.canvas.ask_update()
|
class GraphArrow:
@property
def slope(self) -> float:
'''Return a float of the increase in y divided by the increase in x,
both from left to right.
Returns ``None`` when vertical.
'''
pass
@property
def y_intercept(self) -> Optional[float]:
'''Return my Y-intercept.
I probably don't really hit the left edge of the window, but
this is where I would, if I were long enough.
'''
pass
@property
def reciprocal(self) -> Optional["GraphArrow"]:
'''The arrow connecting the same spots in the opposite direction'''
pass
@property
def selected(self) -> bool:
pass
@selected.setter
def selected(self, b: bool):
pass
def __init__(self, *, board, origin, destination):
pass
def collide_point(self, x: float, y: float) -> bool:
pass
def pos_along(self, pct: float) -> Tuple[float, float]:
'''Return coordinates for where a Pawn should be if it has travelled
along ``pct`` of my length (between 0 and 1).
Might get complex when I switch over to using beziers for
arrows, but for now this is quite simple, using distance along
a line segment.
'''
pass
@mainthread
def repoint(self, selected: bool = None) -> None:
pass
| 16 | 4 | 20 | 1 | 18 | 2 | 2 | 0.09 | 0 | 6 | 0 | 1 | 9 | 3 | 9 | 9 | 196 | 14 | 167 | 58 | 151 | 15 | 108 | 52 | 98 | 5 | 0 | 2 | 20 |
146,442 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/graph/arrow.py
|
elide.graph.arrow.ArrowPlane
|
class ArrowPlane(Widget):
data = ListProperty()
arrowhead_size = NumericProperty(10)
arrow_width = NumericProperty(2)
bg_scale_unselected = NumericProperty(4)
bg_scale_selected = NumericProperty(5)
bg_color_selected = ListProperty([0.0, 1.0, 1.0, 1.0])
bg_color_unselected = ListProperty([0.5, 0.5, 0.5, 0.5])
fg_color_unselected = ListProperty([1.0, 1.0, 1.0, 1.0])
fg_color_selected = ListProperty([1.0, 1.0, 1.0, 1.0])
def __init__(self, **kwargs):
self._labels = defaultdict(dict)
self._trigger_redraw = Clock.create_trigger(self.redraw)
self._redraw_bind_uid = self.fbind("data", self._trigger_redraw)
self.bind(arrowhead_size=self._trigger_redraw)
self._colliders_map = {}
self._instructions_map = {}
self._port_index = {}
self._port_l = []
self._bot_left_corner_ys = np.array([])
self._bot_left_corner_xs = np.array([])
self._top_right_corner_ys = np.array([])
self._top_right_corner_xs = np.array([])
super().__init__(**kwargs)
def on_parent(self, *args):
if not self.canvas:
Clock.schedule_once(self.on_parent, 0)
return
with self.canvas:
self._fbo = Fbo(size=self.size)
self._translate = Translate(x=self.x, y=self.y)
self._rectangle = Rectangle(
size=self.size, texture=self._fbo.texture
)
self.redraw()
def have_arrow(self, orig, dest):
return (orig, dest) in self._port_index
def redraw(self, *args):
if not hasattr(self, "_rectangle"):
self._trigger_redraw()
return
fbo = self._fbo
fbo.bind()
fbo.clear()
fbo.clear_buffer()
add = fbo.add
r = self.arrow_width // 2
bg_scale_selected = self.bg_scale_selected
bg_color_unselected = self.bg_color_unselected
fg_color_unselected = self.fg_color_unselected
taillen = self.arrowhead_size
points_map = get_points_multi(
(datum["origspot"], datum["destspot"], taillen)
for datum in self.data
)
port_l = []
bot_left_corner_xs = []
bot_left_corner_ys = []
top_right_corner_xs = []
top_right_corner_ys = []
port_index = self._port_index
colliders_map = self._colliders_map
oxs = []
oys = []
dxs = []
dys = []
for (ox, oy, dx, dy), _ in points_map.values():
oxs.append(ox)
oys.append(oy)
dxs.append(dx)
dys.append(dy)
widths = np.abs(np.array(dxs) - np.array(oxs))
heights = np.abs(np.array(dys) - np.array(oys))
lengths = np.sqrt(np.square(widths) + np.square(heights))
for length, (
port,
((ox, oy, dx, dy), (x1, y1, endx, endy, x2, y2)),
), datum in zip(lengths, points_map.items(), self.data):
if length < r:
continue
bgr = r * bg_scale_selected # change for selectedness pls
instructions = get_instructions(
ox,
oy,
dx,
dy,
x1,
y1,
endx,
endy,
x2,
y2,
bgr,
r,
bg_color_unselected,
fg_color_unselected,
datum.get("label_kwargs", {}),
self._labels[port[0]].get(port[1]),
)
instructions["group"] = grp = InstructionGroup()
grp.add(instructions["color0"])
grp.add(instructions["shaft_bg"])
grp.add(instructions["left_head_bg"])
grp.add(instructions["right_head_bg"])
grp.add(instructions["color1"])
grp.add(instructions["shaft_fg"])
grp.add(instructions["left_head_fg"])
grp.add(instructions["right_head_fg"])
grp.add(instructions["label_rect"])
add(grp)
self._instructions_map[port] = instructions
self._labels[port[0]][port[1]] = instructions["label"]
if ox < dx:
leftx = ox
rightx = dx
else:
rightx = ox
leftx = dx
if oy < dy:
boty = oy
topy = dy
else:
boty = dy
topy = oy
port_index[port] = len(port_l)
port_l.append(port)
bot_left_corner_xs.append(leftx - bgr)
bot_left_corner_ys.append(boty - bgr)
top_right_corner_xs.append(rightx + bgr)
top_right_corner_ys.append(topy + bgr)
colliders_map[port] = Collide2DPoly(
points=instructions["shaft_bg"].points
)
fbo.release()
self.canvas.ask_update()
self._port_l = port_l
self._bot_left_corner_xs = np.array(bot_left_corner_xs)
self._bot_left_corner_ys = np.array(bot_left_corner_ys)
self._top_right_corner_xs = np.array(top_right_corner_xs)
self._top_right_corner_ys = np.array(top_right_corner_ys)
def add_new_portal(self, datum: dict) -> None:
orig_spot = datum["origspot"]
dest_spot = datum["destspot"]
shaft_points, head_points = get_points(
orig_spot, dest_spot, self.arrowhead_size
)
self.unbind_uid("data", self._redraw_bind_uid)
self.data.append(datum)
r = self.arrow_width / 2
bgr = r * self.bg_scale_unselected
instructions = self._instructions_map[
orig_spot.name, dest_spot.name
] = get_instructions(
*shaft_points,
*head_points,
bgr,
r,
self.bg_color_unselected,
self.fg_color_unselected,
datum.get("label_kwargs", {}),
self._labels[orig_spot.name][dest_spot.name],
)
instructions["group"] = grp = InstructionGroup()
grp.add(instructions["color0"])
grp.add(instructions["shaft_bg"])
grp.add(instructions["left_head_bg"])
grp.add(instructions["right_head_bg"])
grp.add(instructions["color1"])
grp.add(instructions["shaft_fg"])
grp.add(instructions["left_head_fg"])
grp.add(instructions["right_head_fg"])
grp.add(instructions["label"])
self._labels[orig_spot.name][dest_spot.name] = instructions["label"]
self._fbo.add(grp)
ox, oy, dx, dy = shaft_points
if ox < dx:
left_x = ox
right_x = dx
else:
right_x = ox
left_x = dx
if oy < dy:
bot_y = oy
top_y = dy
else:
bot_y = dy
top_y = oy
self._port_index[orig_spot.name, dest_spot.name] = len(self._port_l)
self._port_l.append((orig_spot.name, dest_spot.name))
self._bot_left_corner_xs = np.array(
list(self._bot_left_corner_xs) + [left_x - bgr]
)
self._bot_left_corner_ys = np.array(
list(self._bot_left_corner_ys) + [bot_y - bgr]
)
self._top_right_corner_xs = np.array(
list(self._top_right_corner_xs) + [right_x + bgr]
)
self._top_right_corner_ys = np.array(
list(self._top_right_corner_ys) + [top_y + bgr]
)
self._colliders_map[orig_spot.name, dest_spot.name] = Collide2DPoly(
points=instructions["shaft_bg"].points
)
self.canvas.ask_update()
self._redraw_bind_uid = self.fbind("data", self._trigger_redraw)
def remove_edge(self, orig, dest):
self._fbo.bind()
self._fbo.clear_buffer()
self._fbo.remove(self._instructions_map[orig, dest]["group"])
index = self._port_index[orig, dest]
for port in self._port_l[index:]:
self._port_index[port] -= 1
del self._port_index[orig, dest]
del self._instructions_map[orig, dest]
del self._colliders_map[orig, dest]
del self._port_l[index]
for arr in (
"_bot_left_corner_ys",
"_bot_left_corner_xs",
"_top_right_corner_ys",
"_top_right_corner_xs",
):
dat = list(getattr(self, arr))
del dat[index]
setattr(self, arr, np.array(dat))
self._fbo.release()
self.canvas.ask_update()
def update_portal_label(self, orig, dest, text):
rect = self._instructions_map[orig, dest]["label"]
label = self._labels[orig][dest]
label.text = text
label.refresh()
rect.texture = label.texture
def iter_collided_edges(self, x: float, y: float) -> Iterator:
x, y = map(float, (x, y))
collider_map = self._colliders_map
hits = (
(self._bot_left_corner_xs <= x)
& (self._bot_left_corner_ys <= y)
& (x <= self._top_right_corner_xs)
& (y <= self._top_right_corner_ys)
)
for port in map(
itemgetter(0), filter(itemgetter(1), zip(self._port_l, hits))
):
if collider_map[port].collide_point(x, y):
yield port
def on_pos(self, *args):
if not hasattr(self, "_translate"):
return
self._translate.x, self._translate.y = self.pos
self.canvas.ask_update()
def on_size(self, *args):
if not hasattr(self, "_rectangle") or not hasattr(self, "_fbo"):
return
self._rectangle.size = self._fbo.size = self.size
self.redraw()
|
class ArrowPlane(Widget):
def __init__(self, **kwargs):
pass
def on_parent(self, *args):
pass
def have_arrow(self, orig, dest):
pass
def redraw(self, *args):
pass
def add_new_portal(self, datum: dict) -> None:
pass
def remove_edge(self, orig, dest):
pass
def update_portal_label(self, orig, dest, text):
pass
def iter_collided_edges(self, x: float, y: float) -> Iterator:
pass
def on_pos(self, *args):
pass
def on_size(self, *args):
pass
| 11 | 0 | 25 | 0 | 25 | 0 | 3 | 0 | 1 | 8 | 0 | 0 | 10 | 16 | 10 | 10 | 268 | 10 | 258 | 89 | 247 | 1 | 192 | 84 | 181 | 7 | 1 | 2 | 25 |
146,443 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/elide/elide/gen.py
|
elide.gen.GridGeneratorDialog
|
class GridGeneratorDialog(BoxLayout):
xval = NumericProperty()
yval = NumericProperty()
directions = OptionProperty(None, options=[None, 4, 8])
def generate(self, engine):
x = int(self.xval)
y = int(self.yval)
if x < 1 or y < 1:
return False
elif self.directions == 4:
# instead, we're running just after game init, before the view is open on it, and we'll make a character ourselves
engine.add_character("physical", grid_2d_graph(x, y))
return True
elif self.directions == 8:
engine.add_character("physical", grid_2d_8graph(x, y))
return True
else:
return False
def validate(self):
return self.directions and int(self.xval) and int(self.yval)
|
class GridGeneratorDialog(BoxLayout):
def generate(self, engine):
pass
def validate(self):
pass
| 3 | 0 | 8 | 0 | 8 | 1 | 3 | 0.05 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 22 | 2 | 19 | 8 | 16 | 1 | 16 | 8 | 13 | 4 | 1 | 1 | 5 |
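The generate() method above builds the ``physical`` character from a grid graph; a sketch of the same call outside the dialog, assuming `eng` is a lisien engine and that ``grid_2d_graph`` is networkx's grid generator (the 8-way variant is lisien-specific and not shown here):

import networkx as nx

# A 3x3, 4-way-connected grid: what generate() does when directions == 4.
eng.add_character("physical", nx.grid_2d_graph(3, 3))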
146,444 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/allegedb/graph.py
|
lisien.allegedb.graph.Edge
|
class Edge(AbstractEntityMapping):
"""Mapping for edge attributes"""
__slots__ = (
"graph",
"orig",
"dest",
"idx",
"db",
"__weakref__",
"_iter_stuff",
"_cache_contains_stuff",
"_len_stuff",
"_get_cache_stuff",
"_set_db_stuff",
"_set_cache_stuff",
)
set_db_time = set_cache_time = 0
def __init__(self, graph, orig, dest, idx=0):
"""Store the graph, the names of the nodes, and the index.
For non-multigraphs the index is always 0.
"""
super().__init__()
self.graph = graph
self.db = db = graph.db
self.orig = orig
self.dest = dest
self.idx = idx
edge_val_cache = db._edge_val_cache
graphn = graph.name
btt = db._btt
self._iter_stuff = (
edge_val_cache.iter_entity_keys,
graphn,
orig,
dest,
idx,
btt,
)
self._cache_contains_stuff = (
edge_val_cache.contains_key,
graphn,
orig,
dest,
idx,
)
self._len_stuff = (
edge_val_cache.count_entity_keys,
graphn,
orig,
dest,
idx,
btt,
)
self._get_cache_stuff = (
edge_val_cache.retrieve,
graphn,
orig,
dest,
idx,
)
self._set_db_stuff = (db.query.edge_val_set, graphn, orig, dest, idx)
self._set_cache_stuff = (edge_val_cache.store, graphn, orig, dest, idx)
def __repr__(self):
return "<{} in graph {} from {} to {} containing {}>".format(
self.__class__.__name__,
self.graph.name,
self.orig,
self.dest,
dict(self),
)
def __str__(self):
return str(dict(self))
def __iter__(self):
iter_entity_keys, graphn, orig, dest, idx, btt = self._iter_stuff
return iter_entity_keys(graphn, orig, dest, idx, *btt())
def _cache_contains(self, key, branch, turn, tick):
contains_key, graphn, orig, dest, idx = self._cache_contains_stuff
return contains_key(graphn, orig, dest, idx, key, branch, turn, tick)
def __len__(self):
count_entity_keys, graphn, orig, dest, idx, btt = self._len_stuff
return count_entity_keys(graphn, orig, dest, idx, *btt())
def _get_cache(self, key, branch, turn, tick):
retrieve, graphn, orig, dest, idx = self._get_cache_stuff
return retrieve(graphn, orig, dest, idx, key, branch, turn, tick)
def _set_db(self, key, branch, turn, tick, value):
edge_val_set, graphn, orig, dest, idx = self._set_db_stuff
edge_val_set(graphn, orig, dest, idx, key, branch, turn, tick, value)
def _set_cache(self, key, branch, turn, tick, value):
store, graphn, orig, dest, idx = self._set_cache_stuff
store(graphn, orig, dest, idx, key, branch, turn, tick, value)
|
class Edge(AbstractEntityMapping):
'''Mapping for edge attributes'''
def __init__(self, graph, orig, dest, idx=0):
'''Store the graph, the names of the nodes, and the index.
For non-multigraphs the index is always 0.
'''
pass
def __repr__(self):
pass
def __str__(self):
pass
def __iter__(self):
pass
def _cache_contains(self, key, branch, turn, tick):
pass
def __len__(self):
pass
def _get_cache(self, key, branch, turn, tick):
pass
def _set_db(self, key, branch, turn, tick, value):
pass
def _set_cache(self, key, branch, turn, tick, value):
pass
| 10 | 2 | 8 | 0 | 8 | 0 | 1 | 0.05 | 1 | 3 | 0 | 2 | 9 | 11 | 9 | 65 | 103 | 13 | 86 | 32 | 76 | 4 | 41 | 32 | 31 | 1 | 10 | 0 | 9 |
146,445 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/tests/test_proxy.py
|
lisien.tests.test_proxy.ProxyTest
|
class ProxyTest(lisien.allegedb.tests.test_all.AllegedTest):
def setUp(self):
self.manager = EngineProcessManager()
self.tmp_path = tempfile.mkdtemp()
self.engine = self.manager.start(
self.tmp_path,
connect_string="sqlite:///:memory:",
enforce_end_of_time=False,
workers=0,
threaded_triggers=False,
)
self.graphmakers = (self.engine.new_character,)
self.addCleanup(self._do_cleanup)
def _do_cleanup(self):
self.manager.shutdown()
shutil.rmtree(self.tmp_path)
|
class ProxyTest(lisien.allegedb.tests.test_all.AllegedTest):
def setUp(self):
pass
def _do_cleanup(self):
pass
| 3 | 0 | 8 | 0 | 8 | 0 | 1 | 0 | 1 | 1 | 1 | 5 | 2 | 4 | 2 | 75 | 17 | 1 | 16 | 7 | 13 | 0 | 10 | 7 | 7 | 1 | 3 | 0 | 2 |
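The setUp/cleanup pair above is the usual lifecycle for the proxy engine; the same pattern outside of unittest, assuming EngineProcessManager is importable from lisien's proxy module (import path assumed):

import shutil
import tempfile

from lisien.proxy import EngineProcessManager

tmp_path = tempfile.mkdtemp()
manager = EngineProcessManager()
engine = manager.start(
    tmp_path, connect_string="sqlite:///:memory:", workers=0
)
try:
    phys = engine.new_character("physical", hello="hi")
    engine.next_turn()
finally:
    manager.shutdown()
    shutil.rmtree(tmp_path)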
146,446 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/tests/test_proxy.py
|
lisien.tests.test_proxy.TestSwitchMainBranch
|
class TestSwitchMainBranch(ProxyTest):
def test_switch_main_branch(self):
phys = self.engine.new_character("physical", hello="hi")
self.engine.next_turn()
phys.stat["hi"] = "hello"
with pytest.raises(ValueError):
self.engine.switch_main_branch("tronc")
self.engine.turn = 0
self.engine.tick = 0
self.engine.switch_main_branch("tronc")
assert self.engine.branch == "tronc"
assert phys
assert "hi" not in phys.stat
assert "hello" in phys.stat
self.engine.next_turn()
phys.stat["hi"] = "hey there"
self.engine.turn = 0
self.engine.tick = 0
self.engine.switch_main_branch("trunk")
assert phys.stat["hello"] == "hi"
self.engine.turn = 1
assert phys.stat["hello"] == "hi"
assert phys.stat["hi"] == "hello"
|
class TestSwitchMainBranch(ProxyTest):
def test_switch_main_branch(self):
pass
| 2 | 0 | 22 | 0 | 22 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 76 | 23 | 0 | 23 | 3 | 21 | 0 | 23 | 3 | 21 | 1 | 4 | 1 | 1 |
146,447 |
LogicalDash/LiSE
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LogicalDash_LiSE/lisien/lisien/util.py
|
lisien.util.EntityStatAccessor
|
class EntityStatAccessor(object):
__slots__ = [
"engine",
"entity",
"branch",
"turn",
"tick",
"stat",
"current",
"mungers",
]
def __init__(
self,
entity,
stat,
engine=None,
branch=None,
turn=None,
tick=None,
current=False,
mungers: list = None,
):
if engine is None:
engine = entity.engine
if branch is None:
branch = engine.branch
if turn is None:
turn = engine.turn
if mungers is None:
mungers = []
self.current = current
self.engine = engine
self.entity = entity
self.stat = stat
self.branch = branch
self.turn = turn
self.tick = tick
self.mungers = mungers
def __call__(self, branch=None, turn=None, tick=None):
if self.current:
res = self.entity[self.stat]
else:
branc, trn, tck = self.engine._btt()
self.engine.branch = branch or self.branch
self.engine.turn = turn if turn is not None else self.turn
if tick is not None:
self.engine.tick = tick
elif self.tick is not None:
self.engine.tick = self.tick
if hasattr(self.entity, "stat"):
res = self.entity.stat[self.stat]
else:
res = self.entity[self.stat]
self.engine._set_btt(branc, trn, tck)
for munger in self.mungers:
res = munger(res)
return res
def __ne__(self, other):
return self() != other
def __str__(self):
return str(self())
def __repr__(self):
return "EntityStatAccessor({}[{}]{}), {} mungers".format(
self.entity,
self.stat,
""
if self.current
else ", branch={}, turn={}, tick={}".format(
self.branch, self.turn, self.tick
),
len(self.mungers),
)
def __gt__(self, other):
return self() > other
def __ge__(self, other):
return self() >= other
def __lt__(self, other):
return self() < other
def __le__(self, other):
return self() <= other
def __eq__(self, other):
return self() == other
def munge(self, munger):
return EntityStatAccessor(
self.entity,
self.stat,
self.engine,
self.branch,
self.turn,
self.tick,
self.current,
self.mungers + [munger],
)
def __add__(self, other):
return self.munge(partial(add, other))
def __sub__(self, other):
return self.munge(partial(sub, other))
def __mul__(self, other):
return self.munge(partial(mul, other))
def __rpow__(self, other, modulo=None):
return self.munge(partial(pow, other, modulo=modulo))
def __rdiv__(self, other):
return self.munge(partial(truediv, other))
def __rfloordiv__(self, other):
return self.munge(partial(floordiv, other))
def __rmod__(self, other):
return self.munge(partial(mod, other))
def __getitem__(self, k):
return self.munge(lambda x: x[k])
def iter_history(self, beginning, end):
"""Iterate over all the values this stat has had in the given window, inclusive."""
# It might be useful to do this in a way that doesn't change the
# engine's time, perhaps for thread safety
engine = self.engine
entity = self.entity
oldturn = engine.turn
oldtick = engine.tick
stat = self.stat
for turn in range(beginning, end + 1):
engine.turn = turn
try:
y = entity[stat]
except KeyError:
yield None
continue
if hasattr(y, "unwrap"):
y = y.unwrap()
yield y
engine.turn = oldturn
engine.tick = oldtick
|
class EntityStatAccessor(object):
def __init__(
self,
entity,
stat,
engine=None,
branch=None,
turn=None,
tick=None,
current=False,
mungers: list = None,
):
pass
def __call__(self, branch=None, turn=None, tick=None):
pass
def __ne__(self, other):
pass
def __str__(self):
pass
def __repr__(self):
pass
def __gt__(self, other):
pass
def __ge__(self, other):
pass
def __lt__(self, other):
pass
def __le__(self, other):
pass
def __eq__(self, other):
pass
def munge(self, munger):
pass
def __add__(self, other):
pass
def __sub__(self, other):
pass
def __mul__(self, other):
pass
def __rpow__(self, other, modulo=None):
pass
def __rdiv__(self, other):
pass
def __rfloordiv__(self, other):
pass
def __rmod__(self, other):
pass
def __getitem__(self, k):
pass
def iter_history(self, beginning, end):
'''Iterate over all the values this stat has had in the given window, inclusive.'''
pass
| 21 | 1 | 6 | 0 | 6 | 0 | 2 | 0.02 | 1 | 5 | 0 | 1 | 20 | 8 | 20 | 20 | 150 | 20 | 127 | 50 | 96 | 3 | 87 | 40 | 66 | 7 | 1 | 2 | 34 |
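A usage sketch for the accessor above; `eng` and `kobold` (an entity carrying a ``health`` stat) are hypothetical names, and the calls mirror the methods shown in this record:

from lisien.util import EntityStatAccessor

health = EntityStatAccessor(kobold, "health", engine=eng, current=True)

print(health())              # __call__ reads the stat as it is right now
halved = health * 0.5        # __mul__ attaches a munger; nothing is read yet
print(halved())              # the same read, passed through partial(mul, 0.5)

# Every value the stat held over turns 0..5, inclusive:
print(list(health.iter_history(0, 5)))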