content (string, 1 to 103k chars, nullable) | path (string, 8 to 216 chars) | filename (string, 2 to 179 chars) | language (string, 15 classes) | size_bytes (int64, 2 to 189k) | quality_score (float64, 0.5 to 0.95) | complexity (float64, 0 to 1) | documentation_ratio (float64, 0 to 1) | repository (string, 5 classes) | stars (int64, 0 to 1k) | created_date (stringdate, 2023-07-10 19:21:08 to 2025-07-09 19:11:45) | license (string, 4 classes) | is_test (bool, 2 classes) | file_hash (string, 32 chars) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\interpolatableTestContourOrder.cpython-313.pyc | interpolatableTestContourOrder.cpython-313.pyc | Other | 2,180 | 0.8 | 0 | 0 | node-utils | 383 | 2024-09-22T02:45:00.206470 | MIT | true | 14fa99b1ec3c112ea344ed2651045872 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\interpolatableTestStartingPoint.cpython-313.pyc | interpolatableTestStartingPoint.cpython-313.pyc | Other | 3,881 | 0.8 | 0 | 0 | python-kit | 966 | 2024-03-09T01:00:20.374928 | MIT | true | 8f1fd1e53cbe2337aaeb014a351dfc7e |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\interpolate_layout.cpython-313.pyc | interpolate_layout.cpython-313.pyc | Other | 5,699 | 0.8 | 0.012821 | 0.013699 | node-utils | 444 | 2024-07-23T18:30:40.317388 | BSD-3-Clause | false | 8fdd942d94767fbc2aa682c6b1383b90 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\iup.cpython-313.pyc | iup.cpython-313.pyc | Other | 14,836 | 0.8 | 0.033473 | 0.013953 | react-lib | 74 | 2025-04-26T22:31:29.284666 | BSD-3-Clause | false | c5b11f51fc2965baa4dda3d6b505cc97 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\merger.cpython-313.pyc | merger.cpython-313.pyc | Other | 80,420 | 0.75 | 0.006042 | 0.00313 | python-kit | 126 | 2024-08-21T00:10:49.109496 | MIT | false | f042eb17afca3f9169f3e3dfca04b60f |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\models.cpython-313.pyc | models.cpython-313.pyc | Other | 28,607 | 0.95 | 0.024931 | 0.002915 | vue-tools | 920 | 2024-08-19T05:40:37.486244 | MIT | false | 3ebc7483a6fbeadbe98c711a203a7168 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\multiVarStore.cpython-313.pyc | multiVarStore.cpython-313.pyc | Other | 14,332 | 0.8 | 0 | 0 | react-lib | 129 | 2025-04-15T06:46:45.553105 | Apache-2.0 | false | 98f810d82048773ab434edb465f4c2a6 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\mutator.cpython-313.pyc | mutator.cpython-313.pyc | Other | 22,425 | 0.95 | 0.004695 | 0.005102 | awesome-app | 719 | 2024-04-06T03:55:32.095984 | Apache-2.0 | false | b4501d754a32cd28d3c8985c46359008 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\mvar.cpython-313.pyc | mvar.cpython-313.pyc | Other | 1,372 | 0.8 | 0 | 0 | node-utils | 49 | 2024-08-17T00:34:45.811298 | Apache-2.0 | false | 036c39feb576bac128f31e4019c507b1 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\plot.cpython-313.pyc | plot.cpython-313.pyc | Other | 13,648 | 0.8 | 0 | 0 | python-kit | 190 | 2025-01-02T13:30:42.957025 | Apache-2.0 | false | b5b88057c9b56acba8fa53ff76c540b6 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\stat.cpython-313.pyc | stat.cpython-313.pyc | Other | 6,624 | 0.8 | 0.060976 | 0 | node-utils | 206 | 2023-09-06T23:26:01.305032 | BSD-3-Clause | false | 24ddae2bf3a99e0fb357bd44e6a5bb16 |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\varStore.cpython-313.pyc | varStore.cpython-313.pyc | Other | 29,828 | 0.8 | 0.017094 | 0.008929 | vue-tools | 394 | 2025-07-08T09:22:11.636682 | Apache-2.0 | false | 5fee16f73cf954087322772a57fc8c2e |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 61,803 | 0.75 | 0.028571 | 0.003617 | awesome-app | 707 | 2024-04-13T22:13:42.475270 | BSD-3-Clause | false | 59c5b987f65a23ba37927a54964c25ff |
\n\n | .venv\Lib\site-packages\fontTools\varLib\__pycache__\__main__.cpython-313.pyc | __main__.cpython-313.pyc | Other | 370 | 0.7 | 0 | 0 | awesome-app | 61 | 2024-03-27T12:19:25.836471 | Apache-2.0 | false | bf971b1c315f50f4df517fdc04d5a9ae |
from fontTools.voltLib.error import VoltLibError\nfrom typing import NamedTuple\n\n\nclass Pos(NamedTuple):\n adv: int\n dx: int\n dy: int\n adv_adjust_by: dict\n dx_adjust_by: dict\n dy_adjust_by: dict\n\n def __str__(self):\n res = " POS"\n for attr in ("adv", "dx", "dy"):\n value = getattr(self, attr)\n if value is not None:\n res += f" {attr.upper()} {value}"\n adjust_by = getattr(self, f"{attr}_adjust_by", {})\n for size, adjustment in adjust_by.items():\n res += f" ADJUST_BY {adjustment} AT {size}"\n res += " END_POS"\n return res\n\n\nclass Element(object):\n def __init__(self, location=None):\n self.location = location\n\n def build(self, builder):\n pass\n\n def __str__(self):\n raise NotImplementedError\n\n\nclass Statement(Element):\n pass\n\n\nclass Expression(Element):\n pass\n\n\nclass VoltFile(Statement):\n def __init__(self):\n Statement.__init__(self, location=None)\n self.statements = []\n\n def build(self, builder):\n for s in self.statements:\n s.build(builder)\n\n def __str__(self):\n return "\n" + "\n".join(str(s) for s in self.statements) + " END\n"\n\n\nclass GlyphDefinition(Statement):\n def __init__(self, name, gid, gunicode, gtype, components, location=None):\n Statement.__init__(self, location)\n self.name = name\n self.id = gid\n self.unicode = gunicode\n self.type = gtype\n self.components = components\n\n def __str__(self):\n res = f'DEF_GLYPH "{self.name}" ID {self.id}'\n if self.unicode is not None:\n if len(self.unicode) > 1:\n unicodes = ",".join(f"U+{u:04X}" for u in self.unicode)\n res += f' UNICODEVALUES "{unicodes}"'\n else:\n res += f" UNICODE {self.unicode[0]}"\n if self.type is not None:\n res += f" TYPE {self.type}"\n if self.components is not None:\n res += f" COMPONENTS {self.components}"\n res += " END_GLYPH"\n return res\n\n\nclass GroupDefinition(Statement):\n def __init__(self, name, enum, location=None):\n Statement.__init__(self, location)\n self.name = name\n self.enum = enum\n self.glyphs_ = None\n\n def glyphSet(self, groups=None):\n if groups is not None and self.name in groups:\n raise VoltLibError(\n 'Group "%s" contains itself.' 
% (self.name), self.location\n )\n if self.glyphs_ is None:\n if groups is None:\n groups = set({self.name})\n else:\n groups.add(self.name)\n self.glyphs_ = self.enum.glyphSet(groups)\n return self.glyphs_\n\n def __str__(self):\n enum = self.enum and str(self.enum) or ""\n return f'DEF_GROUP "{self.name}"\n{enum}\nEND_GROUP'\n\n\nclass GlyphName(Expression):\n """A single glyph name, such as cedilla."""\n\n def __init__(self, glyph, location=None):\n Expression.__init__(self, location)\n self.glyph = glyph\n\n def glyphSet(self):\n return (self.glyph,)\n\n def __str__(self):\n return f' GLYPH "{self.glyph}"'\n\n\nclass Enum(Expression):\n """An enum"""\n\n def __init__(self, enum, location=None):\n Expression.__init__(self, location)\n self.enum = enum\n\n def __iter__(self):\n for e in self.glyphSet():\n yield e\n\n def glyphSet(self, groups=None):\n glyphs = []\n for element in self.enum:\n if isinstance(element, (GroupName, Enum)):\n glyphs.extend(element.glyphSet(groups))\n else:\n glyphs.extend(element.glyphSet())\n return tuple(glyphs)\n\n def __str__(self):\n enum = "".join(str(e) for e in self.enum)\n return f" ENUM{enum} END_ENUM"\n\n\nclass GroupName(Expression):\n """A glyph group"""\n\n def __init__(self, group, parser, location=None):\n Expression.__init__(self, location)\n self.group = group\n self.parser_ = parser\n\n def glyphSet(self, groups=None):\n group = self.parser_.resolve_group(self.group)\n if group is not None:\n self.glyphs_ = group.glyphSet(groups)\n return self.glyphs_\n else:\n raise VoltLibError(\n 'Group "%s" is used but undefined.' % (self.group), self.location\n )\n\n def __str__(self):\n return f' GROUP "{self.group}"'\n\n\nclass Range(Expression):\n """A glyph range"""\n\n def __init__(self, start, end, parser, location=None):\n Expression.__init__(self, location)\n self.start = start\n self.end = end\n self.parser = parser\n\n def glyphSet(self):\n return tuple(self.parser.glyph_range(self.start, self.end))\n\n def __str__(self):\n return f' RANGE "{self.start}" TO "{self.end}"'\n\n\nclass ScriptDefinition(Statement):\n def __init__(self, name, tag, langs, location=None):\n Statement.__init__(self, location)\n self.name = name\n self.tag = tag\n self.langs = langs\n\n def __str__(self):\n res = "DEF_SCRIPT"\n if self.name is not None:\n res += f' NAME "{self.name}"'\n res += f' TAG "{self.tag}"\n\n'\n for lang in self.langs:\n res += f"{lang}"\n res += "END_SCRIPT"\n return res\n\n\nclass LangSysDefinition(Statement):\n def __init__(self, name, tag, features, location=None):\n Statement.__init__(self, location)\n self.name = name\n self.tag = tag\n self.features = features\n\n def __str__(self):\n res = "DEF_LANGSYS"\n if self.name is not None:\n res += f' NAME "{self.name}"'\n res += f' TAG "{self.tag}"\n\n'\n for feature in self.features:\n res += f"{feature}"\n res += "END_LANGSYS\n"\n return res\n\n\nclass FeatureDefinition(Statement):\n def __init__(self, name, tag, lookups, location=None):\n Statement.__init__(self, location)\n self.name = name\n self.tag = tag\n self.lookups = lookups\n\n def __str__(self):\n res = f'DEF_FEATURE NAME "{self.name}" TAG "{self.tag}"\n'\n res += " " + " ".join(f'LOOKUP "{l}"' for l in self.lookups) + "\n"\n res += "END_FEATURE\n"\n return res\n\n\nclass LookupDefinition(Statement):\n def __init__(\n self,\n name,\n process_base,\n process_marks,\n mark_glyph_set,\n direction,\n reversal,\n comments,\n context,\n sub,\n pos,\n location=None,\n ):\n Statement.__init__(self, location)\n self.name = name\n 
self.process_base = process_base\n self.process_marks = process_marks\n self.mark_glyph_set = mark_glyph_set\n self.direction = direction\n self.reversal = reversal\n self.comments = comments\n self.context = context\n self.sub = sub\n self.pos = pos\n\n def __str__(self):\n res = f'DEF_LOOKUP "{self.name}"'\n res += f' {self.process_base and "PROCESS_BASE" or "SKIP_BASE"}'\n if self.process_marks:\n res += " PROCESS_MARKS "\n if self.mark_glyph_set:\n res += f'MARK_GLYPH_SET "{self.mark_glyph_set}"'\n elif isinstance(self.process_marks, str):\n res += f'"{self.process_marks}"'\n else:\n res += "ALL"\n else:\n res += " SKIP_MARKS"\n if self.direction is not None:\n res += f" DIRECTION {self.direction}"\n if self.reversal:\n res += " REVERSAL"\n if self.comments is not None:\n comments = self.comments.replace("\n", r"\n")\n res += f'\nCOMMENTS "{comments}"'\n if self.context:\n res += "\n" + "\n".join(str(c) for c in self.context)\n else:\n res += "\nIN_CONTEXT\nEND_CONTEXT"\n if self.sub:\n res += f"\n{self.sub}"\n if self.pos:\n res += f"\n{self.pos}"\n return res\n\n\nclass SubstitutionDefinition(Statement):\n def __init__(self, mapping, location=None):\n Statement.__init__(self, location)\n self.mapping = mapping\n\n def __str__(self):\n res = "AS_SUBSTITUTION\n"\n for src, dst in self.mapping.items():\n src = "".join(str(s) for s in src)\n dst = "".join(str(d) for d in dst)\n res += f"SUB{src}\nWITH{dst}\nEND_SUB\n"\n res += "END_SUBSTITUTION"\n return res\n\n\nclass SubstitutionSingleDefinition(SubstitutionDefinition):\n pass\n\n\nclass SubstitutionMultipleDefinition(SubstitutionDefinition):\n pass\n\n\nclass SubstitutionLigatureDefinition(SubstitutionDefinition):\n pass\n\n\nclass SubstitutionAlternateDefinition(SubstitutionDefinition):\n pass\n\n\nclass SubstitutionReverseChainingSingleDefinition(SubstitutionDefinition):\n pass\n\n\nclass PositionAttachDefinition(Statement):\n def __init__(self, coverage, coverage_to, location=None):\n Statement.__init__(self, location)\n self.coverage = coverage\n self.coverage_to = coverage_to\n\n def __str__(self):\n coverage = "".join(str(c) for c in self.coverage)\n res = f"AS_POSITION\nATTACH{coverage}\nTO"\n for coverage, anchor in self.coverage_to:\n coverage = "".join(str(c) for c in coverage)\n res += f'{coverage} AT ANCHOR "{anchor}"'\n res += "\nEND_ATTACH\nEND_POSITION"\n return res\n\n\nclass PositionAttachCursiveDefinition(Statement):\n def __init__(self, coverages_exit, coverages_enter, location=None):\n Statement.__init__(self, location)\n self.coverages_exit = coverages_exit\n self.coverages_enter = coverages_enter\n\n def __str__(self):\n res = "AS_POSITION\nATTACH_CURSIVE"\n for coverage in self.coverages_exit:\n coverage = "".join(str(c) for c in coverage)\n res += f"\nEXIT {coverage}"\n for coverage in self.coverages_enter:\n coverage = "".join(str(c) for c in coverage)\n res += f"\nENTER {coverage}"\n res += "\nEND_ATTACH\nEND_POSITION"\n return res\n\n\nclass PositionAdjustPairDefinition(Statement):\n def __init__(self, coverages_1, coverages_2, adjust_pair, location=None):\n Statement.__init__(self, location)\n self.coverages_1 = coverages_1\n self.coverages_2 = coverages_2\n self.adjust_pair = adjust_pair\n\n def __str__(self):\n res = "AS_POSITION\nADJUST_PAIR\n"\n for coverage in self.coverages_1:\n coverage = " ".join(str(c) for c in coverage)\n res += f" FIRST {coverage}"\n res += "\n"\n for coverage in self.coverages_2:\n coverage = " ".join(str(c) for c in coverage)\n res += f" SECOND {coverage}"\n res += "\n"\n for 
(id_1, id_2), (pos_1, pos_2) in self.adjust_pair.items():\n res += f" {id_1} {id_2} BY{pos_1}{pos_2}\n"\n res += "\nEND_ADJUST\nEND_POSITION"\n return res\n\n\nclass PositionAdjustSingleDefinition(Statement):\n def __init__(self, adjust_single, location=None):\n Statement.__init__(self, location)\n self.adjust_single = adjust_single\n\n def __str__(self):\n res = "AS_POSITION\nADJUST_SINGLE"\n for coverage, pos in self.adjust_single:\n coverage = "".join(str(c) for c in coverage)\n res += f"{coverage} BY{pos}"\n res += "\nEND_ADJUST\nEND_POSITION"\n return res\n\n\nclass ContextDefinition(Statement):\n def __init__(self, ex_or_in, left=None, right=None, location=None):\n Statement.__init__(self, location)\n self.ex_or_in = ex_or_in\n self.left = left if left is not None else []\n self.right = right if right is not None else []\n\n def __str__(self):\n res = self.ex_or_in + "\n"\n for coverage in self.left:\n coverage = "".join(str(c) for c in coverage)\n res += f" LEFT{coverage}\n"\n for coverage in self.right:\n coverage = "".join(str(c) for c in coverage)\n res += f" RIGHT{coverage}\n"\n res += "END_CONTEXT"\n return res\n\n\nclass AnchorDefinition(Statement):\n def __init__(self, name, gid, glyph_name, component, locked, pos, location=None):\n Statement.__init__(self, location)\n self.name = name\n self.gid = gid\n self.glyph_name = glyph_name\n self.component = component\n self.locked = locked\n self.pos = pos\n\n def __str__(self):\n locked = self.locked and " LOCKED" or ""\n return (\n f'DEF_ANCHOR "{self.name}"'\n f" ON {self.gid}"\n f" GLYPH {self.glyph_name}"\n f" COMPONENT {self.component}"\n f"{locked}"\n f" AT {self.pos} END_ANCHOR"\n )\n\n\nclass SettingDefinition(Statement):\n def __init__(self, name, value, location=None):\n Statement.__init__(self, location)\n self.name = name\n self.value = value\n\n def __str__(self):\n if self.value is True:\n return f"{self.name}"\n if isinstance(self.value, (tuple, list)):\n value = " ".join(str(v) for v in self.value)\n return f"{self.name} {value}"\n return f"{self.name} {self.value}"\n
| .venv\Lib\site-packages\fontTools\voltLib\ast.py | ast.py | Python | 13,752 | 0.85 | 0.298673 | 0 | node-utils | 31 | 2024-05-26T13:14:04.706425 | GPL-3.0 | false | 406499496da1821baba9d142fec15e8b |
class VoltLibError(Exception):\n def __init__(self, message, location):\n Exception.__init__(self, message)\n self.location = location\n\n def __str__(self):\n message = Exception.__str__(self)\n if self.location:\n path, line, column = self.location\n return "%s:%d:%d: %s" % (path, line, column, message)\n else:\n return message\n
| .venv\Lib\site-packages\fontTools\voltLib\error.py | error.py | Python | 407 | 0.85 | 0.333333 | 0 | vue-tools | 409 | 2024-12-05T23:49:14.575989 | MIT | false | 605d41d5b4f4660f45dace79c1ea485d |
from fontTools.voltLib.error import VoltLibError\n\n\nclass Lexer(object):\n NUMBER = "NUMBER"\n STRING = "STRING"\n NAME = "NAME"\n NEWLINE = "NEWLINE"\n\n CHAR_WHITESPACE_ = " \t"\n CHAR_NEWLINE_ = "\r\n"\n CHAR_DIGIT_ = "0123456789"\n CHAR_UC_LETTER_ = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"\n CHAR_LC_LETTER_ = "abcdefghijklmnopqrstuvwxyz"\n CHAR_UNDERSCORE_ = "_"\n CHAR_PERIOD_ = "."\n CHAR_NAME_START_ = (\n CHAR_UC_LETTER_ + CHAR_LC_LETTER_ + CHAR_PERIOD_ + CHAR_UNDERSCORE_\n )\n CHAR_NAME_CONTINUATION_ = CHAR_NAME_START_ + CHAR_DIGIT_\n\n def __init__(self, text, filename):\n self.filename_ = filename\n self.line_ = 1\n self.pos_ = 0\n self.line_start_ = 0\n self.text_ = text\n self.text_length_ = len(text)\n\n def __iter__(self):\n return self\n\n def next(self): # Python 2\n return self.__next__()\n\n def __next__(self): # Python 3\n while True:\n token_type, token, location = self.next_()\n if token_type not in {Lexer.NEWLINE}:\n return (token_type, token, location)\n\n def location_(self):\n column = self.pos_ - self.line_start_ + 1\n return (self.filename_ or "<volt>", self.line_, column)\n\n def next_(self):\n self.scan_over_(Lexer.CHAR_WHITESPACE_)\n location = self.location_()\n start = self.pos_\n text = self.text_\n limit = len(text)\n if start >= limit:\n raise StopIteration()\n cur_char = text[start]\n next_char = text[start + 1] if start + 1 < limit else None\n\n if cur_char == "\n":\n self.pos_ += 1\n self.line_ += 1\n self.line_start_ = self.pos_\n return (Lexer.NEWLINE, None, location)\n if cur_char == "\r":\n self.pos_ += 2 if next_char == "\n" else 1\n self.line_ += 1\n self.line_start_ = self.pos_\n return (Lexer.NEWLINE, None, location)\n if cur_char == '"':\n self.pos_ += 1\n self.scan_until_('"\r\n')\n if self.pos_ < self.text_length_ and self.text_[self.pos_] == '"':\n self.pos_ += 1\n return (Lexer.STRING, text[start + 1 : self.pos_ - 1], location)\n else:\n raise VoltLibError("Expected '\"' to terminate string", location)\n if cur_char in Lexer.CHAR_NAME_START_:\n self.pos_ += 1\n self.scan_over_(Lexer.CHAR_NAME_CONTINUATION_)\n token = text[start : self.pos_]\n return (Lexer.NAME, token, location)\n if cur_char in Lexer.CHAR_DIGIT_:\n self.scan_over_(Lexer.CHAR_DIGIT_)\n return (Lexer.NUMBER, int(text[start : self.pos_], 10), location)\n if cur_char == "-" and next_char in Lexer.CHAR_DIGIT_:\n self.pos_ += 1\n self.scan_over_(Lexer.CHAR_DIGIT_)\n return (Lexer.NUMBER, int(text[start : self.pos_], 10), location)\n raise VoltLibError("Unexpected character: '%s'" % cur_char, location)\n\n def scan_over_(self, valid):\n p = self.pos_\n while p < self.text_length_ and self.text_[p] in valid:\n p += 1\n self.pos_ = p\n\n def scan_until_(self, stop_at):\n p = self.pos_\n while p < self.text_length_ and self.text_[p] not in stop_at:\n p += 1\n self.pos_ = p\n
| .venv\Lib\site-packages\fontTools\voltLib\lexer.py | lexer.py | Python | 3,467 | 0.95 | 0.232323 | 0 | node-utils | 287 | 2025-06-05T14:26:55.192919 | GPL-3.0 | false | c9fb20bcf039c3f072ccd0076fec6a77 |
import fontTools.voltLib.ast as ast\nfrom fontTools.voltLib.lexer import Lexer\nfrom fontTools.voltLib.error import VoltLibError\nfrom io import open\n\nPARSE_FUNCS = {\n "DEF_GLYPH": "parse_def_glyph_",\n "DEF_GROUP": "parse_def_group_",\n "DEF_SCRIPT": "parse_def_script_",\n "DEF_LOOKUP": "parse_def_lookup_",\n "DEF_ANCHOR": "parse_def_anchor_",\n "GRID_PPEM": "parse_ppem_",\n "PRESENTATION_PPEM": "parse_ppem_",\n "PPOSITIONING_PPEM": "parse_ppem_",\n "COMPILER_USEEXTENSIONLOOKUPS": "parse_noarg_option_",\n "COMPILER_USEPAIRPOSFORMAT2": "parse_noarg_option_",\n "CMAP_FORMAT": "parse_cmap_format",\n "DO_NOT_TOUCH_CMAP": "parse_noarg_option_",\n}\n\n\nclass Parser(object):\n def __init__(self, path):\n self.doc_ = ast.VoltFile()\n self.glyphs_ = OrderedSymbolTable()\n self.groups_ = SymbolTable()\n self.anchors_ = {} # dictionary of SymbolTable() keyed by glyph\n self.scripts_ = SymbolTable()\n self.langs_ = SymbolTable()\n self.lookups_ = SymbolTable()\n self.next_token_type_, self.next_token_ = (None, None)\n self.next_token_location_ = None\n self.make_lexer_(path)\n self.advance_lexer_()\n\n def make_lexer_(self, file_or_path):\n if hasattr(file_or_path, "read"):\n filename = getattr(file_or_path, "name", None)\n data = file_or_path.read()\n else:\n filename = file_or_path\n with open(file_or_path, "r") as f:\n data = f.read()\n self.lexer_ = Lexer(data, filename)\n\n def parse(self):\n statements = self.doc_.statements\n while self.next_token_type_ is not None:\n self.advance_lexer_()\n if self.cur_token_ in PARSE_FUNCS.keys():\n func = getattr(self, PARSE_FUNCS[self.cur_token_])\n statements.append(func())\n elif self.is_cur_keyword_("END"):\n break\n else:\n raise VoltLibError(\n "Expected " + ", ".join(sorted(PARSE_FUNCS.keys())),\n self.cur_token_location_,\n )\n return self.doc_\n\n def parse_def_glyph_(self):\n assert self.is_cur_keyword_("DEF_GLYPH")\n location = self.cur_token_location_\n name = self.expect_string_()\n self.expect_keyword_("ID")\n gid = self.expect_number_()\n if gid < 0:\n raise VoltLibError("Invalid glyph ID", self.cur_token_location_)\n gunicode = None\n if self.next_token_ == "UNICODE":\n self.expect_keyword_("UNICODE")\n gunicode = [self.expect_number_()]\n if gunicode[0] < 0:\n raise VoltLibError("Invalid glyph UNICODE", self.cur_token_location_)\n elif self.next_token_ == "UNICODEVALUES":\n self.expect_keyword_("UNICODEVALUES")\n gunicode = self.parse_unicode_values_()\n gtype = None\n if self.next_token_ == "TYPE":\n self.expect_keyword_("TYPE")\n gtype = self.expect_name_()\n assert gtype in ("BASE", "LIGATURE", "MARK", "COMPONENT")\n components = None\n if self.next_token_ == "COMPONENTS":\n self.expect_keyword_("COMPONENTS")\n components = self.expect_number_()\n self.expect_keyword_("END_GLYPH")\n if self.glyphs_.resolve(name) is not None:\n raise VoltLibError(\n 'Glyph "%s" (gid %i) already defined' % (name, gid), location\n )\n def_glyph = ast.GlyphDefinition(\n name, gid, gunicode, gtype, components, location=location\n )\n self.glyphs_.define(name, def_glyph)\n return def_glyph\n\n def parse_def_group_(self):\n assert self.is_cur_keyword_("DEF_GROUP")\n location = self.cur_token_location_\n name = self.expect_string_()\n enum = None\n if self.next_token_ == "ENUM":\n enum = self.parse_enum_()\n self.expect_keyword_("END_GROUP")\n if self.groups_.resolve(name) is not None:\n raise VoltLibError(\n 'Glyph group "%s" already defined, '\n "group names are case insensitive" % name,\n location,\n )\n def_group = ast.GroupDefinition(name, enum, 
location=location)\n self.groups_.define(name, def_group)\n return def_group\n\n def parse_def_script_(self):\n assert self.is_cur_keyword_("DEF_SCRIPT")\n location = self.cur_token_location_\n name = None\n if self.next_token_ == "NAME":\n self.expect_keyword_("NAME")\n name = self.expect_string_()\n self.expect_keyword_("TAG")\n tag = self.expect_string_()\n if self.scripts_.resolve(tag) is not None:\n raise VoltLibError(\n 'Script "%s" already defined, '\n "script tags are case insensitive" % tag,\n location,\n )\n self.langs_.enter_scope()\n langs = []\n while self.next_token_ != "END_SCRIPT":\n self.advance_lexer_()\n lang = self.parse_langsys_()\n self.expect_keyword_("END_LANGSYS")\n if self.langs_.resolve(lang.tag) is not None:\n raise VoltLibError(\n 'Language "%s" already defined in script "%s", '\n "language tags are case insensitive" % (lang.tag, tag),\n location,\n )\n self.langs_.define(lang.tag, lang)\n langs.append(lang)\n self.expect_keyword_("END_SCRIPT")\n self.langs_.exit_scope()\n def_script = ast.ScriptDefinition(name, tag, langs, location=location)\n self.scripts_.define(tag, def_script)\n return def_script\n\n def parse_langsys_(self):\n assert self.is_cur_keyword_("DEF_LANGSYS")\n location = self.cur_token_location_\n name = None\n if self.next_token_ == "NAME":\n self.expect_keyword_("NAME")\n name = self.expect_string_()\n self.expect_keyword_("TAG")\n tag = self.expect_string_()\n features = []\n while self.next_token_ != "END_LANGSYS":\n self.advance_lexer_()\n feature = self.parse_feature_()\n self.expect_keyword_("END_FEATURE")\n features.append(feature)\n def_langsys = ast.LangSysDefinition(name, tag, features, location=location)\n return def_langsys\n\n def parse_feature_(self):\n assert self.is_cur_keyword_("DEF_FEATURE")\n location = self.cur_token_location_\n self.expect_keyword_("NAME")\n name = self.expect_string_()\n self.expect_keyword_("TAG")\n tag = self.expect_string_()\n lookups = []\n while self.next_token_ != "END_FEATURE":\n # self.advance_lexer_()\n self.expect_keyword_("LOOKUP")\n lookup = self.expect_string_()\n lookups.append(lookup)\n feature = ast.FeatureDefinition(name, tag, lookups, location=location)\n return feature\n\n def parse_def_lookup_(self):\n assert self.is_cur_keyword_("DEF_LOOKUP")\n location = self.cur_token_location_\n name = self.expect_string_()\n if not name[0].isalpha():\n raise VoltLibError(\n 'Lookup name "%s" must start with a letter' % name, location\n )\n if self.lookups_.resolve(name) is not None:\n raise VoltLibError(\n 'Lookup "%s" already defined, '\n "lookup names are case insensitive" % name,\n location,\n )\n process_base = True\n if self.next_token_ == "PROCESS_BASE":\n self.advance_lexer_()\n elif self.next_token_ == "SKIP_BASE":\n self.advance_lexer_()\n process_base = False\n process_marks = True\n mark_glyph_set = None\n if self.next_token_ == "PROCESS_MARKS":\n self.advance_lexer_()\n if self.next_token_ == "MARK_GLYPH_SET":\n self.advance_lexer_()\n mark_glyph_set = self.expect_string_()\n elif self.next_token_ == "ALL":\n self.advance_lexer_()\n elif self.next_token_ == "NONE":\n self.advance_lexer_()\n process_marks = False\n elif self.next_token_type_ == Lexer.STRING:\n process_marks = self.expect_string_()\n else:\n raise VoltLibError(\n "Expected ALL, NONE, MARK_GLYPH_SET or an ID. 
"\n "Got %s" % (self.next_token_type_),\n location,\n )\n elif self.next_token_ == "SKIP_MARKS":\n self.advance_lexer_()\n process_marks = False\n direction = None\n if self.next_token_ == "DIRECTION":\n self.expect_keyword_("DIRECTION")\n direction = self.expect_name_()\n assert direction in ("LTR", "RTL")\n reversal = None\n if self.next_token_ == "REVERSAL":\n self.expect_keyword_("REVERSAL")\n reversal = True\n comments = None\n if self.next_token_ == "COMMENTS":\n self.expect_keyword_("COMMENTS")\n comments = self.expect_string_().replace(r"\n", "\n")\n context = []\n while self.next_token_ in ("EXCEPT_CONTEXT", "IN_CONTEXT"):\n context = self.parse_context_()\n as_pos_or_sub = self.expect_name_()\n sub = None\n pos = None\n if as_pos_or_sub == "AS_SUBSTITUTION":\n sub = self.parse_substitution_(reversal)\n elif as_pos_or_sub == "AS_POSITION":\n pos = self.parse_position_()\n else:\n raise VoltLibError(\n "Expected AS_SUBSTITUTION or AS_POSITION. " "Got %s" % (as_pos_or_sub),\n location,\n )\n def_lookup = ast.LookupDefinition(\n name,\n process_base,\n process_marks,\n mark_glyph_set,\n direction,\n reversal,\n comments,\n context,\n sub,\n pos,\n location=location,\n )\n self.lookups_.define(name, def_lookup)\n return def_lookup\n\n def parse_context_(self):\n location = self.cur_token_location_\n contexts = []\n while self.next_token_ in ("EXCEPT_CONTEXT", "IN_CONTEXT"):\n side = None\n coverage = None\n ex_or_in = self.expect_name_()\n # side_contexts = [] # XXX\n if self.next_token_ != "END_CONTEXT":\n left = []\n right = []\n while self.next_token_ in ("LEFT", "RIGHT"):\n side = self.expect_name_()\n coverage = self.parse_coverage_()\n if side == "LEFT":\n left.append(coverage)\n else:\n right.append(coverage)\n self.expect_keyword_("END_CONTEXT")\n context = ast.ContextDefinition(\n ex_or_in, left, right, location=location\n )\n contexts.append(context)\n else:\n self.expect_keyword_("END_CONTEXT")\n return contexts\n\n def parse_substitution_(self, reversal):\n assert self.is_cur_keyword_("AS_SUBSTITUTION")\n location = self.cur_token_location_\n src = []\n dest = []\n if self.next_token_ != "SUB":\n raise VoltLibError("Expected SUB", location)\n while self.next_token_ == "SUB":\n self.expect_keyword_("SUB")\n src.append(self.parse_coverage_())\n self.expect_keyword_("WITH")\n dest.append(self.parse_coverage_())\n self.expect_keyword_("END_SUB")\n self.expect_keyword_("END_SUBSTITUTION")\n max_src = max([len(cov) for cov in src])\n max_dest = max([len(cov) for cov in dest])\n\n # many to many or mixed is invalid\n if max_src > 1 and max_dest > 1:\n raise VoltLibError("Invalid substitution type", location)\n\n mapping = dict(zip(tuple(src), tuple(dest)))\n if max_src == 1 and max_dest == 1:\n # Alternate substitutions are represented by adding multiple\n # substitutions for the same glyph, so we detect that here\n glyphs = [x.glyphSet() for cov in src for x in cov] # flatten src\n if len(set(glyphs)) != len(glyphs): # src has duplicates\n sub = ast.SubstitutionAlternateDefinition(mapping, location=location)\n else:\n if reversal:\n # Reversal is valid only for single glyph substitutions\n # and VOLT ignores it otherwise.\n sub = ast.SubstitutionReverseChainingSingleDefinition(\n mapping, location=location\n )\n else:\n sub = ast.SubstitutionSingleDefinition(mapping, location=location)\n elif max_src == 1 and max_dest > 1:\n sub = ast.SubstitutionMultipleDefinition(mapping, location=location)\n elif max_src > 1 and max_dest == 1:\n sub = 
ast.SubstitutionLigatureDefinition(mapping, location=location)\n return sub\n\n def parse_position_(self):\n assert self.is_cur_keyword_("AS_POSITION")\n location = self.cur_token_location_\n pos_type = self.expect_name_()\n if pos_type not in ("ATTACH", "ATTACH_CURSIVE", "ADJUST_PAIR", "ADJUST_SINGLE"):\n raise VoltLibError(\n "Expected ATTACH, ATTACH_CURSIVE, ADJUST_PAIR, ADJUST_SINGLE", location\n )\n if pos_type == "ATTACH":\n position = self.parse_attach_()\n elif pos_type == "ATTACH_CURSIVE":\n position = self.parse_attach_cursive_()\n elif pos_type == "ADJUST_PAIR":\n position = self.parse_adjust_pair_()\n elif pos_type == "ADJUST_SINGLE":\n position = self.parse_adjust_single_()\n self.expect_keyword_("END_POSITION")\n return position\n\n def parse_attach_(self):\n assert self.is_cur_keyword_("ATTACH")\n location = self.cur_token_location_\n coverage = self.parse_coverage_()\n coverage_to = []\n self.expect_keyword_("TO")\n while self.next_token_ != "END_ATTACH":\n cov = self.parse_coverage_()\n self.expect_keyword_("AT")\n self.expect_keyword_("ANCHOR")\n anchor_name = self.expect_string_()\n coverage_to.append((cov, anchor_name))\n self.expect_keyword_("END_ATTACH")\n position = ast.PositionAttachDefinition(\n coverage, coverage_to, location=location\n )\n return position\n\n def parse_attach_cursive_(self):\n assert self.is_cur_keyword_("ATTACH_CURSIVE")\n location = self.cur_token_location_\n coverages_exit = []\n coverages_enter = []\n while self.next_token_ != "ENTER":\n self.expect_keyword_("EXIT")\n coverages_exit.append(self.parse_coverage_())\n while self.next_token_ != "END_ATTACH":\n self.expect_keyword_("ENTER")\n coverages_enter.append(self.parse_coverage_())\n self.expect_keyword_("END_ATTACH")\n position = ast.PositionAttachCursiveDefinition(\n coverages_exit, coverages_enter, location=location\n )\n return position\n\n def parse_adjust_pair_(self):\n assert self.is_cur_keyword_("ADJUST_PAIR")\n location = self.cur_token_location_\n coverages_1 = []\n coverages_2 = []\n adjust_pair = {}\n while self.next_token_ == "FIRST":\n self.advance_lexer_()\n coverage_1 = self.parse_coverage_()\n coverages_1.append(coverage_1)\n while self.next_token_ == "SECOND":\n self.advance_lexer_()\n coverage_2 = self.parse_coverage_()\n coverages_2.append(coverage_2)\n while self.next_token_ != "END_ADJUST":\n id_1 = self.expect_number_()\n id_2 = self.expect_number_()\n self.expect_keyword_("BY")\n pos_1 = self.parse_pos_()\n pos_2 = self.parse_pos_()\n adjust_pair[(id_1, id_2)] = (pos_1, pos_2)\n self.expect_keyword_("END_ADJUST")\n position = ast.PositionAdjustPairDefinition(\n coverages_1, coverages_2, adjust_pair, location=location\n )\n return position\n\n def parse_adjust_single_(self):\n assert self.is_cur_keyword_("ADJUST_SINGLE")\n location = self.cur_token_location_\n adjust_single = []\n while self.next_token_ != "END_ADJUST":\n coverages = self.parse_coverage_()\n self.expect_keyword_("BY")\n pos = self.parse_pos_()\n adjust_single.append((coverages, pos))\n self.expect_keyword_("END_ADJUST")\n position = ast.PositionAdjustSingleDefinition(adjust_single, location=location)\n return position\n\n def parse_def_anchor_(self):\n assert self.is_cur_keyword_("DEF_ANCHOR")\n location = self.cur_token_location_\n name = self.expect_string_()\n self.expect_keyword_("ON")\n gid = self.expect_number_()\n self.expect_keyword_("GLYPH")\n glyph_name = self.expect_name_()\n self.expect_keyword_("COMPONENT")\n component = self.expect_number_()\n # check for duplicate anchor names on this 
glyph\n if glyph_name in self.anchors_:\n anchor = self.anchors_[glyph_name].resolve(name)\n if anchor is not None and anchor.component == component:\n raise VoltLibError(\n 'Anchor "%s" already defined, '\n "anchor names are case insensitive" % name,\n location,\n )\n if self.next_token_ == "LOCKED":\n locked = True\n self.advance_lexer_()\n else:\n locked = False\n self.expect_keyword_("AT")\n pos = self.parse_pos_()\n self.expect_keyword_("END_ANCHOR")\n anchor = ast.AnchorDefinition(\n name, gid, glyph_name, component, locked, pos, location=location\n )\n if glyph_name not in self.anchors_:\n self.anchors_[glyph_name] = SymbolTable()\n self.anchors_[glyph_name].define(name, anchor)\n return anchor\n\n def parse_adjust_by_(self):\n self.advance_lexer_()\n assert self.is_cur_keyword_("ADJUST_BY")\n adjustment = self.expect_number_()\n self.expect_keyword_("AT")\n size = self.expect_number_()\n return adjustment, size\n\n def parse_pos_(self):\n # VOLT syntax doesn't seem to take device Y advance\n self.advance_lexer_()\n location = self.cur_token_location_\n assert self.is_cur_keyword_("POS"), location\n adv = None\n dx = None\n dy = None\n adv_adjust_by = {}\n dx_adjust_by = {}\n dy_adjust_by = {}\n if self.next_token_ == "ADV":\n self.advance_lexer_()\n adv = self.expect_number_()\n while self.next_token_ == "ADJUST_BY":\n adjustment, size = self.parse_adjust_by_()\n adv_adjust_by[size] = adjustment\n if self.next_token_ == "DX":\n self.advance_lexer_()\n dx = self.expect_number_()\n while self.next_token_ == "ADJUST_BY":\n adjustment, size = self.parse_adjust_by_()\n dx_adjust_by[size] = adjustment\n if self.next_token_ == "DY":\n self.advance_lexer_()\n dy = self.expect_number_()\n while self.next_token_ == "ADJUST_BY":\n adjustment, size = self.parse_adjust_by_()\n dy_adjust_by[size] = adjustment\n self.expect_keyword_("END_POS")\n return ast.Pos(adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by)\n\n def parse_unicode_values_(self):\n location = self.cur_token_location_\n try:\n unicode_values = self.expect_string_().split(",")\n unicode_values = [int(uni[2:], 16) for uni in unicode_values if uni != ""]\n except ValueError as err:\n raise VoltLibError(str(err), location)\n return unicode_values if unicode_values != [] else None\n\n def parse_enum_(self):\n self.expect_keyword_("ENUM")\n location = self.cur_token_location_\n enum = ast.Enum(self.parse_coverage_(), location=location)\n self.expect_keyword_("END_ENUM")\n return enum\n\n def parse_coverage_(self):\n coverage = []\n location = self.cur_token_location_\n while self.next_token_ in ("GLYPH", "GROUP", "RANGE", "ENUM"):\n if self.next_token_ == "ENUM":\n enum = self.parse_enum_()\n coverage.append(enum)\n elif self.next_token_ == "GLYPH":\n self.expect_keyword_("GLYPH")\n name = self.expect_string_()\n coverage.append(ast.GlyphName(name, location=location))\n elif self.next_token_ == "GROUP":\n self.expect_keyword_("GROUP")\n name = self.expect_string_()\n coverage.append(ast.GroupName(name, self, location=location))\n elif self.next_token_ == "RANGE":\n self.expect_keyword_("RANGE")\n start = self.expect_string_()\n self.expect_keyword_("TO")\n end = self.expect_string_()\n coverage.append(ast.Range(start, end, self, location=location))\n return tuple(coverage)\n\n def resolve_group(self, group_name):\n return self.groups_.resolve(group_name)\n\n def glyph_range(self, start, end):\n return self.glyphs_.range(start, end)\n\n def parse_ppem_(self):\n location = self.cur_token_location_\n ppem_name = self.cur_token_\n 
value = self.expect_number_()\n setting = ast.SettingDefinition(ppem_name, value, location=location)\n return setting\n\n def parse_noarg_option_(self):\n location = self.cur_token_location_\n name = self.cur_token_\n value = True\n setting = ast.SettingDefinition(name, value, location=location)\n return setting\n\n def parse_cmap_format(self):\n location = self.cur_token_location_\n name = self.cur_token_\n value = (self.expect_number_(), self.expect_number_(), self.expect_number_())\n setting = ast.SettingDefinition(name, value, location=location)\n return setting\n\n def is_cur_keyword_(self, k):\n return (self.cur_token_type_ is Lexer.NAME) and (self.cur_token_ == k)\n\n def expect_string_(self):\n self.advance_lexer_()\n if self.cur_token_type_ is not Lexer.STRING:\n raise VoltLibError("Expected a string", self.cur_token_location_)\n return self.cur_token_\n\n def expect_keyword_(self, keyword):\n self.advance_lexer_()\n if self.cur_token_type_ is Lexer.NAME and self.cur_token_ == keyword:\n return self.cur_token_\n raise VoltLibError('Expected "%s"' % keyword, self.cur_token_location_)\n\n def expect_name_(self):\n self.advance_lexer_()\n if self.cur_token_type_ is Lexer.NAME:\n return self.cur_token_\n raise VoltLibError("Expected a name", self.cur_token_location_)\n\n def expect_number_(self):\n self.advance_lexer_()\n if self.cur_token_type_ is not Lexer.NUMBER:\n raise VoltLibError("Expected a number", self.cur_token_location_)\n return self.cur_token_\n\n def advance_lexer_(self):\n self.cur_token_type_, self.cur_token_, self.cur_token_location_ = (\n self.next_token_type_,\n self.next_token_,\n self.next_token_location_,\n )\n try:\n if self.is_cur_keyword_("END"):\n raise StopIteration\n (\n self.next_token_type_,\n self.next_token_,\n self.next_token_location_,\n ) = self.lexer_.next()\n except StopIteration:\n self.next_token_type_, self.next_token_ = (None, None)\n\n\nclass SymbolTable(object):\n def __init__(self):\n self.scopes_ = [{}]\n\n def enter_scope(self):\n self.scopes_.append({})\n\n def exit_scope(self):\n self.scopes_.pop()\n\n def define(self, name, item):\n self.scopes_[-1][name] = item\n\n def resolve(self, name, case_insensitive=True):\n for scope in reversed(self.scopes_):\n item = scope.get(name)\n if item:\n return item\n if case_insensitive:\n for key in scope:\n if key.lower() == name.lower():\n return scope[key]\n return None\n\n\nclass OrderedSymbolTable(SymbolTable):\n def __init__(self):\n self.scopes_ = [{}]\n\n def enter_scope(self):\n self.scopes_.append({})\n\n def resolve(self, name, case_insensitive=False):\n SymbolTable.resolve(self, name, case_insensitive=case_insensitive)\n\n def range(self, start, end):\n for scope in reversed(self.scopes_):\n if start in scope and end in scope:\n start_idx = list(scope.keys()).index(start)\n end_idx = list(scope.keys()).index(end)\n return list(scope.keys())[start_idx : end_idx + 1]\n return None\n
| .venv\Lib\site-packages\fontTools\voltLib\parser.py | parser.py | Python | 26,060 | 0.95 | 0.192771 | 0.01461 | node-utils | 848 | 2023-11-14T05:43:32.139165 | MIT | false | 6b954c9b77850379cd68ca9d02c03d27 |
"""\\nMS VOLT ``.vtp`` to AFDKO ``.fea`` OpenType Layout converter.\n\nUsage\n-----\n\nTo convert a VTP project file:\n\n\n.. code-block:: sh\n\n $ fonttools voltLib.voltToFea input.vtp output.fea\n\nIt is also possible convert font files with `TSIV` table (as saved from Volt),\nin this case the glyph names used in the Volt project will be mapped to the\nactual glyph names in the font files when written to the feature file:\n\n.. code-block:: sh\n\n $ fonttools voltLib.voltToFea input.ttf output.fea\n\nThe ``--quiet`` option can be used to suppress warnings.\n\nThe ``--traceback`` can be used to get Python traceback in case of exceptions,\ninstead of suppressing the traceback.\n\n\nLimitations\n-----------\n\n* Not all VOLT features are supported, the script will error if it it\n encounters something it does not understand. Please report an issue if this\n happens.\n* AFDKO feature file syntax for mark positioning is awkward and does not allow\n setting the mark coverage. It also defines mark anchors globally, as a result\n some mark positioning lookups might cover many marks than what was in the VOLT\n file. This should not be an issue in practice, but if it is then the only way\n is to modify the VOLT file or the generated feature file manually to use unique\n mark anchors for each lookup.\n* VOLT allows subtable breaks in any lookup type, but AFDKO feature file\n implementations vary in their support; currently AFDKO’s makeOTF supports\n subtable breaks in pair positioning lookups only, while FontTools’ feaLib\n support it for most substitution lookups and only some positioning lookups.\n"""\n\nimport logging\nimport re\nfrom io import StringIO\nfrom graphlib import TopologicalSorter\n\nfrom fontTools.feaLib import ast\nfrom fontTools.ttLib import TTFont, TTLibError\nfrom fontTools.voltLib import ast as VAst\nfrom fontTools.voltLib.parser import Parser as VoltParser\n\nlog = logging.getLogger("fontTools.voltLib.voltToFea")\n\nTABLES = ["GDEF", "GSUB", "GPOS"]\n\n\ndef _flatten_group(group):\n ret = []\n if isinstance(group, (tuple, list)):\n for item in group:\n ret.extend(_flatten_group(item))\n elif hasattr(group, "enum"):\n ret.extend(_flatten_group(group.enum))\n else:\n ret.append(group)\n return ret\n\n\n# Topologically sort of group definitions to ensure that all groups are defined\n# before they are referenced. 
This is necessary because FEA requires it but\n# VOLT does not, see below.\ndef sort_groups(groups):\n group_map = {group.name.lower(): group for group in groups}\n graph = {\n group.name.lower(): [\n x.group.lower()\n for x in _flatten_group(group)\n if isinstance(x, VAst.GroupName)\n ]\n for group in groups\n }\n sorter = TopologicalSorter(graph)\n return [group_map[name] for name in sorter.static_order()]\n\n\nclass Lookup(ast.LookupBlock):\n def __init__(self, name, use_extension=False, location=None):\n super().__init__(name, use_extension, location)\n self.chained = []\n\n\nclass VoltToFea:\n _NOT_LOOKUP_NAME_RE = re.compile(r"[^A-Za-z_0-9.]")\n _NOT_CLASS_NAME_RE = re.compile(r"[^A-Za-z_0-9.\-]")\n\n def __init__(self, file_or_path, font=None):\n if isinstance(file_or_path, VAst.VoltFile):\n self._doc, self._file_or_path = file_or_path, None\n else:\n self._doc, self._file_or_path = None, file_or_path\n self._font = font\n\n self._glyph_map = {}\n self._glyph_order = None\n\n self._gdef = {}\n self._glyphclasses = {}\n self._features = {}\n self._lookups = {}\n\n self._marks = set()\n self._ligatures = {}\n\n self._markclasses = {}\n self._anchors = {}\n\n self._settings = {}\n\n self._lookup_names = {}\n self._class_names = {}\n\n def _lookupName(self, name):\n if name not in self._lookup_names:\n res = self._NOT_LOOKUP_NAME_RE.sub("_", name)\n while res in self._lookup_names.values():\n res += "_"\n self._lookup_names[name] = res\n return self._lookup_names[name]\n\n def _className(self, name):\n if name not in self._class_names:\n res = self._NOT_CLASS_NAME_RE.sub("_", name)\n while res in self._class_names.values():\n res += "_"\n self._class_names[name] = res\n return self._class_names[name]\n\n def _collectStatements(self, doc, tables, ignore_unsupported_settings=False):\n # Collect glyph difinitions first, as we need them to map VOLT glyph names to font glyph name.\n for statement in doc.statements:\n if isinstance(statement, VAst.GlyphDefinition):\n self._glyphDefinition(statement)\n\n # Collect and sort group definitions first, to make sure a group\n # definition that references other groups comes after them since VOLT\n # does not enforce such ordering, and feature file require it.\n groups = [s for s in doc.statements if isinstance(s, VAst.GroupDefinition)]\n for group in sort_groups(groups):\n self._groupDefinition(group)\n\n for statement in doc.statements:\n if isinstance(statement, VAst.AnchorDefinition):\n if "GPOS" in tables:\n self._anchorDefinition(statement)\n elif isinstance(statement, VAst.SettingDefinition):\n self._settingDefinition(statement, ignore_unsupported_settings)\n elif isinstance(statement, (VAst.GlyphDefinition, VAst.GroupDefinition)):\n pass # Handled above\n elif isinstance(statement, VAst.ScriptDefinition):\n self._scriptDefinition(statement)\n elif not isinstance(statement, VAst.LookupDefinition):\n raise NotImplementedError(statement)\n\n # Lookup definitions need to be handled last as they reference glyph\n # and mark classes that might be defined after them.\n for statement in doc.statements:\n if isinstance(statement, VAst.LookupDefinition):\n if statement.pos and "GPOS" not in tables:\n continue\n if statement.sub and "GSUB" not in tables:\n continue\n self._lookupDefinition(statement)\n\n def _buildFeatureFile(self, tables):\n doc = ast.FeatureFile()\n statements = doc.statements\n\n if self._glyphclasses:\n statements.append(ast.Comment("# Glyph classes"))\n statements.extend(self._glyphclasses.values())\n\n if self._markclasses:\n 
statements.append(ast.Comment("\n# Mark classes"))\n statements.extend(c[1] for c in sorted(self._markclasses.items()))\n\n if self._lookups:\n statements.append(ast.Comment("\n# Lookups"))\n for lookup in self._lookups.values():\n statements.extend(lookup.chained)\n statements.append(lookup)\n\n # Prune features\n features = self._features.copy()\n for feature_tag in features:\n scripts = features[feature_tag]\n for script_tag in scripts:\n langs = scripts[script_tag]\n for language_tag in langs:\n langs[language_tag] = [\n l for l in langs[language_tag] if l.lower() in self._lookups\n ]\n scripts[script_tag] = {t: l for t, l in langs.items() if l}\n features[feature_tag] = {t: s for t, s in scripts.items() if s}\n features = {t: f for t, f in features.items() if f}\n\n if features:\n statements.append(ast.Comment("# Features"))\n for feature_tag, scripts in features.items():\n feature = ast.FeatureBlock(feature_tag)\n script_tags = sorted(scripts, key=lambda k: 0 if k == "DFLT" else 1)\n if feature_tag == "aalt" and len(script_tags) > 1:\n log.warning(\n "FEA syntax does not allow script statements in 'aalt' feature, "\n "so only lookups from the first script will be included."\n )\n script_tags = script_tags[:1]\n for script_tag in script_tags:\n if feature_tag != "aalt":\n feature.statements.append(ast.ScriptStatement(script_tag))\n language_tags = sorted(\n scripts[script_tag],\n key=lambda k: 0 if k == "dflt" else 1,\n )\n if feature_tag == "aalt" and len(language_tags) > 1:\n log.warning(\n "FEA syntax does not allow language statements in 'aalt' feature, "\n "so only lookups from the first language will be included."\n )\n language_tags = language_tags[:1]\n for language_tag in language_tags:\n if feature_tag != "aalt":\n include_default = True if language_tag == "dflt" else False\n feature.statements.append(\n ast.LanguageStatement(\n language_tag.ljust(4),\n include_default=include_default,\n )\n )\n for name in scripts[script_tag][language_tag]:\n lookup = self._lookups[name.lower()]\n lookupref = ast.LookupReferenceStatement(lookup)\n feature.statements.append(lookupref)\n statements.append(feature)\n\n if self._gdef and "GDEF" in tables:\n classes = []\n for name in ("BASE", "MARK", "LIGATURE", "COMPONENT"):\n if name in self._gdef:\n classname = "GDEF_" + name.lower()\n glyphclass = ast.GlyphClassDefinition(classname, self._gdef[name])\n statements.append(glyphclass)\n classes.append(ast.GlyphClassName(glyphclass))\n else:\n classes.append(None)\n\n gdef = ast.TableBlock("GDEF")\n gdef.statements.append(ast.GlyphClassDefStatement(*classes))\n statements.append(gdef)\n\n return doc\n\n def convert(self, tables=None, ignore_unsupported_settings=False):\n if self._doc is None:\n self._doc = VoltParser(self._file_or_path).parse()\n doc = self._doc\n\n if tables is None:\n tables = TABLES\n if self._font is not None:\n self._glyph_order = self._font.getGlyphOrder()\n\n self._collectStatements(doc, tables, ignore_unsupported_settings)\n fea = self._buildFeatureFile(tables)\n return fea.asFea()\n\n def _glyphName(self, glyph):\n try:\n name = glyph.glyph\n except AttributeError:\n name = glyph\n return ast.GlyphName(self._glyph_map.get(name, name))\n\n def _groupName(self, group):\n try:\n name = group.group\n except AttributeError:\n name = group\n return ast.GlyphClassName(self._glyphclasses[name.lower()])\n\n def _glyphSet(self, item):\n return [\n (self._glyphName(x) if isinstance(x, (str, VAst.GlyphName)) else x)\n for x in item.glyphSet()\n ]\n\n def _coverage(self, coverage, 
flatten=False):\n items = []\n for item in coverage:\n if isinstance(item, VAst.GlyphName):\n items.append(self._glyphName(item))\n elif isinstance(item, VAst.GroupName):\n items.append(self._groupName(item))\n elif isinstance(item, VAst.Enum):\n item = self._coverage(item.enum, flatten=True)\n if flatten:\n items.extend(item)\n else:\n items.append(ast.GlyphClass(item))\n elif isinstance(item, VAst.Range):\n item = self._glyphSet(item)\n if flatten:\n items.extend(item)\n else:\n items.append(ast.GlyphClass(item))\n else:\n raise NotImplementedError(item)\n return items\n\n def _context(self, context):\n out = []\n for item in context:\n coverage = self._coverage(item, flatten=True)\n if len(coverage) > 1:\n coverage = ast.GlyphClass(coverage)\n else:\n coverage = coverage[0]\n out.append(coverage)\n return out\n\n def _groupDefinition(self, group):\n name = self._className(group.name)\n glyphs = self._coverage(group.enum.enum, flatten=True)\n glyphclass = ast.GlyphClass(glyphs)\n classdef = ast.GlyphClassDefinition(name, glyphclass)\n self._glyphclasses[group.name.lower()] = classdef\n\n def _glyphDefinition(self, glyph):\n try:\n self._glyph_map[glyph.name] = self._glyph_order[glyph.id]\n except TypeError:\n pass\n\n if glyph.type in ("BASE", "MARK", "LIGATURE", "COMPONENT"):\n if glyph.type not in self._gdef:\n self._gdef[glyph.type] = ast.GlyphClass()\n self._gdef[glyph.type].glyphs.append(self._glyphName(glyph.name))\n\n if glyph.type == "MARK":\n self._marks.add(glyph.name)\n elif glyph.type == "LIGATURE":\n self._ligatures[glyph.name] = glyph.components\n\n def _scriptDefinition(self, script):\n stag = script.tag\n for lang in script.langs:\n ltag = lang.tag\n for feature in lang.features:\n lookups = {l.split("\\")[0]: True for l in feature.lookups}\n ftag = feature.tag\n if ftag not in self._features:\n self._features[ftag] = {}\n if stag not in self._features[ftag]:\n self._features[ftag][stag] = {}\n assert ltag not in self._features[ftag][stag]\n self._features[ftag][stag][ltag] = lookups.keys()\n\n def _settingDefinition(self, setting, ignore_unsupported=False):\n if setting.name.startswith("COMPILER_"):\n self._settings[setting.name] = setting.value\n elif not ignore_unsupported:\n log.warning(f"Unsupported setting ignored: {setting.name}")\n\n def _adjustment(self, adjustment):\n adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment\n\n adv_device = adv_adjust_by and adv_adjust_by.items() or None\n dx_device = dx_adjust_by and dx_adjust_by.items() or None\n dy_device = dy_adjust_by and dy_adjust_by.items() or None\n\n return ast.ValueRecord(\n xPlacement=dx,\n yPlacement=dy,\n xAdvance=adv,\n xPlaDevice=dx_device,\n yPlaDevice=dy_device,\n xAdvDevice=adv_device,\n )\n\n def _anchor(self, adjustment):\n adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment\n\n assert not adv_adjust_by\n dx_device = dx_adjust_by and dx_adjust_by.items() or None\n dy_device = dy_adjust_by and dy_adjust_by.items() or None\n\n return ast.Anchor(\n dx or 0,\n dy or 0,\n xDeviceTable=dx_device or None,\n yDeviceTable=dy_device or None,\n )\n\n def _anchorDefinition(self, anchordef):\n anchorname = anchordef.name\n glyphname = anchordef.glyph_name\n anchor = self._anchor(anchordef.pos)\n\n if glyphname not in self._anchors:\n self._anchors[glyphname] = {}\n if anchorname.startswith("MARK_"):\n anchorname = anchorname[:5] + anchorname[5:].lower()\n else:\n anchorname = anchorname.lower()\n if anchorname not in self._anchors[glyphname]:\n 
self._anchors[glyphname][anchorname] = {}\n self._anchors[glyphname][anchorname][anchordef.component] = anchor\n\n def _gposLookup(self, lookup, fealookup):\n statements = fealookup.statements\n\n pos = lookup.pos\n if isinstance(pos, VAst.PositionAdjustPairDefinition):\n for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items():\n coverage_1 = pos.coverages_1[idx1 - 1]\n coverage_2 = pos.coverages_2[idx2 - 1]\n\n # If not both are groups, use “enum pos” otherwise makeotf will\n # fail.\n enumerated = False\n for item in coverage_1 + coverage_2:\n if not isinstance(item, VAst.GroupName):\n enumerated = True\n\n glyphs1 = self._coverage(coverage_1)\n glyphs2 = self._coverage(coverage_2)\n record1 = self._adjustment(pos1)\n record2 = self._adjustment(pos2)\n assert len(glyphs1) == 1\n assert len(glyphs2) == 1\n statements.append(\n ast.PairPosStatement(\n glyphs1[0], record1, glyphs2[0], record2, enumerated=enumerated\n )\n )\n elif isinstance(pos, VAst.PositionAdjustSingleDefinition):\n for a, b in pos.adjust_single:\n glyphs = self._coverage(a)\n record = self._adjustment(b)\n assert len(glyphs) == 1\n statements.append(\n ast.SinglePosStatement([(glyphs[0], record)], [], [], False)\n )\n elif isinstance(pos, VAst.PositionAttachDefinition):\n anchors = {}\n allmarks = set()\n for coverage, anchorname in pos.coverage_to:\n # In feature files mark classes are global, but in VOLT they\n # are defined per-lookup. If we output mark class definitions\n # for all marks that use a given anchor, we might end up with a\n # mark used in two different classes in the same lookup, which\n # is causes feature file compilation error.\n # At the expense of uglier feature code, we make the mark class\n # name by appending the current lookup name not the anchor\n # name, and output mark class definitions only for marks used\n # in this lookup.\n classname = self._className(f"{anchorname}.{lookup.name}")\n markclass = ast.MarkClass(classname)\n\n # Anchor names are case-insensitive in VOLT\n anchorname = anchorname.lower()\n\n # We might still end in marks used in two different anchor\n # classes, so we filter out already used marks.\n marks = set()\n for mark in coverage:\n marks.update(mark.glyphSet())\n if not marks.isdisjoint(allmarks):\n marks.difference_update(allmarks)\n if not marks:\n continue\n allmarks.update(marks)\n\n for glyphname in marks:\n glyph = self._glyphName(glyphname)\n anchor = self._anchors[glyphname][f"MARK_{anchorname}"][1]\n markdef = ast.MarkClassDefinition(markclass, anchor, glyph)\n self._markclasses[(glyphname, classname)] = markdef\n\n for base in pos.coverage:\n for name in base.glyphSet():\n if name not in anchors:\n anchors[name] = []\n if (anchorname, classname) not in anchors[name]:\n anchors[name].append((anchorname, classname))\n\n is_ligature = all(n in self._ligatures for n in anchors)\n is_mark = all(n in self._marks for n in anchors)\n for name in anchors:\n components = 1\n if is_ligature:\n components = self._ligatures[name]\n\n marks = [[] for _ in range(components)]\n for mark, classname in anchors[name]:\n markclass = ast.MarkClass(classname)\n for component in range(1, components + 1):\n if component in self._anchors[name][mark]:\n anchor = self._anchors[name][mark][component]\n marks[component - 1].append((anchor, markclass))\n\n base = self._glyphName(name)\n if is_mark:\n mark = ast.MarkMarkPosStatement(base, marks[0])\n elif is_ligature:\n mark = ast.MarkLigPosStatement(base, marks)\n else:\n mark = ast.MarkBasePosStatement(base, marks[0])\n 
statements.append(mark)\n elif isinstance(pos, VAst.PositionAttachCursiveDefinition):\n # Collect enter and exit glyphs\n enter_coverage = []\n for coverage in pos.coverages_enter:\n for base in coverage:\n for name in base.glyphSet():\n enter_coverage.append(name)\n exit_coverage = []\n for coverage in pos.coverages_exit:\n for base in coverage:\n for name in base.glyphSet():\n exit_coverage.append(name)\n\n # Write enter anchors, also check if the glyph has exit anchor and\n # write it, too.\n for name in enter_coverage:\n glyph = self._glyphName(name)\n entry = self._anchors[name]["entry"][1]\n exit = None\n if name in exit_coverage:\n exit = self._anchors[name]["exit"][1]\n exit_coverage.pop(exit_coverage.index(name))\n statements.append(ast.CursivePosStatement(glyph, entry, exit))\n\n # Write any remaining exit anchors.\n for name in exit_coverage:\n glyph = self._glyphName(name)\n exit = self._anchors[name]["exit"][1]\n statements.append(ast.CursivePosStatement(glyph, None, exit))\n else:\n raise NotImplementedError(pos)\n\n def _gposContextLookup(self, lookup, prefix, suffix, ignore, fealookup, chained):\n statements = fealookup.statements\n\n pos = lookup.pos\n if isinstance(pos, VAst.PositionAdjustPairDefinition):\n for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items():\n glyphs1 = self._coverage(pos.coverages_1[idx1 - 1])\n glyphs2 = self._coverage(pos.coverages_2[idx2 - 1])\n assert len(glyphs1) == 1\n assert len(glyphs2) == 1\n glyphs = (glyphs1[0], glyphs2[0])\n\n if ignore:\n statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])\n else:\n statement = ast.ChainContextPosStatement(\n prefix, glyphs, suffix, [chained, chained]\n )\n statements.append(statement)\n elif isinstance(pos, VAst.PositionAdjustSingleDefinition):\n glyphs = [ast.GlyphClass()]\n for a, _ in pos.adjust_single:\n glyphs[0].extend(self._coverage(a, flatten=True))\n\n if ignore:\n statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])\n else:\n statement = ast.ChainContextPosStatement(\n prefix, glyphs, suffix, [chained]\n )\n statements.append(statement)\n elif isinstance(pos, VAst.PositionAttachDefinition):\n glyphs = [ast.GlyphClass()]\n for coverage, _ in pos.coverage_to:\n glyphs[0].extend(self._coverage(coverage, flatten=True))\n\n if ignore:\n statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])\n else:\n statement = ast.ChainContextPosStatement(\n prefix, glyphs, suffix, [chained]\n )\n statements.append(statement)\n else:\n raise NotImplementedError(pos)\n\n def _gsubLookup(self, lookup, fealookup):\n statements = fealookup.statements\n\n sub = lookup.sub\n\n # Alternate substitutions are represented by adding multiple\n # substitutions for the same glyph, so we need to collect them into one\n # to many mapping.\n if isinstance(sub, VAst.SubstitutionAlternateDefinition):\n alternates = {}\n for key, val in sub.mapping.items():\n if not key or not val:\n path, line, column = sub.location\n log.warning(f"{path}:{line}:{column}: Ignoring empty substitution")\n continue\n glyphs = self._coverage(key)\n replacements = self._coverage(val)\n assert len(glyphs) == 1\n for src_glyph, repl_glyph in zip(\n glyphs[0].glyphSet(), replacements[0].glyphSet()\n ):\n alternates.setdefault(str(self._glyphName(src_glyph)), []).append(\n str(self._glyphName(repl_glyph))\n )\n\n for glyph, replacements in alternates.items():\n statement = ast.AlternateSubstStatement(\n [], glyph, [], ast.GlyphClass(replacements)\n )\n statements.append(statement)\n return\n\n for key, val in 
sub.mapping.items():\n if not key or not val:\n path, line, column = sub.location\n log.warning(f"{path}:{line}:{column}: Ignoring empty substitution")\n continue\n glyphs = self._coverage(key)\n replacements = self._coverage(val)\n if isinstance(sub, VAst.SubstitutionSingleDefinition):\n assert len(glyphs) == 1\n assert len(replacements) == 1\n statements.append(\n ast.SingleSubstStatement(glyphs, replacements, [], [], False)\n )\n elif isinstance(sub, VAst.SubstitutionReverseChainingSingleDefinition):\n # This is handled in gsubContextLookup()\n pass\n elif isinstance(sub, VAst.SubstitutionMultipleDefinition):\n assert len(glyphs) == 1\n statements.append(\n ast.MultipleSubstStatement([], glyphs[0], [], replacements)\n )\n elif isinstance(sub, VAst.SubstitutionLigatureDefinition):\n assert len(replacements) == 1\n statement = ast.LigatureSubstStatement(\n [], glyphs, [], replacements[0], False\n )\n\n # If any of the input glyphs is a group, we need to\n # explode the substitution into multiple ligature substitutions\n # since feature file syntax does not support classes in\n # ligature substitutions.\n n = max(len(x.glyphSet()) for x in glyphs)\n if n > 1:\n # All input should either be groups of the same length or single glyphs\n assert all(len(x.glyphSet()) in (n, 1) for x in glyphs)\n glyphs = [x.glyphSet() for x in glyphs]\n glyphs = [([x[0]] * n if len(x) == 1 else x) for x in glyphs]\n\n # In this case ligature replacements must be a group of the same length\n # as the input groups, or a single glyph. VOLT\n # allows the replacement glyphs to be longer and truncates them.\n # So well allow that and zip() below will do the truncation\n # for us.\n replacement = replacements[0].glyphSet()\n if len(replacement) == 1:\n replacement = [replacement[0]] * n\n assert len(replacement) >= n\n\n # Add the unexploded statement commented out for reference.\n statements.append(ast.Comment(f"# {statement}"))\n\n for zipped in zip(*glyphs, replacement):\n zipped = [self._glyphName(x) for x in zipped]\n statements.append(\n ast.LigatureSubstStatement(\n [], zipped[:-1], [], zipped[-1], False\n )\n )\n else:\n statements.append(statement)\n else:\n raise NotImplementedError(sub)\n\n def _gsubContextLookup(self, lookup, prefix, suffix, ignore, fealookup, chained):\n statements = fealookup.statements\n\n sub = lookup.sub\n\n if isinstance(sub, VAst.SubstitutionReverseChainingSingleDefinition):\n # Reverse substitutions is a special case, it can’t use chained lookups.\n for key, val in sub.mapping.items():\n if not key or not val:\n path, line, column = sub.location\n log.warning(f"{path}:{line}:{column}: Ignoring empty substitution")\n continue\n glyphs = self._coverage(key)\n replacements = self._coverage(val)\n statements.append(\n ast.ReverseChainSingleSubstStatement(\n prefix, suffix, glyphs, replacements\n )\n )\n fealookup.chained = []\n return\n\n if not isinstance(\n sub,\n (\n VAst.SubstitutionSingleDefinition,\n VAst.SubstitutionMultipleDefinition,\n VAst.SubstitutionLigatureDefinition,\n VAst.SubstitutionAlternateDefinition,\n ),\n ):\n raise NotImplementedError(type(sub))\n\n glyphs = []\n for key, val in sub.mapping.items():\n if not key or not val:\n path, line, column = sub.location\n log.warning(f"{path}:{line}:{column}: Ignoring empty substitution")\n continue\n glyphs.extend(self._coverage(key, flatten=True))\n\n if len(glyphs) > 1:\n glyphs = [ast.GlyphClass(glyphs)]\n if ignore:\n statements.append(ast.IgnoreSubstStatement([(prefix, glyphs, suffix)]))\n else:\n 
statements.append(\n ast.ChainContextSubstStatement(prefix, glyphs, suffix, [chained])\n )\n\n def _lookupDefinition(self, lookup):\n mark_attachement = None\n mark_filtering = None\n\n flags = 0\n if lookup.direction == "RTL":\n flags |= 1\n if not lookup.process_base:\n flags |= 2\n # FIXME: Does VOLT support this?\n # if not lookup.process_ligatures:\n # flags |= 4\n if not lookup.process_marks:\n flags |= 8\n elif isinstance(lookup.process_marks, str):\n mark_attachement = self._groupName(lookup.process_marks)\n elif lookup.mark_glyph_set is not None:\n mark_filtering = self._groupName(lookup.mark_glyph_set)\n\n lookupflags = None\n if flags or mark_attachement is not None or mark_filtering is not None:\n lookupflags = ast.LookupFlagStatement(\n flags, mark_attachement, mark_filtering\n )\n\n use_extension = False\n if self._settings.get("COMPILER_USEEXTENSIONLOOKUPS"):\n use_extension = True\n\n if "\\" in lookup.name:\n # Merge sub lookups as subtables (lookups named “base\sub”),\n # makeotf/feaLib will issue a warning and ignore the subtable\n # statement if it is not a pairpos lookup, though.\n name = lookup.name.split("\\")[0]\n if name.lower() not in self._lookups:\n fealookup = Lookup(\n self._lookupName(name),\n use_extension=use_extension,\n )\n if lookupflags is not None:\n fealookup.statements.append(lookupflags)\n fealookup.statements.append(ast.Comment("# " + lookup.name))\n else:\n fealookup = self._lookups[name.lower()]\n fealookup.statements.append(ast.SubtableStatement())\n fealookup.statements.append(ast.Comment("# " + lookup.name))\n self._lookups[name.lower()] = fealookup\n else:\n fealookup = Lookup(\n self._lookupName(lookup.name),\n use_extension=use_extension,\n )\n if lookupflags is not None:\n fealookup.statements.append(lookupflags)\n self._lookups[lookup.name.lower()] = fealookup\n\n if lookup.comments is not None:\n fealookup.statements.append(ast.Comment("# " + lookup.comments))\n\n contexts = []\n for context in lookup.context:\n prefix = self._context(context.left)\n suffix = self._context(context.right)\n ignore = context.ex_or_in == "EXCEPT_CONTEXT"\n contexts.append([prefix, suffix, ignore])\n # It seems that VOLT will create contextual substitution using\n # only the input if there is no other contexts in this lookup.\n if ignore and len(lookup.context) == 1:\n contexts.append([[], [], False])\n\n if contexts:\n chained = ast.LookupBlock(\n self._lookupName(lookup.name + " chained"),\n use_extension=use_extension,\n )\n fealookup.chained.append(chained)\n if lookup.sub is not None:\n self._gsubLookup(lookup, chained)\n elif lookup.pos is not None:\n self._gposLookup(lookup, chained)\n for prefix, suffix, ignore in contexts:\n if lookup.sub is not None:\n self._gsubContextLookup(\n lookup, prefix, suffix, ignore, fealookup, chained\n )\n elif lookup.pos is not None:\n self._gposContextLookup(\n lookup, prefix, suffix, ignore, fealookup, chained\n )\n else:\n if lookup.sub is not None:\n self._gsubLookup(lookup, fealookup)\n elif lookup.pos is not None:\n self._gposLookup(lookup, fealookup)\n\n\ndef main(args=None):\n """Convert MS VOLT to AFDKO feature files."""\n\n import argparse\n from pathlib import Path\n\n from fontTools import configLogger\n\n parser = argparse.ArgumentParser(\n "fonttools voltLib.voltToFea", description=main.__doc__\n )\n parser.add_argument(\n "input", metavar="INPUT", type=Path, help="input font/VTP file to process"\n )\n parser.add_argument(\n "featurefile", metavar="OUTPUT", type=Path, help="output feature file"\n )\n 
parser.add_argument(\n "-t",\n "--table",\n action="append",\n choices=TABLES,\n dest="tables",\n help="List of tables to write, by default all tables are written",\n )\n parser.add_argument(\n "-q", "--quiet", action="store_true", help="Suppress non-error messages"\n )\n parser.add_argument(\n "--traceback", action="store_true", help="Don’t catch exceptions"\n )\n\n options = parser.parse_args(args)\n\n configLogger(level=("ERROR" if options.quiet else "INFO"))\n\n file_or_path = options.input\n font = None\n try:\n font = TTFont(file_or_path)\n if "TSIV" in font:\n file_or_path = StringIO(font["TSIV"].data.decode("utf-8"))\n else:\n log.error('"TSIV" table is missing, font was not saved from VOLT?')\n return 1\n except TTLibError:\n pass\n\n converter = VoltToFea(file_or_path, font)\n try:\n fea = converter.convert(options.tables)\n except NotImplementedError as e:\n if options.traceback:\n raise\n location = getattr(e.args[0], "location", None)\n message = f'"{e}" is not supported'\n if location:\n path, line, column = location\n log.error(f"{path}:{line}:{column}: {message}")\n else:\n log.error(message)\n return 1\n with open(options.featurefile, "w") as feafile:\n feafile.write(fea)\n\n\nif __name__ == "__main__":\n import sys\n\n sys.exit(main())\n
|
.venv\Lib\site-packages\fontTools\voltLib\voltToFea.py
|
voltToFea.py
|
Python
| 37,460 | 0.95 | 0.239297 | 0.069708 |
node-utils
| 764 |
2024-02-26T03:33:50.562766
|
MIT
| false |
a0c52415602069bc4bf9daf592a4d076
|
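The main() entry point in voltToFea.py above shows the whole programmatic path for converting a VOLT project embedded in a font: open the font, pull the VOLT source out of the "TSIV" table, and hand it to VoltToFea. A minimal sketch of that flow, assuming a hypothetical input font "MyFont.ttf" that was saved from VOLT (so it carries a "TSIV" table) and a hypothetical output path "MyFont.fea"; convert() is passed None to request all TABLES, exactly as main() does when no -t option is given:

from io import StringIO

from fontTools.ttLib import TTFont
from fontTools.voltLib.voltToFea import VoltToFea

# "MyFont.ttf" and "MyFont.fea" are placeholder paths.
font = TTFont("MyFont.ttf")

# VOLT stores its project source in the "TSIV" table; decode it to text the
# same way main() does above.
volt_source = StringIO(font["TSIV"].data.decode("utf-8"))

converter = VoltToFea(volt_source, font)
fea = converter.convert(None)  # None = emit all TABLES, as with no -t flag

with open("MyFont.fea", "w") as feafile:
    feafile.write(fea)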
"""fontTools.voltLib -- a package for dealing with Visual OpenType Layout Tool\n(VOLT) files."""\n\n# See\n# http://www.microsoft.com/typography/VOLT.mspx\n
|
.venv\Lib\site-packages\fontTools\voltLib\__init__.py
|
__init__.py
|
Python
| 156 | 0.8 | 0.2 | 0.5 |
vue-tools
| 729 |
2024-02-09T06:20:27.748898
|
MIT
| false |
79f64ef8a4ccb4ab0cbc6180318d3ebf
|
import argparse\nimport logging\nimport sys\nfrom io import StringIO\nfrom pathlib import Path\n\nfrom fontTools import configLogger\nfrom fontTools.feaLib.builder import addOpenTypeFeaturesFromString\nfrom fontTools.feaLib.error import FeatureLibError\nfrom fontTools.feaLib.lexer import Lexer\nfrom fontTools.misc.cliTools import makeOutputFileName\nfrom fontTools.ttLib import TTFont, TTLibError\nfrom fontTools.voltLib.parser import Parser\nfrom fontTools.voltLib.voltToFea import TABLES, VoltToFea\n\nlog = logging.getLogger("fontTools.feaLib")\n\nSUPPORTED_TABLES = TABLES + ["cmap"]\n\n\ndef invalid_fea_glyph_name(name):\n """Check if the glyph name is valid according to FEA syntax."""\n if name[0] not in Lexer.CHAR_NAME_START_:\n return True\n if any(c not in Lexer.CHAR_NAME_CONTINUATION_ for c in name[1:]):\n return True\n return False\n\n\ndef sanitize_glyph_name(name):\n """Sanitize the glyph name to ensure it is valid according to FEA syntax."""\n sanitized = ""\n for i, c in enumerate(name):\n if i == 0 and c not in Lexer.CHAR_NAME_START_:\n sanitized += "a" + c\n elif c not in Lexer.CHAR_NAME_CONTINUATION_:\n sanitized += "_"\n else:\n sanitized += c\n\n return sanitized\n\n\ndef main(args=None):\n """Build tables from a MS VOLT project into an OTF font"""\n parser = argparse.ArgumentParser(\n description="Use fontTools to compile MS VOLT projects."\n )\n parser.add_argument(\n "input",\n metavar="INPUT",\n help="Path to the input font/VTP file to process",\n type=Path,\n )\n parser.add_argument(\n "-f",\n "--font",\n metavar="INPUT_FONT",\n help="Path to the input font (if INPUT is a VTP file)",\n type=Path,\n )\n parser.add_argument(\n "-o",\n "--output",\n dest="output",\n metavar="OUTPUT",\n help="Path to the output font.",\n type=Path,\n )\n parser.add_argument(\n "-t",\n "--tables",\n metavar="TABLE_TAG",\n choices=SUPPORTED_TABLES,\n nargs="+",\n help="Specify the table(s) to be built.",\n )\n parser.add_argument(\n "-F",\n "--debug-feature-file",\n help="Write the generated feature file to disk.",\n action="store_true",\n )\n parser.add_argument(\n "--ship",\n help="Remove source VOLT tables from output font.",\n action="store_true",\n )\n parser.add_argument(\n "-v",\n "--verbose",\n help="Increase the logger verbosity. 
Multiple -v options are allowed.",\n action="count",\n default=0,\n )\n parser.add_argument(\n "-T",\n "--traceback",\n help="show traceback for exceptions.",\n action="store_true",\n )\n options = parser.parse_args(args)\n\n levels = ["WARNING", "INFO", "DEBUG"]\n configLogger(level=levels[min(len(levels) - 1, options.verbose)])\n\n output_font = options.output or Path(\n makeOutputFileName(options.font or options.input)\n )\n log.info(f"Compiling MS VOLT to '{output_font}'")\n\n file_or_path = options.input\n font = None\n\n # If the input is a font file, extract the VOLT data from the "TSIV" table\n try:\n font = TTFont(file_or_path)\n if "TSIV" in font:\n file_or_path = StringIO(font["TSIV"].data.decode("utf-8"))\n else:\n log.error('"TSIV" table is missing')\n return 1\n except TTLibError:\n pass\n\n # If input is not a font file, the font must be provided\n if font is None:\n if not options.font:\n log.error("Please provide an input font")\n return 1\n font = TTFont(options.font)\n\n # FEA syntax does not allow some glyph names that VOLT accepts, so if we\n # found such glyph name we will temporarily rename such glyphs.\n glyphOrder = font.getGlyphOrder()\n tempGlyphOrder = None\n if any(invalid_fea_glyph_name(n) for n in glyphOrder):\n tempGlyphOrder = []\n for n in glyphOrder:\n if invalid_fea_glyph_name(n):\n n = sanitize_glyph_name(n)\n existing = set(tempGlyphOrder) | set(glyphOrder)\n while n in existing:\n n = "a" + n\n tempGlyphOrder.append(n)\n font.setGlyphOrder(tempGlyphOrder)\n\n doc = Parser(file_or_path).parse()\n\n log.info("Converting VTP data to FEA")\n converter = VoltToFea(doc, font)\n try:\n fea = converter.convert(options.tables, ignore_unsupported_settings=True)\n except NotImplementedError as e:\n if options.traceback:\n raise\n location = getattr(e.args[0], "location", None)\n message = f'"{e}" is not supported'\n if location:\n path, line, column = location\n log.error(f"{path}:{line}:{column}: {message}")\n else:\n log.error(message)\n return 1\n\n fea_filename = options.input\n if options.debug_feature_file:\n fea_filename = output_font.with_suffix(".fea")\n log.info(f"Writing FEA to '{fea_filename}'")\n with open(fea_filename, "w") as fp:\n fp.write(fea)\n\n log.info("Compiling FEA to OpenType tables")\n try:\n addOpenTypeFeaturesFromString(\n font,\n fea,\n filename=fea_filename,\n tables=options.tables,\n )\n except FeatureLibError as e:\n if options.traceback:\n raise\n log.error(e)\n return 1\n\n if options.ship:\n for tag in ["TSIV", "TSIS", "TSIP", "TSID"]:\n if tag in font:\n del font[tag]\n\n # Restore original glyph names.\n if tempGlyphOrder:\n import io\n\n f = io.BytesIO()\n font.save(f)\n font = TTFont(f)\n font.setGlyphOrder(glyphOrder)\n font["post"].extraNames = []\n\n font.save(output_font)\n\n\nif __name__ == "__main__":\n sys.exit(main())\n
|
.venv\Lib\site-packages\fontTools\voltLib\__main__.py
|
__main__.py
|
Python
| 6,134 | 0.95 | 0.15534 | 0.027778 |
node-utils
| 460 |
2023-12-26T17:40:30.087857
|
BSD-3-Clause
| false |
596c8fe8b12efe075c92e2d3cf9bd111
|
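invalid_fea_glyph_name() and sanitize_glyph_name() in __main__.py above guard against glyph names that VOLT accepts but FEA syntax rejects: a disallowed first character gets an "a" prefixed, and any later disallowed character is replaced with "_". A small sketch of that behaviour, using made-up glyph names; which names count as invalid ultimately depends on feaLib's Lexer character sets:

from fontTools.voltLib.__main__ import invalid_fea_glyph_name, sanitize_glyph_name

# Hypothetical names; only the invalid ones get renamed, as in main() above.
for name in ["1uni0627", "glyph name", "alef-ar"]:
    if invalid_fea_glyph_name(name):
        print(f"{name!r} -> {sanitize_glyph_name(name)!r}")
    else:
        print(f"{name!r} is already a valid FEA glyph name")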
\n\n
|
.venv\Lib\site-packages\fontTools\voltLib\__pycache__\ast.cpython-313.pyc
|
ast.cpython-313.pyc
|
Other
| 27,620 | 0.8 | 0 | 0 |
awesome-app
| 895 |
2025-04-26T15:30:23.913203
|
BSD-3-Clause
| false |
0d24e77602a164d2b2883af375313b12
|
\n\n
|
.venv\Lib\site-packages\fontTools\voltLib\__pycache__\error.cpython-313.pyc
|
error.cpython-313.pyc
|
Other
| 1,017 | 0.8 | 0 | 0 |
awesome-app
| 416 |
2024-11-30T10:51:25.830109
|
MIT
| false |
7a59428e3b6af31f9c304a2d16ed22c1
|
\n\n
|
.venv\Lib\site-packages\fontTools\voltLib\__pycache__\lexer.cpython-313.pyc
|
lexer.cpython-313.pyc
|
Other
| 5,608 | 0.8 | 0 | 0 |
react-lib
| 852 |
2023-09-24T21:03:39.918949
|
GPL-3.0
| false |
dbac759c11f0c0c14b239c88271f09a8
|
\n\n
|
.venv\Lib\site-packages\fontTools\voltLib\__pycache__\parser.cpython-313.pyc
|
parser.cpython-313.pyc
|
Other
| 34,490 | 0.8 | 0 | 0 |
react-lib
| 256 |
2024-11-04T07:15:51.269411
|
MIT
| false |
1d4ca46e90244ef5899d7d3a0795018e
|
\n\n
|
.venv\Lib\site-packages\fontTools\voltLib\__pycache__\voltToFea.cpython-313.pyc
|
voltToFea.cpython-313.pyc
|
Other
| 45,760 | 0.8 | 0.020942 | 0.017094 |
node-utils
| 147 |
2024-09-05T11:54:03.474535
|
GPL-3.0
| false |
1ad56ba87d3bf79380180bf09682e7cd
|
\n\n
|
.venv\Lib\site-packages\fontTools\voltLib\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 298 | 0.7 | 0.25 | 0 |
python-kit
| 826 |
2025-01-22T09:01:16.436022
|
Apache-2.0
| false |
fba6574d22294bb53cedb95d512d08f0
|
\n\n
|
.venv\Lib\site-packages\fontTools\voltLib\__pycache__\__main__.cpython-313.pyc
|
__main__.cpython-313.pyc
|
Other
| 8,022 | 0.8 | 0.034091 | 0 |
vue-tools
| 208 |
2024-11-28T09:41:49.317593
|
BSD-3-Clause
| false |
d86eea36d12d86da086371e0c9609e18
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\afmLib.cpython-313.pyc
|
afmLib.cpython-313.pyc
|
Other
| 16,977 | 0.95 | 0.028902 | 0.012579 |
vue-tools
| 531 |
2024-10-15T22:18:31.154438
|
MIT
| false |
a5bc0e7c8131ef889c6151a8f4496b4b
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\fontBuilder.cpython-313.pyc
|
fontBuilder.cpython-313.pyc
|
Other
| 35,702 | 0.8 | 0.052356 | 0.002817 |
awesome-app
| 886 |
2024-08-10T15:53:46.734829
|
Apache-2.0
| false |
b6fc6a0fb12109c5f7e3edf12a4b500c
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\help.cpython-313.pyc
|
help.cpython-313.pyc
|
Other
| 2,050 | 0.95 | 0 | 0 |
react-lib
| 648 |
2024-02-06T12:58:55.745795
|
MIT
| false |
a1f5b1ce05600c4b33a2c4573e78bead
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\tfmLib.cpython-313.pyc
|
tfmLib.cpython-313.pyc
|
Other
| 18,874 | 0.95 | 0.012931 | 0.027273 |
vue-tools
| 33 |
2024-08-11T13:07:31.230622
|
BSD-3-Clause
| false |
6696888d8290a9dd2a3302f362881b74
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\ttx.cpython-313.pyc
|
ttx.cpython-313.pyc
|
Other
| 20,250 | 0.95 | 0.028571 | 0 |
awesome-app
| 57 |
2024-01-02T06:23:18.698094
|
GPL-3.0
| false |
0020705f38dae205b39f2ff7766c6ad0
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\unicode.cpython-313.pyc
|
unicode.cpython-313.pyc
|
Other
| 2,414 | 0.8 | 0 | 0 |
awesome-app
| 795 |
2024-05-24T17:13:09.252940
|
BSD-3-Clause
| false |
9ce1762062bb325f2264ea94823d9155
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 436 | 0.7 | 0 | 0 |
react-lib
| 815 |
2024-07-01T21:48:31.084708
|
GPL-3.0
| false |
6efc7b3c41c98c7e665305f8c6c7a138
|
\n\n
|
.venv\Lib\site-packages\fontTools\__pycache__\__main__.cpython-313.pyc
|
__main__.cpython-313.pyc
|
Other
| 1,259 | 0.8 | 0 | 0 |
python-kit
| 871 |
2025-06-17T20:04:35.667933
|
MIT
| false |
c70c572ab0a585d4440d85f474fed67a
|
[console_scripts]\nfonttools = fontTools.__main__:main\npyftmerge = fontTools.merge:main\npyftsubset = fontTools.subset:main\nttx = fontTools.ttx:main\n
|
.venv\Lib\site-packages\fonttools-4.58.5.dist-info\entry_points.txt
|
entry_points.txt
|
Other
| 147 | 0.7 | 0 | 0 |
python-kit
| 592 |
2024-10-21T18:51:22.338636
|
GPL-3.0
| false |
6fa1c43ebe4f8f57866f4b53f2ca54d2
|
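The entry_points.txt record above is what maps the fonttools, pyftmerge, pyftsubset and ttx console scripts onto their Python main() functions. A sketch, assuming the Python 3.10+ importlib.metadata API, that lists the installed console scripts backed by fontTools; its output should agree with the file shown:

from importlib.metadata import entry_points

# Enumerate installed console scripts and keep the ones that point into
# fontTools; this should mirror entry_points.txt above.
for ep in entry_points().select(group="console_scripts"):
    if ep.value.startswith("fontTools"):
        print(f"{ep.name} = {ep.value}")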
pip\n
|
.venv\Lib\site-packages\fonttools-4.58.5.dist-info\INSTALLER
|
INSTALLER
|
Other
| 4 | 0.5 | 0 | 0 |
python-kit
| 260 |
2023-07-10T19:36:06.540322
|
BSD-3-Clause
| false |
365c9bfeb7d89244f2ce01c1de44cb85
|
../../Scripts/fonttools.exe,sha256=RoQAjwYjKSxRpeB6pH4OLZWXfVaqG3qxD5oqIFk2EF8,108419\n../../Scripts/pyftmerge.exe,sha256=mtRGL3USi3bfOdirr3Q8jzaVsCuQYmIXQE2O8-TgOY4,108416\n../../Scripts/pyftsubset.exe,sha256=tgAXuVheu4Qv1arRHNDqbjEDf_giz19XtXTHxqfHY1o,108417\n../../Scripts/ttx.exe,sha256=_8aKI-UUzddREaFoW5nQ-Ky4mxx07vBNJDlc9AVtH58,108414\n../../share/man/man1/ttx.1,sha256=E71F9mRNWlttVpzlnP7w_fqkQygPkph5s-AtVa0Js50,5601\nfontTools/__init__.py,sha256=CMvLHAX8mH1p7lpibUmfxRpuUumFA3KUNRLvSOB4KYU,191\nfontTools/__main__.py,sha256=T8Tg8xPKHOCVoYVG82p_zpQXfW7_ERRAphBkZVvhWN8,960\nfontTools/__pycache__/__init__.cpython-313.pyc,,\nfontTools/__pycache__/__main__.cpython-313.pyc,,\nfontTools/__pycache__/afmLib.cpython-313.pyc,,\nfontTools/__pycache__/agl.cpython-313.pyc,,\nfontTools/__pycache__/fontBuilder.cpython-313.pyc,,\nfontTools/__pycache__/help.cpython-313.pyc,,\nfontTools/__pycache__/tfmLib.cpython-313.pyc,,\nfontTools/__pycache__/ttx.cpython-313.pyc,,\nfontTools/__pycache__/unicode.cpython-313.pyc,,\nfontTools/afmLib.py,sha256=YbmmjT8Du6qFUhFHwnAhOdvsyfXszODVjSJtd18CCjY,13603\nfontTools/agl.py,sha256=4aKwnbvSVUa39eV5Ka8e5ULwV-IEp4pcfwlMwEH_z3k,118208\nfontTools/cffLib/CFF2ToCFF.py,sha256=5uPKDFwoJvH0KVDrCjpf3MdOpqbyvdZMe0jZ3emjdsQ,6291\nfontTools/cffLib/CFFToCFF2.py,sha256=0dCYSSozptUC9BVUre49e6LgjSxJRtVyMl8vDB6i3r4,10424\nfontTools/cffLib/__init__.py,sha256=E4wzLsJ1LxWO7CIR7fjZMHaYQJSVdqCO08fOVFowwpM,111580\nfontTools/cffLib/__pycache__/CFF2ToCFF.cpython-313.pyc,,\nfontTools/cffLib/__pycache__/CFFToCFF2.cpython-313.pyc,,\nfontTools/cffLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/cffLib/__pycache__/specializer.cpython-313.pyc,,\nfontTools/cffLib/__pycache__/transforms.cpython-313.pyc,,\nfontTools/cffLib/__pycache__/width.cpython-313.pyc,,\nfontTools/cffLib/specializer.py,sha256=dznFa-7VrKZkx6D8klaixTaqEAnrnT6YLX9jzA6S0Cc,33536\nfontTools/cffLib/transforms.py,sha256=8hffhsWRhBhVukNSL-7ieuygTVV5Ta3Cz9s4s8Awvgg,17861\nfontTools/cffLib/width.py,sha256=3L9NWI0uQrJHvHF_IvC_tbW1cq94zgDEPSjubdug8qM,6284\nfontTools/colorLib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\nfontTools/colorLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/colorLib/__pycache__/builder.cpython-313.pyc,,\nfontTools/colorLib/__pycache__/errors.cpython-313.pyc,,\nfontTools/colorLib/__pycache__/geometry.cpython-313.pyc,,\nfontTools/colorLib/__pycache__/table_builder.cpython-313.pyc,,\nfontTools/colorLib/__pycache__/unbuilder.cpython-313.pyc,,\nfontTools/colorLib/builder.py,sha256=S8z4Qzw2FAE-d1Zm1eHyqDBYh6FW4W_hQJWjVeVicOk,23672\nfontTools/colorLib/errors.py,sha256=_3vbGsi6nlkRxxglt82uxK89K8tjURX59G3BBQIy5ps,43\nfontTools/colorLib/geometry.py,sha256=RH7sl0oP9othrawGMeLVDAIocv8I2HrMd3aW857Xi_s,5661\nfontTools/colorLib/table_builder.py,sha256=0k6SHt8JBwP6hy-nZ9k6VXnPywdPRBe91yZyGq3Mzb8,7692\nfontTools/colorLib/unbuilder.py,sha256=nw8YKKiJiSsZAPcvPzRvXO-oZnvWmCWE7Y8nU1g75iE,2223\nfontTools/config/__init__.py,sha256=qdbu1XOSFLOEZSe9Rfgj33Pkff8wEaaWJJExf_dzv0A,3244\nfontTools/config/__pycache__/__init__.cpython-313.pyc,,\nfontTools/cu2qu/__init__.py,sha256=OoM_nBJAleZal6kxeNJn1ESy1pNm5c3DG417yVIE0-Q,633\nfontTools/cu2qu/__main__.py,sha256=6Vb8Ler3yqJ5w84UwlMJV6cS01uhV4PN10OlXQ6jlqo,98\nfontTools/cu2qu/__pycache__/__init__.cpython-313.pyc,,\nfontTools/cu2qu/__pycache__/__main__.cpython-313.pyc,,\nfontTools/cu2qu/__pycache__/benchmark.cpython-313.pyc,,\nfontTools/cu2qu/__pycache__/cli.cpython-313.pyc,,\nfontTools/cu2qu/__pycache__/cu2qu.cpython-313.pyc,,\nfontTools/cu2qu/__pycache__/errors.cpython-313
.pyc,,\nfontTools/cu2qu/__pycache__/ufo.cpython-313.pyc,,\nfontTools/cu2qu/benchmark.py,sha256=FwdvNjKfWHo18_CX0CO8AY5c68XSBE4M4TJo_EkB4q8,1350\nfontTools/cu2qu/cli.py,sha256=CvWzC5a6XF_v5o0yrS4vGI1JXiVVLzSJahTIqpJmiPk,6274\nfontTools/cu2qu/cu2qu.c,sha256=vGZopOBC5bnIqTGmwPiW107oLG0YSrHSoyq1BFoqppI,644884\nfontTools/cu2qu/cu2qu.cp313-win_amd64.pyd,sha256=TPjal59ksiRviLs8PMyf3i0hHtnGHGSckbfkslcB5DU,99840\nfontTools/cu2qu/cu2qu.py,sha256=XH2bnQ5aG9ic921ZWzQzU1-q3MQU6INCjLk4XjRj5_Y,16970\nfontTools/cu2qu/errors.py,sha256=uYyPSs_x-EMJKO2S3cLGWyk_KlHoOoh_XEtdB_oKBp0,2518\nfontTools/cu2qu/ufo.py,sha256=Mpd_7Be9jxNcOKFqkyRp8Oem3CS3R-ZYMMSD03LJL6o,12143\nfontTools/designspaceLib/__init__.py,sha256=80fzbsWaoTMaXsPGMnevXAxR4eqvZeYCwV_GYpBvlkM,132601\nfontTools/designspaceLib/__main__.py,sha256=QOn1SNf8xmw-zQ5EJN0JnrHllu9rbRm8kTpWF9b3jlo,109\nfontTools/designspaceLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/designspaceLib/__pycache__/__main__.cpython-313.pyc,,\nfontTools/designspaceLib/__pycache__/split.cpython-313.pyc,,\nfontTools/designspaceLib/__pycache__/statNames.cpython-313.pyc,,\nfontTools/designspaceLib/__pycache__/types.cpython-313.pyc,,\nfontTools/designspaceLib/split.py,sha256=MjgyVDfhLEdb844nioL3xIN6VinHqY4jcdOlwmvr03M,19714\nfontTools/designspaceLib/statNames.py,sha256=RxxSLfkniuJ9I1aeXiLEdCS8uTL4w952_5D9DSfiRM4,9497\nfontTools/designspaceLib/types.py,sha256=HtM5ibhj1FeoS5Yq2Q5YAlP8CL5WDI_W_0v-qJyKJww,5467\nfontTools/encodings/MacRoman.py,sha256=rxWvh1yMTg_pY7_sSKpjfD6bYcA-BVHZL4S8JUH33fc,3834\nfontTools/encodings/StandardEncoding.py,sha256=z0Uh0ZLnz5SsO6T2dxN0S646ZYRfpC_F6HtUIsidC94,3839\nfontTools/encodings/__init__.py,sha256=QoK6HlOoqtVqX5gOyv0bJiTXsVBbBRreUifdccWNp2k,76\nfontTools/encodings/__pycache__/MacRoman.cpython-313.pyc,,\nfontTools/encodings/__pycache__/StandardEncoding.cpython-313.pyc,,\nfontTools/encodings/__pycache__/__init__.cpython-313.pyc,,\nfontTools/encodings/__pycache__/codecs.cpython-313.pyc,,\nfontTools/encodings/codecs.py,sha256=bSpO6kuPbEIDsXSVHhzftqsm_FFUiXpLVfPSk410SqE,4856\nfontTools/feaLib/__init__.py,sha256=RprjP6BKswq4pt0J-9L1XGuZfjIFAGD6HDly_haMAN4,217\nfontTools/feaLib/__main__.py,sha256=niUAPkiYxeRAJMlJuvVJZism2VFufZrNaQtieA7sNLk,2318\nfontTools/feaLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/__main__.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/ast.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/builder.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/error.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/lexer.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/location.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/lookupDebugInfo.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/parser.cpython-313.pyc,,\nfontTools/feaLib/__pycache__/variableScalar.cpython-313.pyc,,\nfontTools/feaLib/ast.py,sha256=q-UvEPZ97AAHpggVOzVHdgfTcE072kuOK08rdAYpCXU,76301\nfontTools/feaLib/builder.py,sha256=9f7v9Vfo0HkC6Pqj-hP4B6xj5GDIoV_XLCeDajmbI2g,74962\nfontTools/feaLib/error.py,sha256=pqi8F2tnH2h7pXVffxwzuBuWaSHMzZsXs5VckdQKQAI,670\nfontTools/feaLib/lexer.c,sha256=PXU52K9r0MSS_E2tiD4R_ac1r-uXCeye8yHgRfcww44,770662\nfontTools/feaLib/lexer.cp313-win_amd64.pyd,sha256=BIAYIYHAAregYhO_Be5PEqWeuFpJwqx5mao0JDimG3s,118272\nfontTools/feaLib/lexer.py,sha256=7VZ3NPFH7V1mvRbym111BNKvbB4hLfGLTMS0VV_3Ipw,11408\nfontTools/feaLib/location.py,sha256=teHrhjT8zzImcGBEJS1J43oaX9onCPu_pynxS8d-tUg,246\nfontTools/feaLib/lookupDebugInfo.py,sha256=h4Ig8kmEk5WlGf1C9JJAbbOKQK5OwkFLdj8CT7fOkmU,316\nfontTools/feaLib/parser.py,sha256=fdycJS5E1RtBvFifNx4rub360J6LmUyX5Xpv
foEGxNI,101710\nfontTools/feaLib/variableScalar.py,sha256=RiLHKQh2-wa-BZ015H2e7XkbshssTj2PjlapaMNJfAs,4182\nfontTools/fontBuilder.py,sha256=qeUEUzacKdZXZ9dS_e2AuLE62c17eg_oHwnZgD04TaQ,35144\nfontTools/help.py,sha256=8yn5iAonGPsijFSHmU6aLuuZtaLMhR5CIkSp9hVYL2c,1161\nfontTools/merge/__init__.py,sha256=6MOtk0FXWmSmZsLf1sfjiN2lteVm-u9tI0RVWFewYHM,8498\nfontTools/merge/__main__.py,sha256=3_u3dnyEOyh0O-SrLMLlkXxOfCFT-0SlwJpimosVJ-c,100\nfontTools/merge/__pycache__/__init__.cpython-313.pyc,,\nfontTools/merge/__pycache__/__main__.cpython-313.pyc,,\nfontTools/merge/__pycache__/base.cpython-313.pyc,,\nfontTools/merge/__pycache__/cmap.cpython-313.pyc,,\nfontTools/merge/__pycache__/layout.cpython-313.pyc,,\nfontTools/merge/__pycache__/options.cpython-313.pyc,,\nfontTools/merge/__pycache__/tables.cpython-313.pyc,,\nfontTools/merge/__pycache__/unicode.cpython-313.pyc,,\nfontTools/merge/__pycache__/util.cpython-313.pyc,,\nfontTools/merge/base.py,sha256=LPJKOwMiDwayLGzA1xH325CtYHPvahAA17lihvKjiPw,2470\nfontTools/merge/cmap.py,sha256=zoOze0gVp4YQXGs-zFf5k7DgEPdFMs-A3sm5v-Rtz5M,6901\nfontTools/merge/layout.py,sha256=S9j0FOUDOtXAzfO7_L6IrLBHplSLfxFqIi_IJUunXCg,16601\nfontTools/merge/options.py,sha256=b-9GZ-nN7fh1VrpnEFhK_eRZPIIlRArtYOndOCetoUY,2586\nfontTools/merge/tables.py,sha256=xjWt2uqgfxmrDvpLfo_ngsPr7aY8CTkDwwjYBToLnm0,11310\nfontTools/merge/unicode.py,sha256=mgqRFhRugda62Xt0r28SduaN7YBzRfHxrpNprjLqoX8,4351\nfontTools/merge/util.py,sha256=3alo4b7mhFNC6h8PjeqNU99dS7EuO8sdZkZpvRsEE6E,3521\nfontTools/misc/__init__.py,sha256=QoK6HlOoqtVqX5gOyv0bJiTXsVBbBRreUifdccWNp2k,76\nfontTools/misc/__pycache__/__init__.cpython-313.pyc,,\nfontTools/misc/__pycache__/arrayTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/bezierTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/classifyTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/cliTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/configTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/cython.cpython-313.pyc,,\nfontTools/misc/__pycache__/dictTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/eexec.cpython-313.pyc,,\nfontTools/misc/__pycache__/encodingTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/etree.cpython-313.pyc,,\nfontTools/misc/__pycache__/filenames.cpython-313.pyc,,\nfontTools/misc/__pycache__/fixedTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/intTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/iterTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/lazyTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/loggingTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/macCreatorType.cpython-313.pyc,,\nfontTools/misc/__pycache__/macRes.cpython-313.pyc,,\nfontTools/misc/__pycache__/psCharStrings.cpython-313.pyc,,\nfontTools/misc/__pycache__/psLib.cpython-313.pyc,,\nfontTools/misc/__pycache__/psOperators.cpython-313.pyc,,\nfontTools/misc/__pycache__/py23.cpython-313.pyc,,\nfontTools/misc/__pycache__/roundTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/sstruct.cpython-313.pyc,,\nfontTools/misc/__pycache__/symfont.cpython-313.pyc,,\nfontTools/misc/__pycache__/testTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/textTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/timeTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/transform.cpython-313.pyc,,\nfontTools/misc/__pycache__/treeTools.cpython-313.pyc,,\nfontTools/misc/__pycache__/vector.cpython-313.pyc,,\nfontTools/misc/__pycache__/visitor.cpython-313.pyc,,\nfontTools/misc/__pycache__/xmlReader.cpython-313.pyc,,\nfontTools/misc/__pycache__/xmlWriter.cpython-313.pyc,,\nfontTools/misc/arra
yTools.py,sha256=baENNALPvYRUhS4rdx_F3ltOmVIf1PV9G2EaMt7gAHM,11907\nfontTools/misc/bezierTools.c,sha256=R4i5TUxLgiDFCwkoBZUpdBhGzRp5AI4GYDW497S1f54,1860910\nfontTools/misc/bezierTools.cp313-win_amd64.pyd,sha256=0Dqv-Ln1DSbo2QY6kuz713sD2V91CWt5lpKMOrL2pAs,338432\nfontTools/misc/bezierTools.py,sha256=m4j14ckKYtrKy8NhFFFY_Uv3kuL8g-SWNdEKUzqGjRQ,46535\nfontTools/misc/classifyTools.py,sha256=wLTjOhLiZaLiwwUTj2Ad5eZ5T_38W0Eo_uzRGWHWYvE,5783\nfontTools/misc/cliTools.py,sha256=7zKOXczaCKRMW6Yv5jdCZYHco8y0-lfimhIWzQ2IL8A,1915\nfontTools/misc/configTools.py,sha256=JNR7HqId8zudAlFcK4lwocHZkwgaTSH4u6BOyFLTujw,11537\nfontTools/misc/cython.py,sha256=fZ9_mObkVzdJoK6sufiIU95k5GStjp6LWOk4AQ8zW_Q,709\nfontTools/misc/dictTools.py,sha256=GZa83GxwQD4-kZYkbCCefW-ggH4WG8G6f5jCy0NcO6w,2500\nfontTools/misc/eexec.py,sha256=eN9R1_67tWaeWn3ikEs0VwB1N7yr4vBbzs-aMbAUROw,3450\nfontTools/misc/encodingTools.py,sha256=rlAZpxgcKXPzfpfHKk0BQW2Edz2JwTT8d0IIMRib3VE,2145\nfontTools/misc/etree.py,sha256=M_4wKgaiaV7ALP3Uiv3HnK_KXFJmb37SUIK4tFZFVws,16760\nfontTools/misc/filenames.py,sha256=IZuoPgh88KI2Rdo56FrHAtNSUoCeIaiWqrQk2VEeRoQ,8468\nfontTools/misc/fixedTools.py,sha256=3HzMFAs57LqsGBnbslq2btQ3KJbKwxmxkJPvTvOi8sY,7900\nfontTools/misc/intTools.py,sha256=kRNjD5_2jyTKo07C0sFT0jT3dcVnU5XGJEjbXCErm4E,611\nfontTools/misc/iterTools.py,sha256=hyLQrAPuUOzDoQWKtKhFLjV8-Gx3jHd9SvBEwQRSeTE,402\nfontTools/misc/lazyTools.py,sha256=LJ7QvDG65xOBw2AI43qGCLxVmfdbsf-PUECfrenbkAU,1062\nfontTools/misc/loggingTools.py,sha256=27VatVrX8Yu-w5rFYSUjOnPLJIJ9Hx2R6hJ5YpP_djA,20476\nfontTools/misc/macCreatorType.py,sha256=5JZKTsnkI_VBhC52lwMSrdmzqgUOhwC42jPvbGahsPo,1649\nfontTools/misc/macRes.py,sha256=ewiYDKioxxBKW6JQcRmxpNYw5JgtJZIJyqWBG_KplUo,8840\nfontTools/misc/plistlib/__init__.py,sha256=doPqlGry1mRywSup0ahnwuT7mNeClhYQ82y7kd86hWQ,21794\nfontTools/misc/plistlib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/misc/plistlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\nfontTools/misc/psCharStrings.py,sha256=e5kR55Gm3orJsDLo3eu6CxpoZ1pMNZh5Wm-Zj4m7wJs,44532\nfontTools/misc/psLib.py,sha256=cqxG8yMZ7_5VTxgTUl2ARNhIhNu_iTzxLTEd1Egwugo,12497\nfontTools/misc/psOperators.py,sha256=9nZ4ymbiiCApY9V8OARpYqvO73OEcJgGyTtCuGzD-rw,16272\nfontTools/misc/py23.py,sha256=BhByQabxZis6fDvK3ZVeI-YRj_1rMQeBZCFzGWIac0U,2334\nfontTools/misc/roundTools.py,sha256=2rmbuk73NYGPmJqP58FQCFioSLilvNffd0WbL5znKUg,3283\nfontTools/misc/sstruct.py,sha256=RG8qOzTkp9LIN5bis5XkbA-6amnuv2Pi-foZTzIQRRE,7389\nfontTools/misc/symfont.py,sha256=KYAtw-ZnG5YReS8XkSDIvxc1bl0xzZl-Wx4J7k7u7LA,7219\nfontTools/misc/testTools.py,sha256=SG48M4TJIQ4_cPpitUzGEITPnwL-o0yNZKXzWSQdwVE,7285\nfontTools/misc/textTools.py,sha256=NIBmM6k9PXIs8DMpio-9ckHS35QxL2EMFwBXP6zG-8w,3531\nfontTools/misc/timeTools.py,sha256=lmncKUKvxQKO4Kqx2k7UNFkYYpj2n5CwR1lPiLZv3tA,2322\nfontTools/misc/transform.py,sha256=pCR0tbKzmhH6crB_rDT5hnAWySztW_XqL0efmKOVsCU,16314\nfontTools/misc/treeTools.py,sha256=IMopMUcuhelvz8gNra50Zc1w8DSlWywnL6DFaz1ijQs,1314\nfontTools/misc/vector.py,sha256=yaNixq5pXXpPCD_wRP-LsXYSLr4WPX_y92Po05FeLU0,4209\nfontTools/misc/visitor.py,sha256=30EPyUKvNsYU81uiYKjp_9gFj2KSqF8W2y_ldyIBJYQ,5760\nfontTools/misc/xmlReader.py,sha256=gqYg3qlDkrKsO55DPaJ-dU0i5rltqZgnKlrXmR2Z7dQ,6768\nfontTools/misc/xmlWriter.py,sha256=3gHeiyhbXDqDK-jn44f4znND3nEPWnk2Bdlm2Y8JZYo,6250\nfontTools/mtiLib/__init__.py,sha256=izRpPCoQfLoDjrlgKqP6gAE6JF9LU73aqH2_qi0NpaM,48002\nfontTools/mtiLib/__main__.py,sha256=MnVcMQ1TxmYged20wKcjrpZDIvetmkzfRVKHCb5dsUc,99\nfontTools/mtiLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/mtiLi
b/__pycache__/__main__.cpython-313.pyc,,\nfontTools/otlLib/__init__.py,sha256=WhTONAtlItZxWAkHNit_EBW19pP32TFZSqIJ_GG6Peg,46\nfontTools/otlLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/otlLib/__pycache__/builder.cpython-313.pyc,,\nfontTools/otlLib/__pycache__/error.cpython-313.pyc,,\nfontTools/otlLib/__pycache__/maxContextCalc.cpython-313.pyc,,\nfontTools/otlLib/builder.py,sha256=YTmrntgVrFbeSkAv44AZqBdIUBUUPyKcKPHj_kkh_pc,132192\nfontTools/otlLib/error.py,sha256=0OQ2AuxKNEqvoHIkgouf47LDGDEmPUlhdZIW5DROL8k,346\nfontTools/otlLib/maxContextCalc.py,sha256=sVU7LLwkjhV16ADcpjbUwCt5PZbWWdc8_yZo9Lv7HaI,3271\nfontTools/otlLib/optimize/__init__.py,sha256=NKqA7fqHyzjkmuBL_ZVpc3u9OMbWxbKDtymC8CnVGNY,1583\nfontTools/otlLib/optimize/__main__.py,sha256=ZZDwg21yVtdQi9GkNQe70w49hn9fPmObFEEDWGlCj3U,110\nfontTools/otlLib/optimize/__pycache__/__init__.cpython-313.pyc,,\nfontTools/otlLib/optimize/__pycache__/__main__.cpython-313.pyc,,\nfontTools/otlLib/optimize/__pycache__/gpos.cpython-313.pyc,,\nfontTools/otlLib/optimize/gpos.py,sha256=Rr9o9BJjQt_hLKxROqRFT41vY0eAcsSCogPhItPN3R8,18107\nfontTools/pens/__init__.py,sha256=QoK6HlOoqtVqX5gOyv0bJiTXsVBbBRreUifdccWNp2k,76\nfontTools/pens/__pycache__/__init__.cpython-313.pyc,,\nfontTools/pens/__pycache__/areaPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/basePen.cpython-313.pyc,,\nfontTools/pens/__pycache__/boundsPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/cairoPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/cocoaPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/cu2quPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/explicitClosingLinePen.cpython-313.pyc,,\nfontTools/pens/__pycache__/filterPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/freetypePen.cpython-313.pyc,,\nfontTools/pens/__pycache__/hashPointPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/momentsPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/perimeterPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/pointInsidePen.cpython-313.pyc,,\nfontTools/pens/__pycache__/pointPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/qtPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/qu2cuPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/quartzPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/recordingPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/reportLabPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/reverseContourPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/roundingPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/statisticsPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/svgPathPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/t2CharStringPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/teePen.cpython-313.pyc,,\nfontTools/pens/__pycache__/transformPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/ttGlyphPen.cpython-313.pyc,,\nfontTools/pens/__pycache__/wxPen.cpython-313.pyc,,\nfontTools/pens/areaPen.py,sha256=SJnD7HwRg6JL_p7HaAy5DB64G75So9sqIdmzCSRv1bI,1524\nfontTools/pens/basePen.py,sha256=Wrd4xNl2apH4fdpkCPbV8z0QuNX7k46JHwylZer72G0,17548\nfontTools/pens/boundsPen.py,sha256=JPqvmslPlv2kgdmhgjeJo-CTYbloxxkkaJD8wVTVpng,3227\nfontTools/pens/cairoPen.py,sha256=jQL-9usqCU_FvfFpH4uaKjOcGd6jsarPpVM3vrhdyOU,618\nfontTools/pens/cocoaPen.py,sha256=ReJkXzlgP8qe4zi_6X4oO_I6m0jQGATeB6ZHjJhNv_I,638\nfontTools/pens/cu2quPen.py,sha256=w9xTNmhb96kvNZwcM5WT9q8FnRgA51AOISzVRpkiI3g,13332\nfontTools/pens/explicitClosingLinePen.py,sha256=knCXcjSl2iPy6mLCDnsdDYx6J5rV7FH4S24OXFdINjg,3320\nfontTools/pens/filterPen.py,sha256=tWhgklyaCTUt7oQRTBbFUcOlc702V0NfadCH3X93CYg,8031\nfontTools/pens/freetypePen.py,sha2
56=NqNzXrOTDckoH4N6WLnj-KuxGcg6z7DlqSCfmpq8qAE,20370\nfontTools/pens/hashPointPen.py,sha256=ZAU87uw5ge3Kb4i9kRV28a5VFeZ_TWSsJabyAzwAHrU,3662\nfontTools/pens/momentsPen.c,sha256=JgH7cc_5ZKbJJUq0iiwppphdXhD4XL33KsNAIsBejx8,578346\nfontTools/pens/momentsPen.cp313-win_amd64.pyd,sha256=i1VEp9D3PfEivtaxNF_cDXKRAzZcIgdVVv1tWrcOgM4,89088\nfontTools/pens/momentsPen.py,sha256=Z-V5CjQBSj3qPxg3C_DBFKExqno89nOe3jWwHT9_xsM,26537\nfontTools/pens/perimeterPen.py,sha256=Zy5F8QzaNJAkkQQSb2QJCp-wZTvDAjBn-B099t2ABds,2222\nfontTools/pens/pointInsidePen.py,sha256=Hy48iR5NWV3x_wWoos-UC7GMtwvvUhd_q_ykiwaWdzQ,6547\nfontTools/pens/pointPen.py,sha256=GV28cLEwSgpZZ4QGV7_eUw4Mdks_UqIpShQU0DBcvRs,23339\nfontTools/pens/qtPen.py,sha256=KHHQggFQc6Gq-kPdn9X2_wBXTPWzvyzKTSUeq0mqvSM,663\nfontTools/pens/qu2cuPen.py,sha256=VIqUzA_y_6xnRmTESKzlKkoByh7ZU5TnQwHnVAoy4no,4090\nfontTools/pens/quartzPen.py,sha256=6DMDWPYfsOb374VDnLLpKLqcMJig4GCGbTsW1Jr0fgg,1330\nfontTools/pens/recordingPen.py,sha256=hw393TStvhoF1XT7aidpVQ8glASbxZuARnUAyUyZAGM,12824\nfontTools/pens/reportLabPen.py,sha256=vVRG044LvUvFtqrRFYRiMFS_USHAeAvz9y9-7__WbY4,2145\nfontTools/pens/reverseContourPen.py,sha256=E_Ny86JfiMoQ04VfswMtdpaKCU37wNy9ifOccb0aWKQ,4118\nfontTools/pens/roundingPen.py,sha256=AHC1J0dgRChtFmkkgeR1D1ZNFUoHZTcHpWRIyL5d1_Q,4779\nfontTools/pens/statisticsPen.py,sha256=F_JjbNtvYmJ0b3Fbv3BA3-LZhecodPr4tJEQZZd4Jxc,10120\nfontTools/pens/svgPathPen.py,sha256=4aU4iTlnGuzzyXrBgfHvrjMOkC2rdSF8HOkJ_q8tZ38,8882\nfontTools/pens/t2CharStringPen.py,sha256=g0lcaRhSAs4T2NuWvn89TODikC1t9x4KyBq0Dnkso-0,3019\nfontTools/pens/teePen.py,sha256=19N3FEaFm4mGMTZrEn5Qg4YiXGGK61zcXjh2LcRxe_s,1345\nfontTools/pens/transformPen.py,sha256=_Zvyxp0yQ7iFZ1_FYfr3KFWKWYOUY2eSxrRk41BRO2w,4171\nfontTools/pens/ttGlyphPen.py,sha256=gAglwTL9DSsJGI8TUPVz-YBdPSMUcvd2S9jF-FzmckE,12205\nfontTools/pens/wxPen.py,sha256=bolMLl06Q-TxsN8-SsSDbmJStTPGXMYJQZ7Vb67FhLw,709\nfontTools/qu2cu/__init__.py,sha256=MpdE0XsHSDo9M3hyHLkPPLxB3FKr3aiT0dPW5qHCuSo,633\nfontTools/qu2cu/__main__.py,sha256=leKpToUNNyHf0nobr1I19vus2ziA1pO7rRKkreat-Xw,100\nfontTools/qu2cu/__pycache__/__init__.cpython-313.pyc,,\nfontTools/qu2cu/__pycache__/__main__.cpython-313.pyc,,\nfontTools/qu2cu/__pycache__/benchmark.cpython-313.pyc,,\nfontTools/qu2cu/__pycache__/cli.cpython-313.pyc,,\nfontTools/qu2cu/__pycache__/qu2cu.cpython-313.pyc,,\nfontTools/qu2cu/benchmark.py,sha256=PFxx2Bfu7-KuNrzdOIBXHPZvyNphqqcTVy4CneaCo3M,1456\nfontTools/qu2cu/cli.py,sha256=1QLBTSZW7e_VATJN9vjszRxIk_-Xjxu1KP53yX4T7q8,3839\nfontTools/qu2cu/qu2cu.c,sha256=mPAVbeH6SgLkKCnPhergCFE2-evs0Z8E1WlvBhDxo5E,706100\nfontTools/qu2cu/qu2cu.cp313-win_amd64.pyd,sha256=LHUJ8YE0pKtnKcI6OjggKa8hKbNHWroYhBVipSnSETM,107008\nfontTools/qu2cu/qu2cu.py,sha256=dtp5Zqhcs_NePwA2U5fgG2LtWleRwmBilTurau8sLL0,12693\nfontTools/subset/__init__.py,sha256=UUOidAx7b_LNNsRnEizxG8tPF5JX5-4fooerwyTWFvU,141588\nfontTools/subset/__main__.py,sha256=cEIC52EtGOJvFDfHXzi0M2EAYmyHAcI-ZZ0lb2y4r7s,101\nfontTools/subset/__pycache__/__init__.cpython-313.pyc,,\nfontTools/subset/__pycache__/__main__.cpython-313.pyc,,\nfontTools/subset/__pycache__/cff.cpython-313.pyc,,\nfontTools/subset/__pycache__/svg.cpython-313.pyc,,\nfontTools/subset/__pycache__/util.cpython-313.pyc,,\nfontTools/subset/cff.py,sha256=GSmxdsokxuFKvJJQVcAIOhd5hYQq8KkzxnXE_dgm8yo,6329\nfontTools/subset/svg.py,sha256=y_yTZuAm3bjcoEOFu5likXoHuG5u1oNiv0mOni2Z9fQ,9637\nfontTools/subset/util.py,sha256=gh2hkLaUmhHKRkdxxdLcFjz8clCmncLqdnDZm_2QNco,779\nfontTools/svgLib/__init__.py,sha256=2igTH8FIxCzEp02sRijWni-ocuGqqwuPPPSpgjozrK0,78\nfontTools/svgLib/
__pycache__/__init__.cpython-313.pyc,,\nfontTools/svgLib/path/__init__.py,sha256=xfTh9zD_JOjEq6EEDtDxYCtn73O33d5wCIaVEfsIb0U,2061\nfontTools/svgLib/path/__pycache__/__init__.cpython-313.pyc,,\nfontTools/svgLib/path/__pycache__/arc.cpython-313.pyc,,\nfontTools/svgLib/path/__pycache__/parser.cpython-313.pyc,,\nfontTools/svgLib/path/__pycache__/shapes.cpython-313.pyc,,\nfontTools/svgLib/path/arc.py,sha256=-jU7F3gO_DdTO6MrDbOLxmFBZ_h5eb02Eq3Z_Ia35Nw,5966\nfontTools/svgLib/path/parser.py,sha256=mMxmJjU1Z9beD0CqFrvBx9LkCutJ2LfKbTLgidLQvNw,11110\nfontTools/svgLib/path/shapes.py,sha256=h3aOhsZ0pPUOLtNab2bj5cJuqPIlgdtOOOT4VYvnRww,5505\nfontTools/t1Lib/__init__.py,sha256=eBp3X5XcHZIV4uurKxyakurcT2bfFdoTVpw4AOMx2TU,21513\nfontTools/t1Lib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/tfmLib.py,sha256=-bv4iv2VhUSse5pA0oXdudf7o7ZuFWdWNsiHElO06dk,14730\nfontTools/ttLib/__init__.py,sha256=2dJ9-KzN_5AwttwMEhmusrxR2IdFTZ73hJiPjeVwuwU,691\nfontTools/ttLib/__main__.py,sha256=gSaKy1O2Hws3_1xGHGdLL-lEUVxw9q8ymNx9YlwIFXs,4881\nfontTools/ttLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/__main__.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/macUtils.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/removeOverlaps.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/reorderGlyphs.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/scaleUpem.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/sfnt.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/standardGlyphOrder.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/ttCollection.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/ttFont.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/ttGlyphSet.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/ttVisitor.cpython-313.pyc,,\nfontTools/ttLib/__pycache__/woff2.cpython-313.pyc,,\nfontTools/ttLib/macUtils.py,sha256=B5UhZU8gQerJMXEG9-BGZsuv3aewFRAGQ5HCuZMzMkQ,1791\nfontTools/ttLib/removeOverlaps.py,sha256=PTxICjLx89JxKfboLruoV_OwuwCIxcJ4feNcCCkrsTQ,13005\nfontTools/ttLib/reorderGlyphs.py,sha256=PAHvoh4yN3u-_aDACH8H1ResVMCmVE7Kp5_mIKAG0TI,10656\nfontTools/ttLib/scaleUpem.py,sha256=Qz-kS48q7a5GibgnPoUglyVk_qIVkYp5KZ-r1aMx_7Q,15054\nfontTools/ttLib/sfnt.py,sha256=7X9xujgV0Za4nOEfUD3mSrrRb-f9NuzEqgJ-IFLNVQU,23494\nfontTools/ttLib/standardGlyphOrder.py,sha256=VG-8hW1VgQIro7cDJusSXThILIr4pQgmU37t85SQ65Y,6056\nfontTools/ttLib/tables/B_A_S_E_.py,sha256=KpUf8_XEoFNEv3RcoQjfOaJUtBBaCxMzfifEcGtAydI,383\nfontTools/ttLib/tables/BitmapGlyphMetrics.py,sha256=cQuhook-kYL6AoUS9vQIAr65Ls6xN-e15l_lCxDwM2w,1833\nfontTools/ttLib/tables/C_B_D_T_.py,sha256=zg-Knjto2WgnEjl-_foLbeecNp0KHUOHg8ZgCmAyCqI,3759\nfontTools/ttLib/tables/C_B_L_C_.py,sha256=b8FTbwHE4RinasyZ1ieLW7lo5gmmWQB3fO73g4DMVAE,539\nfontTools/ttLib/tables/C_F_F_.py,sha256=Jo_pbWzq6im8Jh4N47RTl9E6-6YQbRiNI2mOY-JY8Js,2039\nfontTools/ttLib/tables/C_F_F__2.py,sha256=q9Y6-yvA8JjrMjBKvb0jtg5T0Z1qKPBduTv8A5-WTZk,833\nfontTools/ttLib/tables/C_O_L_R_.py,sha256=pq9xotYUq19Gq8GghKqUlru0nBqlmKdAcqrxa25wboM,6158\nfontTools/ttLib/tables/C_P_A_L_.py,sha256=DEB9H9TXDg9uYhmrNEsNgXJ6cj9NMCVpVDJ-XMsBJzo,12247\nfontTools/ttLib/tables/D_S_I_G_.py,sha256=ngvrE19I7s5t6N3QUsvoUONkTvdVtGC7vItIVDun0r4,5675\nfontTools/ttLib/tables/D__e_b_g.py,sha256=Co-AyQ7kQQs4x7fhIjBt33E9I48mIoZNZR4Z8OUVXUU,1169\nfontTools/ttLib/tables/DefaultTable.py,sha256=_pMaYi_MrvHzioY5s3NvKdzEFaueppMeJIpnfQDwWqg,1536\nfontTools/ttLib/tables/E_B_D_T_.py,sha256=f96YN8zj5Qcp9kweWU0fmG1W-uUewNxLR8Ox3yzsnjo,33369\nfontTools/ttLib/tables/E_B_L_C_.py,sha256=ED5j8COGRPBUXxoey7bXVoRNiZCC2rgx0cWls2YNp6g,30772\nfontTools/ttLib/tables/F_F_T_M_.py,sha256=w
pjIN0MfovCM0JEHdzC7ZCTRjBx9-mNjIMRRQGYstCA,1735\nfontTools/ttLib/tables/F__e_a_t.py,sha256=JgTjN_z2Wo55Ul6hNGqluRu4zgegtnu2H8mvq1hNjfs,5632\nfontTools/ttLib/tables/G_D_E_F_.py,sha256=AjrMHUMYg75zrZloeRbuMXW1VJkYei1iouYpIVZ_mgk,312\nfontTools/ttLib/tables/G_M_A_P_.py,sha256=6_EJEwWdE4Jz6Y2BsRNLpGJPbcuozKMAUSMGhqVqWuc,4868\nfontTools/ttLib/tables/G_P_K_G_.py,sha256=GBwAX4zOC5fAcK7m9bC2Cf_8kcVu-39tdFUaSYH0jFg,4779\nfontTools/ttLib/tables/G_P_O_S_.py,sha256=TU0AI44SJonvGkfF9GO7vH3Ca0R8_DhHDSn5CDUbOfI,411\nfontTools/ttLib/tables/G_S_U_B_.py,sha256=x09o8a8tcnGHdbW--RgA7tDao860uh3Lp183DkeMWpc,307\nfontTools/ttLib/tables/G_V_A_R_.py,sha256=S1dGZnMDJ2kIndVo-I5FGDW05rXuMoIlsoFbjZvtPSM,99\nfontTools/ttLib/tables/G__l_a_t.py,sha256=xmyj4nsf1cpYxBAXrvaZ9zY_G1gclGWpfG1m6qOzgw4,8880\nfontTools/ttLib/tables/G__l_o_c.py,sha256=sTNUnvHMvFSiU1QOhLT9A8Fw0mTOOyUumhxAOEpB4So,2770\nfontTools/ttLib/tables/H_V_A_R_.py,sha256=sVJ4MK33ZenyUq8Hg-tmqu_FlR7tJOsqZgpbUnIQL6E,326\nfontTools/ttLib/tables/J_S_T_F_.py,sha256=Pp8tE_w6YNJaCCnzteYQ7B70pZ1_q1nGci4zfASt-4Q,328\nfontTools/ttLib/tables/L_T_S_H_.py,sha256=pgoHEK-9iBRmAfjO9lYROT5cqOMxsQjO13MMXU0RXp4,2247\nfontTools/ttLib/tables/M_A_T_H_.py,sha256=gU7yDMPDZ_XyA_pYZBYoA_p8rxzJtB65CrNV4Ta35tI,355\nfontTools/ttLib/tables/M_E_T_A_.py,sha256=E_jO_lkeLBuGEtlYfH1sDbzIZhP0ZaJX4u3lgF3ZAMs,12341\nfontTools/ttLib/tables/M_V_A_R_.py,sha256=sKe1GfahViCwY4kFXSU8t8WYH-FzUOwZdO_q6NnQZbM,321\nfontTools/ttLib/tables/O_S_2f_2.py,sha256=HFVzQYVZDqETgRCTbRqa3NyyuwlTwmc6CNSngjt89ZY,28782\nfontTools/ttLib/tables/S_I_N_G_.py,sha256=6deN-m2-k5C20NE8iTdajju8D2Mw_0tcPiHQln_RaMo,3416\nfontTools/ttLib/tables/S_T_A_T_.py,sha256=clj8sbU60dzo16KApGXNp54CSS387GjIjxuiu5cU09c,513\nfontTools/ttLib/tables/S_V_G_.py,sha256=O6Aik0j7t02ODsZRwI_tJUwNJQiZ3Dl3oxPqQhyRXH8,7899\nfontTools/ttLib/tables/S__i_l_f.py,sha256=nXCNpzLpeKDruzavbQVQ_VYLtIB0Yq-i46iMqPClIwg,36027\nfontTools/ttLib/tables/S__i_l_l.py,sha256=LeN6U0y4VLNgtIa7uCX_cpsZW1Ue_yKY8dsZJyS75ec,3316\nfontTools/ttLib/tables/T_S_I_B_.py,sha256=zbtLbMfCSVRd9hc5qoxPoQ8j3tNYtrvofTy7Kl6TBwE,354\nfontTools/ttLib/tables/T_S_I_C_.py,sha256=xpE9EYI3hFETe0CFG4RMe4G52_2aBsOs9kiCwXISKeo,395\nfontTools/ttLib/tables/T_S_I_D_.py,sha256=wYcFELNUSziSax21UlqOnEBpVl4k4aDOXBYI9F3NwMk,354\nfontTools/ttLib/tables/T_S_I_J_.py,sha256=3Q-tPCl04mggf5bIY6p6RvV2ZUmVMfCQ5WYyfJdPfPA,354\nfontTools/ttLib/tables/T_S_I_P_.py,sha256=4h7p-ssF_gklUJHtkPy62eA8LvidV38K3HHmiJ2p0ek,354\nfontTools/ttLib/tables/T_S_I_S_.py,sha256=eDzfFEZHN4-sawGAF5gtAIj8LKRAFXe8z1ve7aHmY9M,354\nfontTools/ttLib/tables/T_S_I_V_.py,sha256=IX-V7mRFxXNmj-wtEfFvpDjkevGZE-OEo5-Dvd6jfgY,881\nfontTools/ttLib/tables/T_S_I__0.py,sha256=-ahx5aDKeZAkoeZHXZRw95aZ85905sYAPhDlKGb_1_A,2575\nfontTools/ttLib/tables/T_S_I__1.py,sha256=-QZug_3dTIAYU25tXrig3gOF7RUvZr_ygjg7jZYajUM,7224\nfontTools/ttLib/tables/T_S_I__2.py,sha256=QU05Fvz1L-OE7bqXwsRuMjrhtGwi7WtI-UwS7lBS1jM,513\nfontTools/ttLib/tables/T_S_I__3.py,sha256=bBo8nZ2bXDcKAmym1rRM--4nrjSXl7CvQhqe6h544SY,565\nfontTools/ttLib/tables/T_S_I__5.py,sha256=hA0iN5Pvq0_8py1PxNn8XcAfqMBM92TaGUJdT_UaCl0,1973\nfontTools/ttLib/tables/T_T_F_A_.py,sha256=2lCDLx_UmkWTvNLrKkDm7T4MErKOvojLDvr6iQ9tXpM,406\nfontTools/ttLib/tables/TupleVariation.py,sha256=m7TWmB4TBmJ6DD_onpYidGp5qq3ogTd4qYRXa8XHJ90,33119\nfontTools/ttLib/tables/V_A_R_C_.py,sha256=6CgniBLKLlrLXRqDC-z6aYHQD1QzXZYF8w9DMof1PMc,301\nfontTools/ttLib/tables/V_D_M_X_.py,sha256=gDeNfw0f1YzJqdad4NSuP5KzuoTbH5bP4GFQOKv58i0,10686\nfontTools/ttLib/tables/V_O_R_G_.py,sha256=s9g03_qeTV3qoJAWpXxpRCmao0l1wj4WagR_YsTlyBQ,6130\nfontTools/ttLib/tables
/V_V_A_R_.py,sha256=PiwzLv95tfXH25hYQFAxL11mwgbLjeg4R1LvVH5m7lU,332\nfontTools/ttLib/tables/__init__.py,sha256=pYmftKvp7RCNIaEJuUNjcZFaiIfOC0zzcApViNKUxkU,2749\nfontTools/ttLib/tables/__pycache__/B_A_S_E_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/BitmapGlyphMetrics.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/C_B_D_T_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/C_B_L_C_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/C_F_F_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/C_F_F__2.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/C_O_L_R_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/C_P_A_L_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/D_S_I_G_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/D__e_b_g.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/DefaultTable.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/E_B_D_T_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/E_B_L_C_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/F_F_T_M_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/F__e_a_t.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G_D_E_F_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G_M_A_P_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G_P_K_G_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G_P_O_S_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G_S_U_B_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G_V_A_R_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G__l_a_t.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/G__l_o_c.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/H_V_A_R_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/J_S_T_F_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/L_T_S_H_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/M_A_T_H_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/M_E_T_A_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/M_V_A_R_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/O_S_2f_2.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/S_I_N_G_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/S_T_A_T_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/S_V_G_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/S__i_l_f.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/S__i_l_l.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I_B_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I_C_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I_D_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I_J_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I_P_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I_S_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I_V_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I__0.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I__1.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I__2.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I__3.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_S_I__5.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/T_T_F_A_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/TupleVariation.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/V_A_R_C_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/V_D_M_X_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/V_O_R_G_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/V_V_A_R_.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/__in
it__.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_a_n_k_r.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_a_v_a_r.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_b_s_l_n.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_c_i_d_g.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_c_m_a_p.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_c_v_a_r.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_c_v_t.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_f_e_a_t.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_f_p_g_m.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_f_v_a_r.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_g_a_s_p.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_g_c_i_d.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_g_l_y_f.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_g_v_a_r.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_h_d_m_x.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_h_e_a_d.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_h_h_e_a.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_h_m_t_x.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_k_e_r_n.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_l_c_a_r.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_l_o_c_a.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_l_t_a_g.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_m_a_x_p.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_m_e_t_a.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_m_o_r_t.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_m_o_r_x.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_n_a_m_e.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_o_p_b_d.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_p_o_s_t.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_p_r_e_p.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_p_r_o_p.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_s_b_i_x.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_t_r_a_k.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_v_h_e_a.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/_v_m_t_x.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/asciiTable.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/grUtils.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/otBase.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/otConverters.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/otData.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/otTables.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/otTraverse.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/sbixGlyph.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/sbixStrike.cpython-313.pyc,,\nfontTools/ttLib/tables/__pycache__/ttProgram.cpython-313.pyc,,\nfontTools/ttLib/tables/_a_n_k_r.py,sha256=eiy6DKxPGw-H9QCLWIQBFveuTFQSKgcPItwgyBOghk8,498\nfontTools/ttLib/tables/_a_v_a_r.py,sha256=cIqfWyyU5hNrJ-SIfVCKQcpyi2DUjobAgrmyE2In9FI,7307\nfontTools/ttLib/tables/_b_s_l_n.py,sha256=iHLFy3sjFFoGa-pDGdcLCCudv4QFMt2VBL06gigGA_k,480\nfontTools/ttLib/tables/_c_i_d_g.py,sha256=BPa6b0yrmT8OXPW3USRpn_H8DOLKFlDe9subtRDJBrc,937\nfontTools/ttLib/tables/_c_m_a_p.py,sha256=BTWxvUta7Y_ZUq2zyIsKY86yXfhs6JGl0bEBJ4hilsY,63799\nfontTools/ttLib/tables/_c_v_a_r.py,sha256=5LCuL07YXkqcqkSQeW4F6Vlx5XYtkjIIyiYfQ88Ydw0,3621\nfontTools/ttLib/tables/_c_v_t.py,sha256=cJ_9JHh-Tud0CjWNaFqyO3X4pKO2_rdL82rtrWhO4Vc,1704\nfontTools/ttLib/tables/_f_e_a_t.py,sha256=g-B57skN5
9uZa3dQ88gMVpY1RmQH92cCkttJq2T4TzY,484\nfontTools/ttLib/tables/_f_p_g_m.py,sha256=eX1DSy29t_BFai90WQKEgHgCuMLqFp01Ff9a1txuuLI,1695\nfontTools/ttLib/tables/_f_v_a_r.py,sha256=hpxU0-u7_pZYDmQdKpmd_7c6BjVpXHdoQ97jKk9Kz5U,9098\nfontTools/ttLib/tables/_g_a_s_p.py,sha256=Pr4X2CEg3a_nYAZrKSWT0auQ5HU2WutP1Shxxg7ALPw,2266\nfontTools/ttLib/tables/_g_c_i_d.py,sha256=diZlew4U8nFK5vimh-GMOjwHw8ccZtIEl9cPq0PrNdA,375\nfontTools/ttLib/tables/_g_l_y_f.py,sha256=hdaJvO2KUL3wqOkP5ti5jE30e6Jt6Stb2aM5-vzb9l8,87901\nfontTools/ttLib/tables/_g_v_a_r.py,sha256=bXLh4wXqtbL_hEmds5PXWJ5FTx58_1VojaqVvgesYH4,12412\nfontTools/ttLib/tables/_h_d_m_x.py,sha256=Da8Og1KFmOLJGkBlLDlcgdvrIeCTx1USGwnmlg93r3E,4398\nfontTools/ttLib/tables/_h_e_a_d.py,sha256=ft9ghTA1NZsGBvB0yElFFCqVHecuCKGjT2m2GfYB3Yc,5056\nfontTools/ttLib/tables/_h_h_e_a.py,sha256=pY92ZLt3o0jZ3KQVd_qtxYtk_tbP2DLzSWm_wVP8FNM,4914\nfontTools/ttLib/tables/_h_m_t_x.py,sha256=p-9K-E3LcdJByagZ-0F0OA11pCVfNS9HtKRjbpvMM6I,6202\nfontTools/ttLib/tables/_k_e_r_n.py,sha256=AjG5Fd6XaPAdXi5puDtLuMrfCsHUi9X7uFh76QGCMrc,11083\nfontTools/ttLib/tables/_l_c_a_r.py,sha256=N-1I6OJHvnF_YfGktyfFTRAG5lrExV7q6HX-0ffSRyQ,403\nfontTools/ttLib/tables/_l_o_c_a.py,sha256=d1pX-QByFSJ00K9NQaOGbAHDr0pax_48PkfCupvekkw,2264\nfontTools/ttLib/tables/_l_t_a_g.py,sha256=rnf8P_C_RIb37HBNk0qDSxP7rK-N9j5CcQHgMrPSuxw,2624\nfontTools/ttLib/tables/_m_a_x_p.py,sha256=9B6lvWo4y42dyLPIvG6CsVOlWCk7bs4DoVJDB8ViEew,5411\nfontTools/ttLib/tables/_m_e_t_a.py,sha256=I8HaZgcIPQZcCxBiSX0rGrfrs-zXRGUfEbJ8eGvZ07A,4025\nfontTools/ttLib/tables/_m_o_r_t.py,sha256=LU3D9PmV_nFs6hoccGmr1pfUzjJaeB_WRW2OIS0RwPc,501\nfontTools/ttLib/tables/_m_o_r_x.py,sha256=vLyrtx_O__BwnPi7Qo3oT8WHaANRARtHcqHSdZ5ct0E,563\nfontTools/ttLib/tables/_n_a_m_e.py,sha256=bvahvBMX21pC6G83D35WZap6F7BDd7Xi2AmhVMBZz00,42300\nfontTools/ttLib/tables/_o_p_b_d.py,sha256=lfJi6kblt_nGmGmRSupwEaud3Ri_y6ftWNuyrCPpzQ0,462\nfontTools/ttLib/tables/_p_o_s_t.py,sha256=Sifgmfgu6UQLfun6nkVGj5jBgMqmYTs3fpDDm_zOOMY,12073\nfontTools/ttLib/tables/_p_r_e_p.py,sha256=qWDjHiHvHaJCx2hYFmjJeMwpgwvD-cG5zkibMh9TWuk,443\nfontTools/ttLib/tables/_p_r_o_p.py,sha256=ux5Z0FrE7uuKQrO-SCQwButVtKmEAsvfDE6mOP_SOnE,439\nfontTools/ttLib/tables/_s_b_i_x.py,sha256=KF9acCLqBcYpg92h5vJBp5LsNT7c4MDKD4rocixRPKw,4994\nfontTools/ttLib/tables/_t_r_a_k.py,sha256=7PLK_3VaZxxdgdn4wiPbMLvmUl0JZIWLWqgl-wvVvvQ,11711\nfontTools/ttLib/tables/_v_h_e_a.py,sha256=ay73lNwQR72zZeyQ00ejfds2XmUp7sOLidnSzMvawUw,4598\nfontTools/ttLib/tables/_v_m_t_x.py,sha256=933DMeQTI9JFfJ3TOjAFE6G8qHXJ7ZI2GukIKSQjaFU,519\nfontTools/ttLib/tables/asciiTable.py,sha256=xJtOWy5lATZJILItU-A0dK4-jNXBByzyVWeO81FW8nc,657\nfontTools/ttLib/tables/grUtils.py,sha256=T_WsEtpW60m9X6Rulko3bGI9aFdSC8Iyffwg_9ky0_I,2362\nfontTools/ttLib/tables/otBase.py,sha256=UMBHfYAUbhe_NkrpsTvgL_vJCbQ9vksZDrdkr1DqPcE,54850\nfontTools/ttLib/tables/otConverters.py,sha256=7_vqtb3-OV_iyHnVBrbQCWLg7lqkrX55F5PwRVDkck8,76270\nfontTools/ttLib/tables/otData.py,sha256=i6KD2n1OqJvYgRlXV9-ya_QSmhYl8jcBQm61zCnBt90,203662\nfontTools/ttLib/tables/otTables.py,sha256=GCxuX-FIMLKbRd4TdE2c9YLsWRkufpzX1TAAxEAJBK4,99836\nfontTools/ttLib/tables/otTraverse.py,sha256=T1fnamNXqvFPUBspFm7aYsq_P0jUSJSy1ab9t48p_ZI,5681\nfontTools/ttLib/tables/sbixGlyph.py,sha256=a-mCmO5EibN_He7QQohG06Qg-fCOHWiNFMAbCpxa25w,5945\nfontTools/ttLib/tables/sbixStrike.py,sha256=9-UIVPormVd27lOU5fuGZvzkckA2U5975jBXXUEPxKA,6840\nfontTools/ttLib/tables/table_API_readme.txt,sha256=E9lwGW1P_dGqy1FYBcYLVEDDmikbsqW4pUtpv1RKCJU,2839\nfontTools/ttLib/tables/ttProgram.py,sha256=vkRtptH7QXD0Ng8LNzh-A_Ln27VPCxSJOXgW8878nSo,36482\n
fontTools/ttLib/ttCollection.py,sha256=1_wMr_ONgwPZh6wfbS_a7lNeE2IxUqd029TGObOsWs0,4088\nfontTools/ttLib/ttFont.py,sha256=t57l-KFaSAs29LQz7NOqatjsZSnqt-VVqPloySwetEU,42122\nfontTools/ttLib/ttGlyphSet.py,sha256=JQZTE5TKXaVdceqYHZiC7gYanl_1mpHWjtW6GYjAtZ0,17966\nfontTools/ttLib/ttVisitor.py,sha256=_Dkmz0tDs-5AFUR46kyg3Ku6BMPifrZzRU8-9UvXdz4,1057\nfontTools/ttLib/woff2.py,sha256=LuX5SHMlhuhNRI0W5J2UnDua7_3NCnGaoofXhmjkf3g,62741\nfontTools/ttx.py,sha256=CpfOtEVTXAv79XM2jiWKrOFXHFtWSyAniIgC7b9tWf8,17756\nfontTools/ufoLib/__init__.py,sha256=LYIIhFjl4flv5NidE5T6u57Sda5-xYu82Vrva7dN1aY,96908\nfontTools/ufoLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/converters.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/errors.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/etree.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/filenames.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/glifLib.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/kerning.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/plistlib.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/pointPen.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/utils.cpython-313.pyc,,\nfontTools/ufoLib/__pycache__/validators.cpython-313.pyc,,\nfontTools/ufoLib/converters.py,sha256=hPVFC0K1IPXG8tCGZQOIUXB4ILdiGPuzbgtqSiWHZn4,13442\nfontTools/ufoLib/errors.py,sha256=pgJKS2A5RcsfQS2Z6Y_l3mIz62-VD_SrpIysKmywuYA,875\nfontTools/ufoLib/etree.py,sha256=kTUP1EzN2wSXZ4jwAX8waNfKz52u7jc2qQ2LrqPYLBw,237\nfontTools/ufoLib/filenames.py,sha256=wuXjT9VX3791TBJ0WL4lWhFQ6pInkcYML9Mgc2cdJsE,10764\nfontTools/ufoLib/glifLib.py,sha256=O75JqjD9gZwpJPD6Pvh0lyYMk5m-rqIwIbr0EJ1kYdU,74781\nfontTools/ufoLib/kerning.py,sha256=VgE-xhGMAD2ipauleB-liNxwBuR_Ze5Jceb4XDi2Ob4,4354\nfontTools/ufoLib/plistlib.py,sha256=GpWReRtO7S1JCv6gJnnuiYooo4Hwbgc2vagT041kFk8,1557\nfontTools/ufoLib/pointPen.py,sha256=bU0-DLHrWKyutmwjw0tvhT-QPE-kmqs2Dqe0cflYgOk,250\nfontTools/ufoLib/utils.py,sha256=3hKaFRkCBhmwP6NRst6H0d5ZKSXYTzKK8kNfGGwzEaw,2074\nfontTools/ufoLib/validators.py,sha256=nZgijPWW40jzcQb6YgujVFVi53mPPlmrQ1uU8QJT_Gs,31991\nfontTools/unicode.py,sha256=a7460sU25TnVYGzrVl0uv0lI_pDbANZp8Jfmqx9tAag,1287\nfontTools/unicodedata/Blocks.py,sha256=R0rSdM3NktyDMxtyLQJV4nvlTJylX9osWKkQQ_ZTEpQ,33216\nfontTools/unicodedata/Mirrored.py,sha256=I6Fy7stp4cphy9JQ2zFZOynXvqIp3eKL6Clw7CTI8IU,9688\nfontTools/unicodedata/OTTags.py,sha256=IAt8NXaZOhu5cuuks46DDX3E7Ovoqp-PMUQC-WJUPIs,1246\nfontTools/unicodedata/ScriptExtensions.py,sha256=eIAXBnM9BbI5V_MWeA9I9Iv2rvgWi8mt8dCWN3cN1gY,29033\nfontTools/unicodedata/Scripts.py,sha256=jCKY8wlKrSFmsFndzLegVS6vrhVGZ-S3T0dw2vO9Drg,133888\nfontTools/unicodedata/__init__.py,sha256=isIrE4vSaQoKiU9Hrz07UuVNp9pctma5GmESj53WBMw,9285\nfontTools/unicodedata/__pycache__/Blocks.cpython-313.pyc,,\nfontTools/unicodedata/__pycache__/Mirrored.cpython-313.pyc,,\nfontTools/unicodedata/__pycache__/OTTags.cpython-313.pyc,,\nfontTools/unicodedata/__pycache__/ScriptExtensions.cpython-313.pyc,,\nfontTools/unicodedata/__pycache__/Scripts.cpython-313.pyc,,\nfontTools/unicodedata/__pycache__/__init__.cpython-313.pyc,,\nfontTools/varLib/__init__.py,sha256=E38iOYGLna0PhK-t7G33KNl36B11w_Lq7rd6KV5Pt8s,55753\nfontTools/varLib/__main__.py,sha256=ykyZY5GG9IPDsPrUWiHgXEnsgKrQudZkneCTes6GUpU,101\nfontTools/varLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/varLib/__pycache__/__main__.cpython-313.pyc,,\nfontTools/varLib/__pycache__/avar.cpython-313.pyc,,\nfontTools/varLib/__pycache__/avarPlanner.cpython-313.pyc,,\nfontTools/varLib/__pycache__/builder.cpython-313.pyc,,\nfontTools/varLib/__pycac
he__/cff.cpython-313.pyc,,\nfontTools/varLib/__pycache__/errors.cpython-313.pyc,,\nfontTools/varLib/__pycache__/featureVars.cpython-313.pyc,,\nfontTools/varLib/__pycache__/hvar.cpython-313.pyc,,\nfontTools/varLib/__pycache__/interpolatable.cpython-313.pyc,,\nfontTools/varLib/__pycache__/interpolatableHelpers.cpython-313.pyc,,\nfontTools/varLib/__pycache__/interpolatablePlot.cpython-313.pyc,,\nfontTools/varLib/__pycache__/interpolatableTestContourOrder.cpython-313.pyc,,\nfontTools/varLib/__pycache__/interpolatableTestStartingPoint.cpython-313.pyc,,\nfontTools/varLib/__pycache__/interpolate_layout.cpython-313.pyc,,\nfontTools/varLib/__pycache__/iup.cpython-313.pyc,,\nfontTools/varLib/__pycache__/merger.cpython-313.pyc,,\nfontTools/varLib/__pycache__/models.cpython-313.pyc,,\nfontTools/varLib/__pycache__/multiVarStore.cpython-313.pyc,,\nfontTools/varLib/__pycache__/mutator.cpython-313.pyc,,\nfontTools/varLib/__pycache__/mvar.cpython-313.pyc,,\nfontTools/varLib/__pycache__/plot.cpython-313.pyc,,\nfontTools/varLib/__pycache__/stat.cpython-313.pyc,,\nfontTools/varLib/__pycache__/varStore.cpython-313.pyc,,\nfontTools/varLib/avar.py,sha256=tRgKAUn_K5MTCSkB2MgPYYZ2U6Qo_Cg3jFQV0TDKFgc,9907\nfontTools/varLib/avarPlanner.py,sha256=orjyFvg3YkC-slt7fgSEU1AGjLCkGgMEJ7hTRV6CqUA,28362\nfontTools/varLib/builder.py,sha256=1k-N-rTwnZqQpzhNLBx2tqu2oYGG44sJSXKTCjAvIVM,6824\nfontTools/varLib/cff.py,sha256=bl8rrPHHpwzUdZBY80_5JJLWYcXQOolhKKvTJiiU-Bs,23532\nfontTools/varLib/errors.py,sha256=mXl-quT2Z75_t7Uwb6ug3VMhmbQjO841YNLeghwuY_s,7153\nfontTools/varLib/featureVars.py,sha256=fBt7iJtohfsfqO7AULmYMD56hb3apCDXRgpR18pDoG8,26390\nfontTools/varLib/hvar.py,sha256=3bd_J1eRrF7q1YIKFF-NBZo_lU-r3VlgDwhtkMo9MSc,3808\nfontTools/varLib/instancer/__init__.py,sha256=Dp4bxw_aOCdpECbQ4QPj8PujUzzpZAxQso4huWUx2J4,74220\nfontTools/varLib/instancer/__main__.py,sha256=YN_tyJDdmLlH3umiLDS2ue0Zc3fSFexa9wCuk3Wuod0,109\nfontTools/varLib/instancer/__pycache__/__init__.cpython-313.pyc,,\nfontTools/varLib/instancer/__pycache__/__main__.cpython-313.pyc,,\nfontTools/varLib/instancer/__pycache__/featureVars.cpython-313.pyc,,\nfontTools/varLib/instancer/__pycache__/names.cpython-313.pyc,,\nfontTools/varLib/instancer/__pycache__/solver.cpython-313.pyc,,\nfontTools/varLib/instancer/featureVars.py,sha256=b3qtGCYVZ9fqkqcgFQUikYQBX_3_x0YgdrvvxIALbuU,7300\nfontTools/varLib/instancer/names.py,sha256=vmHi7JZlh-N4amxKdaTJ-5DN9mDJ8Wnh_s9W1gJAQ4Y,15338\nfontTools/varLib/instancer/solver.py,sha256=7noVYZ6gHrv4tV7kaXHn4iMKs_YP2YNssr4zgCHk4qI,11311\nfontTools/varLib/interpolatable.py,sha256=8AXrhsnYY1z0hR6gskqYRYx8qcFsvUKmIIHZRpIOlAU,46430\nfontTools/varLib/interpolatableHelpers.py,sha256=JnabttZY7sY9-QzdiqkgzQ_S5nG8k_O1TzLEmfNUvNo,11892\nfontTools/varLib/interpolatablePlot.py,sha256=tUKFd8H9B2eD_GE6jV13J-dZkkIeLmk3ojAYrf-edsA,45644\nfontTools/varLib/interpolatableTestContourOrder.py,sha256=Pbt0jW0LoVggIwrtADZ7HWK6Ftdoo1bjuWz0ost0HD0,3103\nfontTools/varLib/interpolatableTestStartingPoint.py,sha256=f5MJ3mj8MctJCvDJwqmW1fIVOgovUMYAOela9HweaRU,4403\nfontTools/varLib/interpolate_layout.py,sha256=tTPUes_K7MwooUO_wac9AeFEVgL1uGSz4ITYiOizaME,3813\nfontTools/varLib/iup.c,sha256=Eld9QWThV5wgZlhHNvj15DILGRPArLAkU2071G1fSEY,846546\nfontTools/varLib/iup.cp313-win_amd64.pyd,sha256=9T-QjH8YrisQuyrsUl2sPbSESyPASLhgPpYduQforc4,130560\nfontTools/varLib/iup.py,sha256=O_xPJOBECrNDbQqCC3e5xf9KsWXUd1i3BAp9Fl6Hv2Y,15474\nfontTools/varLib/merger.py,sha256=V-B17poOYbbrRsfUYJbdqt46GtRfG833MKwtv9NOB3Q,62519\nfontTools/varLib/models.py,sha256=ZqQb1Lapj5dCO8dwa3UTx1LsIpF0-GiDte32t_TMJJQ
,23040\nfontTools/varLib/multiVarStore.py,sha256=OvrrTaKrCZCXP40Rrv-2w416P-dNz3xE6gPOEyS3PrY,8558\nfontTools/varLib/mutator.py,sha256=bUkUP27sxhEVkdljzbHNylHkj6Ob3FfQ9AoDYTRIwdo,19796\nfontTools/varLib/mvar.py,sha256=Gf3q54ICH-E9oAwKYeIKUPLZabfjY0bUT4t220zLzYI,2489\nfontTools/varLib/plot.py,sha256=BtozrcnKoEyCs0rGy7PZmrUvUNTmZT-5_sylW5PuJ28,7732\nfontTools/varLib/stat.py,sha256=ScaVFIVpXTqA-F07umv_66GoxtcjaZ54MPLFvFK4s68,4960\nfontTools/varLib/varStore.py,sha256=GWz-B1YcR-JnIh2aDmeQg621GDEBj9M4pKYcbZraA3w,24808\nfontTools/voltLib/__init__.py,sha256=J7W0S2YED0GOqW9B_ZOhw-oL0-ltuRDYgAbrd8XHjqA,156\nfontTools/voltLib/__main__.py,sha256=Ye6x5R_I9L1UuyWJsKyAajYMwV2B7OCdKsCRQ3leeJ4,6134\nfontTools/voltLib/__pycache__/__init__.cpython-313.pyc,,\nfontTools/voltLib/__pycache__/__main__.cpython-313.pyc,,\nfontTools/voltLib/__pycache__/ast.cpython-313.pyc,,\nfontTools/voltLib/__pycache__/error.cpython-313.pyc,,\nfontTools/voltLib/__pycache__/lexer.cpython-313.pyc,,\nfontTools/voltLib/__pycache__/parser.cpython-313.pyc,,\nfontTools/voltLib/__pycache__/voltToFea.cpython-313.pyc,,\nfontTools/voltLib/ast.py,sha256=DBxJygWUT5gE_tuxx0j2nmKFm3RvRSEF_rTXeKiisEo,13752\nfontTools/voltLib/error.py,sha256=3TsaZBA82acFd2j5Beq3WUQTURTKM0zxOnUFGZovSNA,407\nfontTools/voltLib/lexer.py,sha256=v9V4zdBO2VqVJG__IWrL8fv_CRURmh2eD_1UpbIJn9g,3467\nfontTools/voltLib/parser.py,sha256=HS72gxtFzvcPSwEbUYj3E41CPK7ZqK9mSe0nLRxn-IY,26060\nfontTools/voltLib/voltToFea.py,sha256=nS-OSlx_a-TngGICFNKyFxMhjqkV3OQLcvyzw4sQFyk,37460\nfonttools-4.58.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\nfonttools-4.58.5.dist-info/METADATA,sha256=btf65Ggqrp-gkUSzPHqVQAstBMwthSS-fKTo3hZuPpk,109046\nfonttools-4.58.5.dist-info/RECORD,,\nfonttools-4.58.5.dist-info/WHEEL,sha256=qV0EIPljj1XC_vuSatRWjn02nZIz3N1t8jsZz7HBr2U,101\nfonttools-4.58.5.dist-info/entry_points.txt,sha256=8kVHddxfFWA44FSD4mBpmC-4uCynQnkoz_9aNJb227Y,147\nfonttools-4.58.5.dist-info/licenses/LICENSE,sha256=Ir74Bpfs-qF_l-YrmibfoSggvgVYPo3RKtFpskEnTJk,1093\nfonttools-4.58.5.dist-info/licenses/LICENSE.external,sha256=p5eWRJLxSGv9_M1uYYVeOjFkXzYCPqXeeF2jfqwvy04,19046\nfonttools-4.58.5.dist-info/top_level.txt,sha256=rRgRylrXzekqWOsrhygzib12pQ7WILf7UGjqEwkIFDM,10\n
|
.venv\Lib\site-packages\fonttools-4.58.5.dist-info\RECORD
|
RECORD
|
Other
| 48,445 | 0.7 | 0 | 0 |
node-utils
| 236 |
2023-07-11T07:50:23.099834
|
Apache-2.0
| false |
dcd3a9379ebd6b9af5451b1c805f9576
|
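The RECORD listing above follows the wheel RECORD format (PEP 376 / PEP 427): each line is "path,sha256=<digest>,<size>", where the digest is the urlsafe base64 of the file's SHA-256 with trailing "=" padding stripped. A minimal sketch of how such an entry could be reproduced; record_entry is a hypothetical helper, not part of fontTools or pip:

import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    # Hypothetical helper: rebuild one "path,sha256=...,size" RECORD line.
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).digest()
    # Wheel RECORD hashes are urlsafe base64 with the "=" padding removed.
    b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return "{},sha256={},{}".format(path, b64, len(data))

# For example, record_entry("fontTools/ttx.py"), run from site-packages,
# should match the corresponding line in the RECORD content above.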
fontTools\n
|
.venv\Lib\site-packages\fonttools-4.58.5.dist-info\top_level.txt
|
top_level.txt
|
Other
| 10 | 0.5 | 0 | 0 |
node-utils
| 558 |
2023-12-17T10:32:38.313629
|
BSD-3-Clause
| false |
54e9f52ffac41b3ca024c2b3e5fcd3b1
|
Wheel-Version: 1.0\nGenerator: setuptools (80.9.0)\nRoot-Is-Purelib: false\nTag: cp313-cp313-win_amd64\n\n
|
.venv\Lib\site-packages\fonttools-4.58.5.dist-info\WHEEL
|
WHEEL
|
Other
| 101 | 0.7 | 0 | 0 |
python-kit
| 427 |
2023-11-28T23:08:53.714204
|
GPL-3.0
| false |
eb6c9e665bbbd698545236600675f165
|
MIT License\n\nCopyright (c) 2017 Just van Rossum\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the "Software"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n
|
.venv\Lib\site-packages\fonttools-4.58.5.dist-info\licenses\LICENSE
|
LICENSE
|
Other
| 1,093 | 0.7 | 0 | 0 |
awesome-app
| 745 |
2024-08-16T18:00:05.672850
|
BSD-3-Clause
| false |
211c9e4671bde3881351f22a2901f692
|
FontTools includes the following font projects for testing purposes, which are\nunder SIL Open Font License, Version 1.1:\n\nLobster\n Copyright (c) 2010, Pablo Impallari (www.impallari.com|impallari@gmail.com),\n with Reserved Font Name Lobster.\n This Font Software is licensed under the SIL Open Font License, Version 1.1.\n\nNoto Fonts\n This Font Software is licensed under the SIL Open Font License, Version 1.1.\n\nXITS font project\n Copyright (c) 2001-2010 by the STI Pub Companies, consisting of the American\n Institute of Physics, the American Chemical Society, the American\n Mathematical Society, the American Physical Society, Elsevier, Inc., and The\n Institute of Electrical and Electronic Engineers, Inc. (www.stixfonts.org),\n with Reserved Font Name STIX Fonts, STIX Fonts (TM) is a trademark of The\n Institute of Electrical and Electronics Engineers, Inc.\n\n Portions copyright (c) 1998-2003 by MicroPress, Inc.\n (www.micropress-inc.com), with Reserved Font Name TM Math. To obtain\n additional mathematical fonts, please contact MicroPress, Inc., 68-30 Harrow\n Street, Forest Hills, NY 11375, USA, Phone: (718) 575-1816.\n\n Portions copyright (c) 1990 by Elsevier, Inc.\n\n This Font Software is licensed under the SIL Open Font License, Version 1.1.\n\nIosevka\n Copyright (c) 2015-2020 Belleve Invis (belleve@typeof.net).\n This Font Software is licensed under the SIL Open Font License, Version 1.1.\n\nThis license is copied below, and is also available with a FAQ at:\nhttp://scripts.sil.org/OFL\n\n-----------------------------------------------------------\nSIL OPEN FONT LICENSE Version 1.1 - 26 February 2007\n-----------------------------------------------------------\n\nPREAMBLE\nThe goals of the Open Font License (OFL) are to stimulate worldwide\ndevelopment of collaborative font projects, to support the font\ncreation efforts of academic and linguistic communities, and to\nprovide a free and open framework in which fonts may be shared and\nimproved in partnership with others.\n\nThe OFL allows the licensed fonts to be used, studied, modified and\nredistributed freely as long as they are not sold by themselves. The\nfonts, including any derivative works, can be bundled, embedded,\nredistributed and/or sold with any software provided that any reserved\nnames are not used by derivative works. The fonts and derivatives,\nhowever, cannot be released under any other type of license. The\nrequirement for fonts to remain under this license does not apply to\nany document created using the fonts or their derivatives.\n\nDEFINITIONS\n"Font Software" refers to the set of files released by the Copyright\nHolder(s) under this license and clearly marked as such. 
This may\ninclude source files, build scripts and documentation.\n\n"Reserved Font Name" refers to any names specified as such after the\ncopyright statement(s).\n\n"Original Version" refers to the collection of Font Software\ncomponents as distributed by the Copyright Holder(s).\n\n"Modified Version" refers to any derivative made by adding to,\ndeleting, or substituting -- in part or in whole -- any of the\ncomponents of the Original Version, by changing formats or by porting\nthe Font Software to a new environment.\n\n"Author" refers to any designer, engineer, programmer, technical\nwriter or other person who contributed to the Font Software.\n\nPERMISSION & CONDITIONS\nPermission is hereby granted, free of charge, to any person obtaining\na copy of the Font Software, to use, study, copy, merge, embed,\nmodify, redistribute, and sell modified and unmodified copies of the\nFont Software, subject to the following conditions:\n\n1) Neither the Font Software nor any of its individual components, in\nOriginal or Modified Versions, may be sold by itself.\n\n2) Original or Modified Versions of the Font Software may be bundled,\nredistributed and/or sold with any software, provided that each copy\ncontains the above copyright notice and this license. These can be\nincluded either as stand-alone text files, human-readable headers or\nin the appropriate machine-readable metadata fields within text or\nbinary files as long as those fields can be easily viewed by the user.\n\n3) No Modified Version of the Font Software may use the Reserved Font\nName(s) unless explicit written permission is granted by the\ncorresponding Copyright Holder. This restriction only applies to the\nprimary font name as presented to the users.\n\n4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font\nSoftware shall not be used to promote, endorse or advertise any\nModified Version, except to acknowledge the contribution(s) of the\nCopyright Holder(s) and the Author(s) or with their explicit written\npermission.\n\n5) The Font Software, modified or unmodified, in part or in whole,\nmust be distributed entirely under this license, and must not be\ndistributed under any other license. The requirement for fonts to\nremain under this license does not apply to any document created using\nthe Font Software.\n\nTERMINATION\nThis license becomes null and void if any of the above conditions are\nnot met.\n\nDISCLAIMER\nTHE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\nOF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. 
IN NO EVENT SHALL THE\nCOPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\nINCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL\nDAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM\nOTHER DEALINGS IN THE FONT SOFTWARE.\n\n=====\n\nFontTools includes Adobe AGL & AGLFN, which is under 3-clauses BSD license:\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\nRedistributions of source code must retain the above copyright notice,\nthis list of conditions and the following disclaimer.\n\nRedistributions in binary form must reproduce the above copyright\nnotice, this list of conditions and the following disclaimer in the\ndocumentation and/or other materials provided with the distribution.\n\nNeither the name of Adobe Systems Incorporated nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nHOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n=====\n\nFontTools includes cu2qu, which is Copyright 2016 Google Inc. All Rights Reserved.\nLicensed under the Apache License, Version 2.0, a copy of which is reproduced below:\n\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n "License" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n "Licensor" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n "Legal Entity" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. 
For the purposes of this definition,\n "control" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n "You" (or "Your") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n "Source" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n "Object" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n "Work" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n "Derivative Works" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n "Contribution" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, "submitted"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as "Not a Contribution."\n\n "Contributor" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a "NOTICE" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. 
This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets "[]"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same "printed page" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n
|
.venv\Lib\site-packages\fonttools-4.58.5.dist-info\licenses\LICENSE.external
|
LICENSE.external
|
Other
| 19,046 | 0.95 | 0.077994 | 0 |
node-utils
| 695 |
2025-01-16T04:28:46.017535
|
BSD-3-Clause
| false |
16d85827c0050c10e34f478d3c514957
|
import sys\n\nif sys.version_info[:2] >= (3, 8):\n from functools import cached_property\nelse:\n from cached_property import cached_property\n\n__all__ = ["cached_property"]\n
|
.venv\Lib\site-packages\fqdn\_compat.py
|
_compat.py
|
Python
| 174 | 0.85 | 0.125 | 0 |
awesome-app
| 614 |
2023-12-24T14:34:17.615255
|
GPL-3.0
| false |
cedd5c768548907f829b289dc0b1b89f
|
import re\n\nfrom fqdn._compat import cached_property\n\n\nclass FQDN:\n """\n From https://tools.ietf.org/html/rfc1035#page-9, RFC 1035 3.1. Name space\n definitions:\n\n Domain names in messages are expressed in terms of a sequence of\n labels. Each label is represented as a one octet length field followed\n by that number of octets. Since every domain name ends with the null\n label of the root, a domain name is terminated by a length byte of\n zero. The high order two bits of every length octet must be zero, and\n the remaining six bits of the length field limit the label to 63 octets\n or less.\n\n To simplify implementations, the total length of a domain name (i.e.,\n label octets and label length octets) is restricted to 255 octets or\n less.\n\n\n Therefore the max length of a domain name is actually 253 ASCII bytes\n without the trailing null byte or the leading length byte, and the max\n length of a label is 63 bytes without the leading length byte.\n """\n\n PREFERRED_NAME_SYNTAX_REGEXSTR = (\n r"^((?![-])[-A-Z\d]{1,63}(?<!-)[.])*(?!-)[-A-Z\d]{1,63}(?<!-)[.]?$"\n )\n ALLOW_UNDERSCORES_REGEXSTR = (\n r"^((?![-])[-_A-Z\d]{1,63}(?<!-)[.])*(?!-)[-_A-Z\d]{1,63}(?<!-)[.]?$"\n )\n\n def __init__(self, fqdn, *nothing, **kwargs):\n if nothing:\n raise ValueError("got extra positional parameter, try kwargs")\n unknown_kwargs = set(kwargs.keys()) - {"allow_underscores", "min_labels"}\n if unknown_kwargs:\n raise ValueError("got extra kwargs: {}".format(unknown_kwargs))\n\n if not (fqdn and isinstance(fqdn, str)):\n raise ValueError("fqdn must be str")\n self._fqdn = fqdn.lower()\n self._allow_underscores = kwargs.get("allow_underscores", False)\n self._min_labels = kwargs.get("min_labels", 2)\n\n def __str__(self):\n """\n The FQDN as a string in absolute form\n """\n return self.absolute\n\n @property\n def _regex(self):\n regexstr = (\n FQDN.PREFERRED_NAME_SYNTAX_REGEXSTR\n if not self._allow_underscores\n else FQDN.ALLOW_UNDERSCORES_REGEXSTR\n )\n return re.compile(regexstr, re.IGNORECASE)\n\n @cached_property\n def is_valid(self):\n """\n True for a validated fully-qualified domain nam (FQDN), in full\n compliance with RFC 1035, and the "preferred form" specified in RFC\n 3686 s. 
2, whether relative or absolute.\n\n https://tools.ietf.org/html/rfc3696#section-2\n https://tools.ietf.org/html/rfc1035\n\n If and only if the FQDN ends with a dot (in place of the RFC1035\n trailing null byte), it may have a total length of 254 bytes, still it\n must be less than 253 bytes.\n """\n length = len(self._fqdn)\n if self._fqdn.endswith("."):\n length -= 1\n if length > 253:\n return False\n regex_pass = self._regex.match(self._fqdn)\n if not regex_pass:\n return False\n\n return self.labels_count >= self._min_labels\n\n @property\n def labels_count(self):\n has_terminal_dot = self._fqdn[-1] == "."\n count = self._fqdn.count(".") + (0 if has_terminal_dot else 1)\n return count\n\n @cached_property\n def is_valid_absolute(self):\n """\n True for a fully-qualified domain name (FQDN) that is RFC\n preferred-form compliant and ends with a `.`.\n\n With relative FQDNS in DNS lookups, the current hosts domain name or\n search domains may be appended.\n """\n return self._fqdn.endswith(".") and self.is_valid\n\n @cached_property\n def is_valid_relative(self):\n """\n True for a validated fully-qualified domain name that compiles with the\n RFC preferred-form and does not ends with a `.`.\n """\n return not self._fqdn.endswith(".") and self.is_valid\n\n @cached_property\n def absolute(self):\n """\n The FQDN as a string in absolute form\n """\n if not self.is_valid:\n raise ValueError("invalid FQDN `{0}`".format(self._fqdn))\n\n if self.is_valid_absolute:\n return self._fqdn\n\n return "{0}.".format(self._fqdn)\n\n @cached_property\n def relative(self):\n """\n The FQDN as a string in relative form\n """\n if not self.is_valid:\n raise ValueError("invalid FQDN `{0}`".format(self._fqdn))\n\n if self.is_valid_absolute:\n return self._fqdn[:-1]\n\n return self._fqdn\n\n def __eq__(self, other):\n if isinstance(other, FQDN):\n return self.absolute == other.absolute\n\n def __hash__(self):\n return hash(self.absolute) + hash("fqdn")\n
|
.venv\Lib\site-packages\fqdn\__init__.py
|
__init__.py
|
Python
| 4,787 | 0.95 | 0.206897 | 0 |
react-lib
| 658 |
2023-11-25T15:05:31.195914
|
Apache-2.0
| false |
58b9bddaf26ae4bebd718e4973c32999
|
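For reference, a minimal usage sketch of the FQDN class defined in the fqdn/__init__.py content above, assuming the fqdn 1.5.1 package from this environment is importable:

from fqdn import FQDN

name = FQDN("example.com")
print(name.is_valid)      # True: two labels, matches the RFC 1035 preferred form
print(name.absolute)      # "example.com." (absolute form gains the trailing dot)
print(name.relative)      # "example.com"

# Labels are capped at 63 octets and the whole name at 253 octets,
# so an over-long label fails validation.
print(FQDN("a" * 64 + ".example").is_valid)   # False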
\n\n
|
.venv\Lib\site-packages\fqdn\__pycache__\_compat.cpython-313.pyc
|
_compat.cpython-313.pyc
|
Other
| 381 | 0.7 | 0 | 0 |
awesome-app
| 938 |
2024-04-14T09:50:36.862779
|
MIT
| false |
185cd8a4f237d0c8a91f8d11caea0cfb
|
\n\n
|
.venv\Lib\site-packages\fqdn\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 6,883 | 0.8 | 0.060976 | 0 |
awesome-app
| 503 |
2024-05-05T09:08:37.387596
|
Apache-2.0
| false |
0eff37e150779d250938523fe0abaabe
|
pip\n
|
.venv\Lib\site-packages\fqdn-1.5.1.dist-info\INSTALLER
|
INSTALLER
|
Other
| 4 | 0.5 | 0 | 0 |
awesome-app
| 964 |
2024-07-24T18:08:07.363888
|
MIT
| false |
365c9bfeb7d89244f2ce01c1de44cb85
|
Mozilla Public License Version 2.0\n==================================\n\n1. Definitions\n--------------\n\n1.1. "Contributor"\n means each individual or legal entity that creates, contributes to\n the creation of, or owns Covered Software.\n\n1.2. "Contributor Version"\n means the combination of the Contributions of others (if any) used\n by a Contributor and that particular Contributor's Contribution.\n\n1.3. "Contribution"\n means Covered Software of a particular Contributor.\n\n1.4. "Covered Software"\n means Source Code Form to which the initial Contributor has attached\n the notice in Exhibit A, the Executable Form of such Source Code\n Form, and Modifications of such Source Code Form, in each case\n including portions thereof.\n\n1.5. "Incompatible With Secondary Licenses"\n means\n\n (a) that the initial Contributor has attached the notice described\n in Exhibit B to the Covered Software; or\n\n (b) that the Covered Software was made available under the terms of\n version 1.1 or earlier of the License, but not also under the\n terms of a Secondary License.\n\n1.6. "Executable Form"\n means any form of the work other than Source Code Form.\n\n1.7. "Larger Work"\n means a work that combines Covered Software with other material, in\n a separate file or files, that is not Covered Software.\n\n1.8. "License"\n means this document.\n\n1.9. "Licensable"\n means having the right to grant, to the maximum extent possible,\n whether at the time of the initial grant or subsequently, any and\n all of the rights conveyed by this License.\n\n1.10. "Modifications"\n means any of the following:\n\n (a) any file in Source Code Form that results from an addition to,\n deletion from, or modification of the contents of Covered\n Software; or\n\n (b) any new file in Source Code Form that contains any Covered\n Software.\n\n1.11. "Patent Claims" of a Contributor\n means any patent claim(s), including without limitation, method,\n process, and apparatus claims, in any patent Licensable by such\n Contributor that would be infringed, but for the grant of the\n License, by the making, using, selling, offering for sale, having\n made, import, or transfer of either its Contributions or its\n Contributor Version.\n\n1.12. "Secondary License"\n means either the GNU General Public License, Version 2.0, the GNU\n Lesser General Public License, Version 2.1, the GNU Affero General\n Public License, Version 3.0, or any later versions of those\n licenses.\n\n1.13. "Source Code Form"\n means the form of the work preferred for making modifications.\n\n1.14. "You" (or "Your")\n means an individual or a legal entity exercising rights under this\n License. For legal entities, "You" includes any entity that\n controls, is controlled by, or is under common control with You. For\n purposes of this definition, "control" means (a) the power, direct\n or indirect, to cause the direction or management of such entity,\n whether by contract or otherwise, or (b) ownership of more than\n fifty percent (50%) of the outstanding shares or beneficial\n ownership of such entity.\n\n2. License Grants and Conditions\n--------------------------------\n\n2.1. 
Grants\n\nEach Contributor hereby grants You a world-wide, royalty-free,\nnon-exclusive license:\n\n(a) under intellectual property rights (other than patent or trademark)\n Licensable by such Contributor to use, reproduce, make available,\n modify, display, perform, distribute, and otherwise exploit its\n Contributions, either on an unmodified basis, with Modifications, or\n as part of a Larger Work; and\n\n(b) under Patent Claims of such Contributor to make, use, sell, offer\n for sale, have made, import, and otherwise transfer either its\n Contributions or its Contributor Version.\n\n2.2. Effective Date\n\nThe licenses granted in Section 2.1 with respect to any Contribution\nbecome effective for each Contribution on the date the Contributor first\ndistributes such Contribution.\n\n2.3. Limitations on Grant Scope\n\nThe licenses granted in this Section 2 are the only rights granted under\nthis License. No additional rights or licenses will be implied from the\ndistribution or licensing of Covered Software under this License.\nNotwithstanding Section 2.1(b) above, no patent license is granted by a\nContributor:\n\n(a) for any code that a Contributor has removed from Covered Software;\n or\n\n(b) for infringements caused by: (i) Your and any other third party's\n modifications of Covered Software, or (ii) the combination of its\n Contributions with other software (except as part of its Contributor\n Version); or\n\n(c) under Patent Claims infringed by Covered Software in the absence of\n its Contributions.\n\nThis License does not grant any rights in the trademarks, service marks,\nor logos of any Contributor (except as may be necessary to comply with\nthe notice requirements in Section 3.4).\n\n2.4. Subsequent Licenses\n\nNo Contributor makes additional grants as a result of Your choice to\ndistribute the Covered Software under a subsequent version of this\nLicense (see Section 10.2) or under the terms of a Secondary License (if\npermitted under the terms of Section 3.3).\n\n2.5. Representation\n\nEach Contributor represents that the Contributor believes its\nContributions are its original creation(s) or it has sufficient rights\nto grant the rights to its Contributions conveyed by this License.\n\n2.6. Fair Use\n\nThis License is not intended to limit any rights You have under\napplicable copyright doctrines of fair use, fair dealing, or other\nequivalents.\n\n2.7. Conditions\n\nSections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted\nin Section 2.1.\n\n3. Responsibilities\n-------------------\n\n3.1. Distribution of Source Form\n\nAll distribution of Covered Software in Source Code Form, including any\nModifications that You create or to which You contribute, must be under\nthe terms of this License. You must inform recipients that the Source\nCode Form of the Covered Software is governed by the terms of this\nLicense, and how they can obtain a copy of this License. You may not\nattempt to alter or restrict the recipients' rights in the Source Code\nForm.\n\n3.2. 
Distribution of Executable Form\n\nIf You distribute Covered Software in Executable Form then:\n\n(a) such Covered Software must also be made available in Source Code\n Form, as described in Section 3.1, and You must inform recipients of\n the Executable Form how they can obtain a copy of such Source Code\n Form by reasonable means in a timely manner, at a charge no more\n than the cost of distribution to the recipient; and\n\n(b) You may distribute such Executable Form under the terms of this\n License, or sublicense it under different terms, provided that the\n license for the Executable Form does not attempt to limit or alter\n the recipients' rights in the Source Code Form under this License.\n\n3.3. Distribution of a Larger Work\n\nYou may create and distribute a Larger Work under terms of Your choice,\nprovided that You also comply with the requirements of this License for\nthe Covered Software. If the Larger Work is a combination of Covered\nSoftware with a work governed by one or more Secondary Licenses, and the\nCovered Software is not Incompatible With Secondary Licenses, this\nLicense permits You to additionally distribute such Covered Software\nunder the terms of such Secondary License(s), so that the recipient of\nthe Larger Work may, at their option, further distribute the Covered\nSoftware under the terms of either this License or such Secondary\nLicense(s).\n\n3.4. Notices\n\nYou may not remove or alter the substance of any license notices\n(including copyright notices, patent notices, disclaimers of warranty,\nor limitations of liability) contained within the Source Code Form of\nthe Covered Software, except that You may alter any license notices to\nthe extent required to remedy known factual inaccuracies.\n\n3.5. Application of Additional Terms\n\nYou may choose to offer, and to charge a fee for, warranty, support,\nindemnity or liability obligations to one or more recipients of Covered\nSoftware. However, You may do so only on Your own behalf, and not on\nbehalf of any Contributor. You must make it absolutely clear that any\nsuch warranty, support, indemnity, or liability obligation is offered by\nYou alone, and You hereby agree to indemnify every Contributor for any\nliability incurred by such Contributor as a result of warranty, support,\nindemnity or liability terms You offer. You may include additional\ndisclaimers of warranty and limitations of liability specific to any\njurisdiction.\n\n4. Inability to Comply Due to Statute or Regulation\n---------------------------------------------------\n\nIf it is impossible for You to comply with any of the terms of this\nLicense with respect to some or all of the Covered Software due to\nstatute, judicial order, or regulation then You must: (a) comply with\nthe terms of this License to the maximum extent possible; and (b)\ndescribe the limitations and the code they affect. Such description must\nbe placed in a text file included with all distributions of the Covered\nSoftware under this License. Except to the extent prohibited by statute\nor regulation, such description must be sufficiently detailed for a\nrecipient of ordinary skill to be able to understand it.\n\n5. Termination\n--------------\n\n5.1. The rights granted under this License will terminate automatically\nif You fail to comply with any of its terms. 
However, if You become\ncompliant, then the rights granted under this License from a particular\nContributor are reinstated (a) provisionally, unless and until such\nContributor explicitly and finally terminates Your grants, and (b) on an\nongoing basis, if such Contributor fails to notify You of the\nnon-compliance by some reasonable means prior to 60 days after You have\ncome back into compliance. Moreover, Your grants from a particular\nContributor are reinstated on an ongoing basis if such Contributor\nnotifies You of the non-compliance by some reasonable means, this is the\nfirst time You have received notice of non-compliance with this License\nfrom such Contributor, and You become compliant prior to 30 days after\nYour receipt of the notice.\n\n5.2. If You initiate litigation against any entity by asserting a patent\ninfringement claim (excluding declaratory judgment actions,\ncounter-claims, and cross-claims) alleging that a Contributor Version\ndirectly or indirectly infringes any patent, then the rights granted to\nYou by any and all Contributors for the Covered Software under Section\n2.1 of this License shall terminate.\n\n5.3. In the event of termination under Sections 5.1 or 5.2 above, all\nend user license agreements (excluding distributors and resellers) which\nhave been validly granted by You or Your distributors under this License\nprior to termination shall survive termination.\n\n************************************************************************\n* *\n* 6. Disclaimer of Warranty *\n* ------------------------- *\n* *\n* Covered Software is provided under this License on an "as is" *\n* basis, without warranty of any kind, either expressed, implied, or *\n* statutory, including, without limitation, warranties that the *\n* Covered Software is free of defects, merchantable, fit for a *\n* particular purpose or non-infringing. The entire risk as to the *\n* quality and performance of the Covered Software is with You. *\n* Should any Covered Software prove defective in any respect, You *\n* (not any Contributor) assume the cost of any necessary servicing, *\n* repair, or correction. This disclaimer of warranty constitutes an *\n* essential part of this License. No use of any Covered Software is *\n* authorized under this License except under this disclaimer. *\n* *\n************************************************************************\n\n************************************************************************\n* *\n* 7. Limitation of Liability *\n* -------------------------- *\n* *\n* Under no circumstances and under no legal theory, whether tort *\n* (including negligence), contract, or otherwise, shall any *\n* Contributor, or anyone who distributes Covered Software as *\n* permitted above, be liable to You for any direct, indirect, *\n* special, incidental, or consequential damages of any character *\n* including, without limitation, damages for lost profits, loss of *\n* goodwill, work stoppage, computer failure or malfunction, or any *\n* and all other commercial damages or losses, even if such party *\n* shall have been informed of the possibility of such damages. This *\n* limitation of liability shall not apply to liability for death or *\n* personal injury resulting from such party's negligence to the *\n* extent applicable law prohibits such limitation. Some *\n* jurisdictions do not allow the exclusion or limitation of *\n* incidental or consequential damages, so this exclusion and *\n* limitation may not apply to You. 
*\n* *\n************************************************************************\n\n8. Litigation\n-------------\n\nAny litigation relating to this License may be brought only in the\ncourts of a jurisdiction where the defendant maintains its principal\nplace of business and such litigation shall be governed by laws of that\njurisdiction, without reference to its conflict-of-law provisions.\nNothing in this Section shall prevent a party's ability to bring\ncross-claims or counter-claims.\n\n9. Miscellaneous\n----------------\n\nThis License represents the complete agreement concerning the subject\nmatter hereof. If any provision of this License is held to be\nunenforceable, such provision shall be reformed only to the extent\nnecessary to make it enforceable. Any law or regulation which provides\nthat the language of a contract shall be construed against the drafter\nshall not be used to construe this License against a Contributor.\n\n10. Versions of the License\n---------------------------\n\n10.1. New Versions\n\nMozilla Foundation is the license steward. Except as provided in Section\n10.3, no one other than the license steward has the right to modify or\npublish new versions of this License. Each version will be given a\ndistinguishing version number.\n\n10.2. Effect of New Versions\n\nYou may distribute the Covered Software under the terms of the version\nof the License under which You originally received the Covered Software,\nor under the terms of any subsequent version published by the license\nsteward.\n\n10.3. Modified Versions\n\nIf you create software not governed by this License, and you want to\ncreate a new license for such software, you may create and use a\nmodified version of this License if you rename the license and remove\nany references to the name of the license steward (except to note that\nsuch modified license differs from this License).\n\n10.4. Distributing Source Code Form that is Incompatible With Secondary\nLicenses\n\nIf You choose to distribute Source Code Form that is Incompatible With\nSecondary Licenses under the terms of this version of the License, the\nnotice described in Exhibit B of this License must be attached.\n\nExhibit A - Source Code Form License Notice\n-------------------------------------------\n\n This Source Code Form is subject to the terms of the Mozilla Public\n License, v. 2.0. If a copy of the MPL was not distributed with this\n file, You can obtain one at http://mozilla.org/MPL/2.0/.\n\nIf it is not possible or desirable to put the notice in a particular\nfile, then You may include the notice in a location (such as a LICENSE\nfile in a relevant directory) where a recipient would be likely to look\nfor such a notice.\n\nYou may add additional accurate notices of copyright ownership.\n\nExhibit B - "Incompatible With Secondary Licenses" Notice\n---------------------------------------------------------\n\n This Source Code Form is "Incompatible With Secondary Licenses", as\n defined by the Mozilla Public License, v. 2.0.\n
|
.venv\Lib\site-packages\fqdn-1.5.1.dist-info\LICENSE
|
LICENSE
|
Other
| 16,725 | 0.95 | 0.075067 | 0.136519 |
react-lib
| 90 |
2024-08-27T07:14:22.908399
|
MIT
| false |
9741c346eef56131163e13b9db1241b3
|
Metadata-Version: 2.1\nName: fqdn\nVersion: 1.5.1\nSummary: Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers\nHome-page: https://github.com/ypcrts/fqdn\nAuthor: ypcrts\nAuthor-email: ypcrts@users.noreply.github.com\nLicense: MPL 2.0\nKeywords: fqdn,domain,hostname,RFC3686,dns\nPlatform: UNKNOWN\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Environment :: Web Environment\nClassifier: Intended Audience :: Developers\nClassifier: Intended Audience :: System Administrators\nClassifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)\nClassifier: Operating System :: OS Independent\nClassifier: Programming Language :: Python :: 2.7\nClassifier: Programming Language :: Python :: 3.5\nClassifier: Programming Language :: Python :: 3.6\nClassifier: Programming Language :: Python :: 3.7\nClassifier: Programming Language :: Python :: 3.8\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: Implementation :: CPython\nClassifier: Programming Language :: Python :: Implementation :: PyPy\nClassifier: Topic :: Internet :: Name Service (DNS)\nClassifier: Topic :: Internet\nClassifier: Topic :: System :: Systems Administration\nClassifier: Topic :: Utilities\nRequires-Python: >=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4\nRequires-Dist: cached-property (>=1.3.0) ; python_version < "3.8"\n\nUNKNOWN\n\n\n
|
.venv\Lib\site-packages\fqdn-1.5.1.dist-info\METADATA
|
METADATA
|
Other
| 1,409 | 0.8 | 0 | 0 |
python-kit
| 975 |
2024-08-21T00:31:24.586886
|
Apache-2.0
| false |
459c39b34a39244ff99c75707240402f
|
fqdn-1.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\nfqdn-1.5.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725\nfqdn-1.5.1.dist-info/METADATA,sha256=uPT0AgGNHcVvsg1B6yw5ufj6yhf26AgPvgWr3QvouPs,1409\nfqdn-1.5.1.dist-info/RECORD,,\nfqdn-1.5.1.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92\nfqdn-1.5.1.dist-info/top_level.txt,sha256=QKP1XrMGfjtTipB6yr4RCysXHKPeYckwCeqYYm0EAJk,5\nfqdn-1.5.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1\nfqdn/__init__.py,sha256=eaTdZpqad8P2L6KOM70g3OhtA_xa-kT0PmEv1EXMckA,4787\nfqdn/__pycache__/__init__.cpython-313.pyc,,\nfqdn/__pycache__/_compat.cpython-313.pyc,,\nfqdn/_compat.py,sha256=tieFjStwm0tooDH9ghndAd-DgySiK4f1im4cpvMccdg,174\n
|
.venv\Lib\site-packages\fqdn-1.5.1.dist-info\RECORD
|
RECORD
|
Other
| 780 | 0.7 | 0 | 0 |
awesome-app
| 626 |
2024-04-26T05:50:25.727101
|
BSD-3-Clause
| false |
11f57a2797b0678742859f2a36fb2bb5
|
fqdn\n
|
.venv\Lib\site-packages\fqdn-1.5.1.dist-info\top_level.txt
|
top_level.txt
|
Other
| 5 | 0.5 | 0 | 0 |
vue-tools
| 128 |
2024-09-25T20:58:55.328301
|
GPL-3.0
| false |
9bdb4b00a91275d7b36a159b4df17056
|
Wheel-Version: 1.0\nGenerator: bdist_wheel (0.36.2)\nRoot-Is-Purelib: true\nTag: py3-none-any\n\n
|
.venv\Lib\site-packages\fqdn-1.5.1.dist-info\WHEEL
|
WHEEL
|
Other
| 92 | 0.5 | 0 | 0 |
awesome-app
| 4 |
2023-09-10T18:44:47.216063
|
GPL-3.0
| false |
11aa48dbe7e7cc631b11dd66dc493aeb
|
Marker\n
|
.venv\Lib\site-packages\frozenlist\py.typed
|
py.typed
|
Other
| 7 | 0.5 | 0 | 0 |
vue-tools
| 689 |
2024-12-03T23:42:31.764264
|
MIT
| false |
3522f1a61602da93a3a5e4600cc1f05f
|
MZ
|
.venv\Lib\site-packages\frozenlist\_frozenlist.cp313-win_amd64.pyd
|
_frozenlist.cp313-win_amd64.pyd
|
Other
| 68,096 | 0.75 | 0.02544 | 0.005906 |
vue-tools
| 665 |
2025-06-15T10:31:20.303088
|
GPL-3.0
| false |
e925ef94d9f2e0f150474b5bb4e747ea
|
# cython: freethreading_compatible = True\n# distutils: language = c++\n\nfrom cpython.bool cimport PyBool_FromLong\nfrom libcpp.atomic cimport atomic\n\nimport copy\nimport types\nfrom collections.abc import MutableSequence\n\n\ncdef class FrozenList:\n __class_getitem__ = classmethod(types.GenericAlias)\n\n cdef atomic[bint] _frozen\n cdef list _items\n\n def __init__(self, items=None):\n self._frozen.store(False)\n if items is not None:\n items = list(items)\n else:\n items = []\n self._items = items\n\n @property\n def frozen(self):\n return PyBool_FromLong(self._frozen.load())\n\n cdef object _check_frozen(self):\n if self._frozen.load():\n raise RuntimeError("Cannot modify frozen list.")\n\n cdef inline object _fast_len(self):\n return len(self._items)\n\n def freeze(self):\n self._frozen.store(True)\n\n def __getitem__(self, index):\n return self._items[index]\n\n def __setitem__(self, index, value):\n self._check_frozen()\n self._items[index] = value\n\n def __delitem__(self, index):\n self._check_frozen()\n del self._items[index]\n\n def __len__(self):\n return self._fast_len()\n\n def __iter__(self):\n return self._items.__iter__()\n\n def __reversed__(self):\n return self._items.__reversed__()\n\n def __richcmp__(self, other, op):\n if op == 0: # <\n return list(self) < other\n if op == 1: # <=\n return list(self) <= other\n if op == 2: # ==\n return list(self) == other\n if op == 3: # !=\n return list(self) != other\n if op == 4: # >\n return list(self) > other\n if op == 5: # =>\n return list(self) >= other\n\n def insert(self, pos, item):\n self._check_frozen()\n self._items.insert(pos, item)\n\n def __contains__(self, item):\n return item in self._items\n\n def __iadd__(self, items):\n self._check_frozen()\n self._items += list(items)\n return self\n\n def index(self, item):\n return self._items.index(item)\n\n def remove(self, item):\n self._check_frozen()\n self._items.remove(item)\n\n def clear(self):\n self._check_frozen()\n self._items.clear()\n\n def extend(self, items):\n self._check_frozen()\n self._items += list(items)\n\n def reverse(self):\n self._check_frozen()\n self._items.reverse()\n\n def pop(self, index=-1):\n self._check_frozen()\n return self._items.pop(index)\n\n def append(self, item):\n self._check_frozen()\n return self._items.append(item)\n\n def count(self, item):\n return self._items.count(item)\n\n def __repr__(self):\n return '<FrozenList(frozen={}, {!r})>'.format(self._frozen.load(),\n self._items)\n\n def __hash__(self):\n if self._frozen.load():\n return hash(tuple(self._items))\n else:\n raise RuntimeError("Cannot hash unfrozen list.")\n\n def __deepcopy__(self, memo):\n cdef FrozenList new_list\n obj_id = id(self)\n\n # Return existing copy if already processed (circular reference)\n if obj_id in memo:\n return memo[obj_id]\n\n # Create new instance and register immediately\n new_list = self.__class__([])\n memo[obj_id] = new_list\n\n # Deep copy items\n new_list._items[:] = [copy.deepcopy(item, memo) for item in self._items]\n\n # Preserve frozen state\n if self._frozen.load():\n new_list.freeze()\n\n return new_list\n\n\nMutableSequence.register(FrozenList)\n
|
.venv\Lib\site-packages\frozenlist\_frozenlist.pyx
|
_frozenlist.pyx
|
Other
| 3,708 | 0.95 | 0.256757 | 0.054545 |
node-utils
| 266 |
2024-05-06T15:58:45.814551
|
BSD-3-Clause
| false |
1db08799d2cd481da25a6a2c18d31d5b
|
import os\nimport types\nfrom collections.abc import MutableSequence\nfrom functools import total_ordering\n\n__version__ = "1.7.0"\n\n__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...]\n\n\nNO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS")) # type: bool\n\n\n@total_ordering\nclass FrozenList(MutableSequence):\n __slots__ = ("_frozen", "_items")\n __class_getitem__ = classmethod(types.GenericAlias)\n\n def __init__(self, items=None):\n self._frozen = False\n if items is not None:\n items = list(items)\n else:\n items = []\n self._items = items\n\n @property\n def frozen(self):\n return self._frozen\n\n def freeze(self):\n self._frozen = True\n\n def __getitem__(self, index):\n return self._items[index]\n\n def __setitem__(self, index, value):\n if self._frozen:\n raise RuntimeError("Cannot modify frozen list.")\n self._items[index] = value\n\n def __delitem__(self, index):\n if self._frozen:\n raise RuntimeError("Cannot modify frozen list.")\n del self._items[index]\n\n def __len__(self):\n return self._items.__len__()\n\n def __iter__(self):\n return self._items.__iter__()\n\n def __reversed__(self):\n return self._items.__reversed__()\n\n def __eq__(self, other):\n return list(self) == other\n\n def __le__(self, other):\n return list(self) <= other\n\n def insert(self, pos, item):\n if self._frozen:\n raise RuntimeError("Cannot modify frozen list.")\n self._items.insert(pos, item)\n\n def __repr__(self):\n return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"\n\n def __hash__(self):\n if self._frozen:\n return hash(tuple(self))\n else:\n raise RuntimeError("Cannot hash unfrozen list.")\n\n\nPyFrozenList = FrozenList\n\n\nif not NO_EXTENSIONS:\n try:\n from ._frozenlist import FrozenList as CFrozenList # type: ignore\n except ImportError: # pragma: no cover\n pass\n else:\n FrozenList = CFrozenList # type: ignore\n
|
.venv\Lib\site-packages\frozenlist\__init__.py
|
__init__.py
|
Python
| 2,108 | 0.95 | 0.255814 | 0 |
node-utils
| 739 |
2025-04-15T07:46:00.353225
|
Apache-2.0
| false |
830a4838faad1667d0d2da950e821e4c
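The `frozenlist/__init__.py` captured above defines the pure-Python `FrozenList` and, unless `FROZENLIST_NO_EXTENSIONS` is set, swaps in the Cython `CFrozenList` when the compiled module imports cleanly. A minimal usage sketch of that API (the values are arbitrary; the behaviour is the same for either implementation):

```python
from frozenlist import FrozenList

fl = FrozenList([1, 2])
fl.append(3)                    # mutable while not frozen
fl.freeze()
assert fl.frozen is True

try:
    fl.append(4)                # any mutation after freeze() raises
except RuntimeError as exc:
    print(exc)                  # Cannot modify frozen list.

# Hashable only once frozen; the hash matches that of the equivalent tuple.
assert hash(fl) == hash((1, 2, 3))
```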
|
from typing import (\n Generic,\n Iterable,\n Iterator,\n List,\n MutableSequence,\n Optional,\n TypeVar,\n Union,\n overload,\n)\n\n_T = TypeVar("_T")\n_Arg = Union[List[_T], Iterable[_T]]\n\nclass FrozenList(MutableSequence[_T], Generic[_T]):\n def __init__(self, items: Optional[_Arg[_T]] = None) -> None: ...\n @property\n def frozen(self) -> bool: ...\n def freeze(self) -> None: ...\n @overload\n def __getitem__(self, i: int) -> _T: ...\n @overload\n def __getitem__(self, s: slice) -> FrozenList[_T]: ...\n @overload\n def __setitem__(self, i: int, o: _T) -> None: ...\n @overload\n def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...\n @overload\n def __delitem__(self, i: int) -> None: ...\n @overload\n def __delitem__(self, i: slice) -> None: ...\n def __len__(self) -> int: ...\n def __iter__(self) -> Iterator[_T]: ...\n def __reversed__(self) -> Iterator[_T]: ...\n def __eq__(self, other: object) -> bool: ...\n def __le__(self, other: FrozenList[_T]) -> bool: ...\n def __ne__(self, other: object) -> bool: ...\n def __lt__(self, other: FrozenList[_T]) -> bool: ...\n def __ge__(self, other: FrozenList[_T]) -> bool: ...\n def __gt__(self, other: FrozenList[_T]) -> bool: ...\n def insert(self, pos: int, item: _T) -> None: ...\n def __repr__(self) -> str: ...\n def __hash__(self) -> int: ...\n\n# types for C accelerators are the same\nCFrozenList = PyFrozenList = FrozenList\n
|
.venv\Lib\site-packages\frozenlist\__init__.pyi
|
__init__.pyi
|
Other
| 1,470 | 0.95 | 0.489362 | 0.022727 |
python-kit
| 78 |
2024-01-26T18:48:19.578102
|
MIT
| false |
f6ae02458b1f8f9ecb0f342417229d65
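Since the stub above declares `FrozenList` as a generic `MutableSequence[_T]` and the runtime class exposes `__class_getitem__`, the type can be parametrised both in annotations and at runtime (PEP 585 style). A small illustrative sketch, with made-up names and values:

```python
from frozenlist import FrozenList

names: FrozenList[str] = FrozenList(["spam", "eggs"])
names.freeze()

def first(items: FrozenList[str]) -> str:
    # Indexing with an int returns one element; slicing returns a FrozenList.
    return items[0]

assert first(names) == "spam"
```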
|
\n\n
|
.venv\Lib\site-packages\frozenlist\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 4,136 | 0.8 | 0 | 0 |
vue-tools
| 140 |
2024-02-17T09:20:25.545716
|
Apache-2.0
| false |
0d31bc60bf064d27acfc67b1cb2e9dd5
|
pip\n
|
.venv\Lib\site-packages\frozenlist-1.7.0.dist-info\INSTALLER
|
INSTALLER
|
Other
| 4 | 0.5 | 0 | 0 |
react-lib
| 886 |
2023-08-19T10:07:51.029406
|
GPL-3.0
| false |
365c9bfeb7d89244f2ce01c1de44cb85
|
Metadata-Version: 2.4\nName: frozenlist\nVersion: 1.7.0\nSummary: A list-like structure which implements collections.abc.MutableSequence\nHome-page: https://github.com/aio-libs/frozenlist\nMaintainer: aiohttp team <team@aiohttp.org>\nMaintainer-email: team@aiohttp.org\nLicense: Apache-2.0\nProject-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org\nProject-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org\nProject-URL: CI: Github Actions, https://github.com/aio-libs/frozenlist/actions\nProject-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md\nProject-URL: Coverage: codecov, https://codecov.io/github/aio-libs/frozenlist\nProject-URL: Docs: Changelog, https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst#changelog\nProject-URL: Docs: RTD, https://frozenlist.aio-libs.org\nProject-URL: GitHub: issues, https://github.com/aio-libs/frozenlist/issues\nProject-URL: GitHub: repo, https://github.com/aio-libs/frozenlist\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Intended Audience :: Developers\nClassifier: Operating System :: POSIX\nClassifier: Operating System :: MacOS :: MacOS X\nClassifier: Operating System :: Microsoft :: Windows\nClassifier: Programming Language :: Cython\nClassifier: Programming Language :: Python\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Programming Language :: Python :: 3.12\nClassifier: Programming Language :: Python :: 3.13\nClassifier: Programming Language :: Python :: Implementation :: CPython\nClassifier: Programming Language :: Python :: Implementation :: PyPy\nRequires-Python: >=3.9\nDescription-Content-Type: text/x-rst\nLicense-File: LICENSE\nDynamic: license-file\n\nfrozenlist\n==========\n\n.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg\n :target: https://github.com/aio-libs/frozenlist/actions\n :alt: GitHub status for master branch\n\n.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg?flag=pytest\n :target: https://codecov.io/gh/aio-libs/frozenlist?flags[]=pytest\n :alt: codecov.io status for master branch\n\n.. image:: https://img.shields.io/pypi/v/frozenlist.svg?logo=Python&logoColor=white\n :target: https://pypi.org/project/frozenlist\n :alt: frozenlist @ PyPI\n\n.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest\n :target: https://frozenlist.aio-libs.org\n :alt: Read The Docs build status badge\n\n.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat\n :target: https://matrix.to/#/%23aio-libs:matrix.org\n :alt: Matrix Room — #aio-libs:matrix.org\n\n.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat\n :target: https://matrix.to/#/%23aio-libs-space:matrix.org\n :alt: Matrix Space — #aio-libs-space:matrix.org\n\nIntroduction\n------------\n\n``frozenlist.FrozenList`` is a list-like structure which implements\n``collections.abc.MutableSequence``. 
The list is *mutable* until ``FrozenList.freeze``\nis called, after which list modifications raise ``RuntimeError``:\n\n\n>>> from frozenlist import FrozenList\n>>> fl = FrozenList([17, 42])\n>>> fl.append('spam')\n>>> fl.append('Vikings')\n>>> fl\n<FrozenList(frozen=False, [17, 42, 'spam', 'Vikings'])>\n>>> fl.freeze()\n>>> fl\n<FrozenList(frozen=True, [17, 42, 'spam', 'Vikings'])>\n>>> fl.frozen\nTrue\n>>> fl.append("Monty")\nTraceback (most recent call last):\n File "<stdin>", line 1, in <module>\n File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append\n self._check_frozen()\n File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen\n raise RuntimeError("Cannot modify frozen list.")\nRuntimeError: Cannot modify frozen list.\n\n\nFrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError:\n\n\n>>> fl = FrozenList([17, 42, 'spam'])\n>>> hash(fl)\nTraceback (most recent call last):\n File "<stdin>", line 1, in <module>\n File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__\n raise RuntimeError("Cannot hash unfrozen list.")\nRuntimeError: Cannot hash unfrozen list.\n>>> fl.freeze()\n>>> hash(fl)\n3713081631934410656\n>>> dictionary = {fl: 'Vikings'} # frozen fl can be a dict key\n>>> dictionary\n{<FrozenList(frozen=True, [1, 2])>: 'Vikings'}\n\n\nInstallation\n------------\n\n::\n\n $ pip install frozenlist\n\n\nDocumentation\n-------------\n\nhttps://frozenlist.aio-libs.org\n\nCommunication channels\n----------------------\n\nWe have a *Matrix Space* `#aio-libs-space:matrix.org\n<https://matrix.to/#/%23aio-libs-space:matrix.org>`_ which is\nalso accessible via Gitter.\n\nLicense\n-------\n\n``frozenlist`` is offered under the Apache 2 license.\n\nSource code\n-----------\n\nThe project is hosted on GitHub_\n\nPlease file an issue in the `bug tracker\n<https://github.com/aio-libs/frozenlist/issues>`_ if you have found a bug\nor have some suggestions to improve the library.\n\n.. _GitHub: https://github.com/aio-libs/frozenlist\n\n=========\nChangelog\n=========\n\n..\n You should *NOT* be adding new change log entries to this file, this\n file is managed by towncrier. You *may* edit previous change logs to\n fix problems like typo corrections or such.\n To add a new change log entry, please see\n https://pip.pypa.io/en/latest/development/contributing/#news-entries\n we named the news folder "changes".\n\n WARNING: Don't drop the next directive!\n\n.. towncrier release notes start\n\nv1.7.0\n======\n\n*(2025-06-09)*\n\n\nFeatures\n--------\n\n- Added deepcopy support to FrozenList -- by `@bdraco <https://github.com/sponsors/bdraco>`__.\n\n *Related issues and pull requests on GitHub:*\n `#659 <https://github.com/aio-libs/frozenlist/issues/659>`__.\n\n\nPackaging updates and notes for downstreams\n-------------------------------------------\n\n- Fixed an issue where ``frozenlist`` binary wheels would be built with debugging symbols and line tracing enabled, which significantly impacted performance. Line tracing is now disabled by default and can only be enabled explicitly -- by `@bdraco <https://github.com/sponsors/bdraco>`__.\n\n This change ensures that production builds are optimized for performance. Developers who need line tracing for debugging purposes can still enable it by:\n\n 1. Setting the ``FROZENLIST_CYTHON_TRACING`` environment variable\n 2. 
Using the ``--config-setting=with-cython-tracing=true`` option with pip\n\n *Related issues and pull requests on GitHub:*\n `#660 <https://github.com/aio-libs/frozenlist/issues/660>`__.\n\n- Enabled ``PIP_CONSTRAINT`` environment variable in the build configuration to ensure the pinned Cython version from ``requirements/cython.txt`` is used during wheel builds.\n\n *Related issues and pull requests on GitHub:*\n `#661 <https://github.com/aio-libs/frozenlist/issues/661>`__.\n\n\n----\n\n\nv1.6.2\n======\n\n*(2025-06-03)*\n\n\nNo significant changes.\n\n\n----\n\n\nv1.6.1\n======\n\n*(2025-06-02)*\n\n\nBug fixes\n---------\n\n- Correctly use ``cimport`` for including ``PyBool_FromLong`` -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.\n\n *Related issues and pull requests on GitHub:*\n `#653 <https://github.com/aio-libs/frozenlist/issues/653>`__.\n\n\nPackaging updates and notes for downstreams\n-------------------------------------------\n\n- Exclude ``_frozenlist.cpp`` from bdists/wheels -- by `@musicinmybrain <https://github.com/sponsors/musicinmybrain>`__.\n\n *Related issues and pull requests on GitHub:*\n `#649 <https://github.com/aio-libs/frozenlist/issues/649>`__.\n\n- Updated to use Cython 3.1 universally across the build path -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.\n\n *Related issues and pull requests on GitHub:*\n `#654 <https://github.com/aio-libs/frozenlist/issues/654>`__.\n\n\n----\n\n\nv1.6.0\n======\n\n*(2025-04-17)*\n\n\nBug fixes\n---------\n\n- Stopped implicitly allowing the use of Cython pre-release versions when\n building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and\n `@markgreene74 <https://github.com/sponsors/markgreene74>`__.\n\n *Related commits on GitHub:*\n `41591f2 <https://github.com/aio-libs/frozenlist/commit/41591f2>`__.\n\n\nFeatures\n--------\n\n- Implemented support for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.\n\n *Related issues and pull requests on GitHub:*\n `#618 <https://github.com/aio-libs/frozenlist/issues/618>`__.\n\n- Started building armv7l wheels -- by `@bdraco <https://github.com/sponsors/bdraco>`__.\n\n *Related issues and pull requests on GitHub:*\n `#642 <https://github.com/aio-libs/frozenlist/issues/642>`__.\n\n\nPackaging updates and notes for downstreams\n-------------------------------------------\n\n- Stopped implicitly allowing the use of Cython pre-release versions when\n building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and\n `@markgreene74 <https://github.com/sponsors/markgreene74>`__.\n\n *Related commits on GitHub:*\n `41591f2 <https://github.com/aio-libs/frozenlist/commit/41591f2>`__.\n\n- Started building wheels for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.\n\n *Related issues and pull requests on GitHub:*\n `#618 <https://github.com/aio-libs/frozenlist/issues/618>`__.\n\n- The packaging metadata switched to including an SPDX license identifier introduced in `PEP 639 <https://peps.python.org/pep-639>`__ -- by `@cdce8p <https://github.com/sponsors/cdce8p>`__.\n\n *Related issues and pull requests on GitHub:*\n `#639 <https://github.com/aio-libs/frozenlist/issues/639>`__.\n\n\nContributor-facing changes\n--------------------------\n\n- GitHub Actions CI/CD is now configured to manage caching pip-ecosystem\n dependencies using `re-actors/cache-python-deps`_ 
-- an action by\n `@webknjaz <https://github.com/sponsors/webknjaz>`__ that takes into account ABI stability and the exact\n version of Python runtime.\n\n .. _`re-actors/cache-python-deps`:\n https://github.com/marketplace/actions/cache-python-deps\n\n *Related issues and pull requests on GitHub:*\n `#633 <https://github.com/aio-libs/frozenlist/issues/633>`__.\n\n- Organized dependencies into test and lint dependencies so that no\n unnecessary ones are installed during CI runs -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.\n\n *Related issues and pull requests on GitHub:*\n `#636 <https://github.com/aio-libs/frozenlist/issues/636>`__.\n\n\n----\n\n\n1.5.0 (2024-10-22)\n==================\n\nBug fixes\n---------\n\n- An incorrect signature of the ``__class_getitem__`` class method\n has been fixed, adding a missing ``class_item`` argument under\n Python 3.8 and older.\n\n This change also improves the code coverage of this method that\n was previously missing -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.\n\n\n *Related issues and pull requests on GitHub:*\n `#567 <https://github.com/aio-libs/frozenlist/issues/567>`__, `#571 <https://github.com/aio-libs/frozenlist/issues/571>`__.\n\n\nImproved documentation\n----------------------\n\n- Rendered issue, PR, and commit links now lead to\n ``frozenlist``'s repo instead of ``yarl``'s repo.\n\n\n *Related issues and pull requests on GitHub:*\n `#573 <https://github.com/aio-libs/frozenlist/issues/573>`__.\n\n- On the ``Contributing docs`` page,\n a link to the ``Towncrier philosophy`` has been fixed.\n\n\n *Related issues and pull requests on GitHub:*\n `#574 <https://github.com/aio-libs/frozenlist/issues/574>`__.\n\n\nPackaging updates and notes for downstreams\n-------------------------------------------\n\n- A name of a temporary building directory now reflects\n that it's related to ``frozenlist``, not ``yarl``.\n\n\n *Related issues and pull requests on GitHub:*\n `#573 <https://github.com/aio-libs/frozenlist/issues/573>`__.\n\n- Declared Python 3.13 supported officially in the distribution package metadata.\n\n\n *Related issues and pull requests on GitHub:*\n `#595 <https://github.com/aio-libs/frozenlist/issues/595>`__.\n\n\n----\n\n\n1.4.1 (2023-12-15)\n==================\n\nPackaging updates and notes for downstreams\n-------------------------------------------\n\n- Declared Python 3.12 and PyPy 3.8-3.10 supported officially\n in the distribution package metadata.\n\n\n *Related issues and pull requests on GitHub:*\n `#553 <https://github.com/aio-libs/frozenlist/issues/553>`__.\n\n- Replaced the packaging is replaced from an old-fashioned ``setup.py`` to an\n in-tree `PEP 517 <https://peps.python.org/pep-517>`__ build backend -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.\n\n Whenever the end-users or downstream packagers need to build ``frozenlist``\n from source (a Git checkout or an sdist), they may pass a ``config_settings``\n flag ``pure-python``. If this flag is not set, a C-extension will be built\n and included into the distribution.\n\n Here is how this can be done with ``pip``:\n\n .. code-block:: console\n\n $ python3 -m pip install . --config-settings=pure-python=\n\n This will also work with ``-e | --editable``.\n\n The same can be achieved via ``pypa/build``:\n\n .. 
code-block:: console\n\n $ python3 -m build --config-setting=pure-python=\n\n Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source\n directly, as opposed to building an ``sdist`` and then building from it.\n\n\n *Related issues and pull requests on GitHub:*\n `#560 <https://github.com/aio-libs/frozenlist/issues/560>`__.\n\n\nContributor-facing changes\n--------------------------\n\n- It is now possible to request line tracing in Cython builds using the\n ``with-cython-tracing`` `PEP 517 <https://peps.python.org/pep-517>`__ config setting\n -- `@webknjaz <https://github.com/sponsors/webknjaz>`__.\n\n This can be used in CI and development environment to measure coverage\n on Cython modules, but is not normally useful to the end-users or\n downstream packagers.\n\n Here's a usage example:\n\n .. code-block:: console\n\n $ python3 -Im pip install . --config-settings=with-cython-tracing=true\n\n For editable installs, this setting is on by default. Otherwise, it's\n off unless requested explicitly.\n\n The following produces C-files required for the Cython coverage\n plugin to map the measurements back to the PYX-files:\n\n .. code-block:: console\n\n $ python -Im pip install -e .\n\n Alternatively, the ``FROZENLIST_CYTHON_TRACING=1`` environment variable\n can be set to do the same as the `PEP 517 <https://peps.python.org/pep-517>`__ config setting.\n\n\n *Related issues and pull requests on GitHub:*\n `#560 <https://github.com/aio-libs/frozenlist/issues/560>`__.\n\n- Coverage collection has been implemented for the Cython modules\n -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.\n\n It will also be reported to Codecov from any non-release CI jobs.\n\n\n *Related issues and pull requests on GitHub:*\n `#561 <https://github.com/aio-libs/frozenlist/issues/561>`__.\n\n- A step-by-step ``Release Guide`` guide has\n been added, describing how to release *frozenlist* -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.\n\n This is primarily targeting the maintainers.\n\n\n *Related issues and pull requests on GitHub:*\n `#563 <https://github.com/aio-libs/frozenlist/issues/563>`__.\n\n- Detailed ``Contributing Guidelines`` on\n authoring the changelog fragments have been published in the\n documentation -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.\n\n\n *Related issues and pull requests on GitHub:*\n `#564 <https://github.com/aio-libs/frozenlist/issues/564>`__.\n\n\n----\n\n\n1.4.0 (2023-07-12)\n==================\n\nThe published source distribution package became buildable\nunder Python 3.12.\n\n\n----\n\n\nBugfixes\n--------\n\n- Removed an unused ``typing.Tuple`` import\n `#411 <https://github.com/aio-libs/frozenlist/issues/411>`_\n\n\nDeprecations and Removals\n-------------------------\n\n- Dropped Python 3.7 support.\n `#413 <https://github.com/aio-libs/frozenlist/issues/413>`_\n\n\nMisc\n----\n\n- `#410 <https://github.com/aio-libs/frozenlist/issues/410>`_, `#433 <https://github.com/aio-libs/frozenlist/issues/433>`_\n\n\n----\n\n\n1.3.3 (2022-11-08)\n==================\n\n- Fixed CI runs when creating a new release, where new towncrier versions\n fail when the current version section is already present.\n\n\n----\n\n\n1.3.2 (2022-11-08)\n==================\n\nMisc\n----\n\n- Updated the CI runs to better check for test results and to avoid deprecated syntax. 
`#327 <https://github.com/aio-libs/frozenlist/issues/327>`_\n\n\n----\n\n\n1.3.1 (2022-08-02)\n==================\n\nThe published source distribution package became buildable\nunder Python 3.11.\n\n\n----\n\n\n1.3.0 (2022-01-18)\n==================\n\nBugfixes\n--------\n\n- Do not install C sources with binary distributions.\n `#250 <https://github.com/aio-libs/frozenlist/issues/250>`_\n\n\nDeprecations and Removals\n-------------------------\n\n- Dropped Python 3.6 support\n `#274 <https://github.com/aio-libs/frozenlist/issues/274>`_\n\n\n----\n\n\n1.2.0 (2021-10-16)\n==================\n\nFeatures\n--------\n\n- ``FrozenList`` now supports being used as a generic type as per PEP 585, e.g. ``frozen_int_list: FrozenList[int]`` (requires Python 3.9 or newer).\n `#172 <https://github.com/aio-libs/frozenlist/issues/172>`_\n- Added support for Python 3.10.\n `#227 <https://github.com/aio-libs/frozenlist/issues/227>`_\n- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes.\n `#227 <https://github.com/aio-libs/frozenlist/issues/227>`_\n- Started shipping platform-specific arm64 wheels for Apple Silicon.\n `#227 <https://github.com/aio-libs/frozenlist/issues/227>`_\n\n\n----\n\n\n1.1.1 (2020-11-14)\n==================\n\nBugfixes\n--------\n\n- Provide x86 Windows wheels.\n `#169 <https://github.com/aio-libs/frozenlist/issues/169>`_\n\n\n----\n\n\n1.1.0 (2020-10-13)\n==================\n\nFeatures\n--------\n\n- Add support for hashing of a frozen list.\n `#136 <https://github.com/aio-libs/frozenlist/issues/136>`_\n\n- Support Python 3.8 and 3.9.\n\n- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on\n Linux as well as ``x86_64``.\n\n\n----\n\n\n1.0.0 (2019-11-09)\n==================\n\nDeprecations and Removals\n-------------------------\n\n- Dropped support for Python 3.5; only 3.6, 3.7 and 3.8 are supported going forward.\n `#24 <https://github.com/aio-libs/frozenlist/issues/24>`_\n
|
.venv\Lib\site-packages\frozenlist-1.7.0.dist-info\METADATA
|
METADATA
|
Other
| 19,246 | 0.95 | 0.035144 | 0.07455 |
python-kit
| 872 |
2023-09-15T07:27:08.710128
|
GPL-3.0
| false |
42b25483ba7c6944c0a08b2e1056aa1e
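The changelog embedded in this METADATA notes that v1.7.0 added deepcopy support, with the frozen state preserved on the copy. A short sketch of that behaviour, assuming frozenlist >= 1.7.0 and using arbitrary example values:

```python
import copy

from frozenlist import FrozenList

inner = FrozenList([1, 2])
outer = FrozenList([inner, "x"])
outer.freeze()

clone = copy.deepcopy(outer)
assert clone is not outer        # a new FrozenList object
assert clone[0] is not inner     # items are copied recursively
assert clone.frozen              # frozen state carries over to the copy
```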
|
frozenlist-1.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\nfrozenlist-1.7.0.dist-info/METADATA,sha256=UnA8XurkLp3uLNyvmsONivLBLBSf-gs3B_sgcZtogD0,19246\nfrozenlist-1.7.0.dist-info/RECORD,,\nfrozenlist-1.7.0.dist-info/WHEEL,sha256=qV0EIPljj1XC_vuSatRWjn02nZIz3N1t8jsZz7HBr2U,101\nfrozenlist-1.7.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332\nfrozenlist-1.7.0.dist-info/top_level.txt,sha256=jivtxsPXA3nK3WBWW2LW5Mtu_GHt8UZA13NeCs2cKuA,11\nfrozenlist/__init__.py,sha256=lK5sikKJ0TltduFRDrIHU9o0tU_6wsgi43kJ0vucBOw,2108\nfrozenlist/__init__.pyi,sha256=vMEoES1xGegPtVXoCi9XydEeHsyuIq-KdeXwP5PdsaA,1470\nfrozenlist/__pycache__/__init__.cpython-313.pyc,,\nfrozenlist/_frozenlist.cp313-win_amd64.pyd,sha256=BlebAlz0MdDRtbo0zlhe81pv_FAW1T4iHcMCDgZ8BH0,68096\nfrozenlist/_frozenlist.pyx,sha256=t-aGjuEiVt_MZPBJ0RnraavVmPBK6arz3i48ZvXuYsU,3708\nfrozenlist/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7\n
|
.venv\Lib\site-packages\frozenlist-1.7.0.dist-info\RECORD
|
RECORD
|
Other
| 980 | 0.7 | 0 | 0 |
awesome-app
| 941 |
2025-05-06T15:35:23.855579
|
MIT
| false |
52f8ee3795ff631f13338bc04c6014f0
|
frozenlist\n
|
.venv\Lib\site-packages\frozenlist-1.7.0.dist-info\top_level.txt
|
top_level.txt
|
Other
| 11 | 0.5 | 0 | 0 |
python-kit
| 232 |
2024-06-22T04:14:39.695016
|
BSD-3-Clause
| false |
8e0dbd99fb6fca908d940949d4dad235
|
Wheel-Version: 1.0\nGenerator: setuptools (80.9.0)\nRoot-Is-Purelib: false\nTag: cp313-cp313-win_amd64\n\n
|
.venv\Lib\site-packages\frozenlist-1.7.0.dist-info\WHEEL
|
WHEEL
|
Other
| 101 | 0.7 | 0 | 0 |
python-kit
| 774 |
2023-10-19T00:47:09.292929
|
Apache-2.0
| false |
eb6c9e665bbbd698545236600675f165
|
Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n "License" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n "Licensor" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n "Legal Entity" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n "control" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n "You" (or "Your") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n "Source" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n "Object" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n "Work" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n "Derivative Works" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n "Contribution" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, "submitted"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as "Not a Contribution."\n\n "Contributor" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a "NOTICE" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. 
Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets "{}"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. 
We also recommend that a\n file or class name and description of purpose be included on the\n same "printed page" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright 2013-2019 Nikolay Kim and Andrew Svetlov\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n
|
.venv\Lib\site-packages\frozenlist-1.7.0.dist-info\licenses\LICENSE
|
LICENSE
|
Other
| 11,332 | 0.95 | 0.119403 | 0 |
node-utils
| 888 |
2024-10-11T18:22:24.652914
|
MIT
| false |
cf056e8e7a0a5477451af18b7b5aa98c
|
"""Helper functions for a standard streaming compression API"""\n\nfrom zipfile import ZipFile\n\nimport fsspec.utils\nfrom fsspec.spec import AbstractBufferedFile\n\n\ndef noop_file(file, mode, **kwargs):\n return file\n\n\n# TODO: files should also be available as contexts\n# should be functions of the form func(infile, mode=, **kwargs) -> file-like\ncompr = {None: noop_file}\n\n\ndef register_compression(name, callback, extensions, force=False):\n """Register an "inferable" file compression type.\n\n Registers transparent file compression type for use with fsspec.open.\n Compression can be specified by name in open, or "infer"-ed for any files\n ending with the given extensions.\n\n Args:\n name: (str) The compression type name. Eg. "gzip".\n callback: A callable of form (infile, mode, **kwargs) -> file-like.\n Accepts an input file-like object, the target mode and kwargs.\n Returns a wrapped file-like object.\n extensions: (str, Iterable[str]) A file extension, or list of file\n extensions for which to infer this compression scheme. Eg. "gz".\n force: (bool) Force re-registration of compression type or extensions.\n\n Raises:\n ValueError: If name or extensions already registered, and not force.\n\n """\n if isinstance(extensions, str):\n extensions = [extensions]\n\n # Validate registration\n if name in compr and not force:\n raise ValueError(f"Duplicate compression registration: {name}")\n\n for ext in extensions:\n if ext in fsspec.utils.compressions and not force:\n raise ValueError(f"Duplicate compression file extension: {ext} ({name})")\n\n compr[name] = callback\n\n for ext in extensions:\n fsspec.utils.compressions[ext] = name\n\n\ndef unzip(infile, mode="rb", filename=None, **kwargs):\n if "r" not in mode:\n filename = filename or "file"\n z = ZipFile(infile, mode="w", **kwargs)\n fo = z.open(filename, mode="w")\n fo.close = lambda closer=fo.close: closer() or z.close()\n return fo\n z = ZipFile(infile)\n if filename is None:\n filename = z.namelist()[0]\n return z.open(filename, mode="r", **kwargs)\n\n\nregister_compression("zip", unzip, "zip")\n\ntry:\n from bz2 import BZ2File\nexcept ImportError:\n pass\nelse:\n register_compression("bz2", BZ2File, "bz2")\n\ntry: # pragma: no cover\n from isal import igzip\n\n def isal(infile, mode="rb", **kwargs):\n return igzip.IGzipFile(fileobj=infile, mode=mode, **kwargs)\n\n register_compression("gzip", isal, "gz")\nexcept ImportError:\n from gzip import GzipFile\n\n register_compression(\n "gzip", lambda f, **kwargs: GzipFile(fileobj=f, **kwargs), "gz"\n )\n\ntry:\n from lzma import LZMAFile\n\n register_compression("lzma", LZMAFile, "lzma")\n register_compression("xz", LZMAFile, "xz")\nexcept ImportError:\n pass\n\ntry:\n import lzmaffi\n\n register_compression("lzma", lzmaffi.LZMAFile, "lzma", force=True)\n register_compression("xz", lzmaffi.LZMAFile, "xz", force=True)\nexcept ImportError:\n pass\n\n\nclass SnappyFile(AbstractBufferedFile):\n def __init__(self, infile, mode, **kwargs):\n import snappy\n\n super().__init__(\n fs=None, path="snappy", mode=mode.strip("b") + "b", size=999999999, **kwargs\n )\n self.infile = infile\n if "r" in mode:\n self.codec = snappy.StreamDecompressor()\n else:\n self.codec = snappy.StreamCompressor()\n\n def _upload_chunk(self, final=False):\n self.buffer.seek(0)\n out = self.codec.add_chunk(self.buffer.read())\n self.infile.write(out)\n return True\n\n def seek(self, loc, whence=0):\n raise NotImplementedError("SnappyFile is not seekable")\n\n def seekable(self):\n return False\n\n def 
_fetch_range(self, start, end):\n """Get the specified set of bytes from remote"""\n data = self.infile.read(end - start)\n return self.codec.decompress(data)\n\n\ntry:\n import snappy\n\n snappy.compress(b"")\n # Snappy may use the .sz file extension, but this is not part of the\n # standard implementation.\n register_compression("snappy", SnappyFile, [])\n\nexcept (ImportError, NameError, AttributeError):\n pass\n\ntry:\n import lz4.frame\n\n register_compression("lz4", lz4.frame.open, "lz4")\nexcept ImportError:\n pass\n\ntry:\n import zstandard as zstd\n\n def zstandard_file(infile, mode="rb"):\n if "r" in mode:\n cctx = zstd.ZstdDecompressor()\n return cctx.stream_reader(infile)\n else:\n cctx = zstd.ZstdCompressor(level=10)\n return cctx.stream_writer(infile)\n\n register_compression("zstd", zstandard_file, "zst")\nexcept ImportError:\n pass\n\n\ndef available_compressions():\n """Return a list of the implemented compressions."""\n return list(compr)\n
|
.venv\Lib\site-packages\fsspec\compression.py
|
compression.py
|
Python
| 4,865 | 0.95 | 0.182857 | 0.039063 |
node-utils
| 848 |
2024-03-01T17:40:52.757004
|
GPL-3.0
| false |
db2ee787725ac65877de5be5173702de
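`fsspec/compression.py` above registers built-in codecs through `register_compression(name, callback, extensions)`, where the callback has the form `(infile, mode, **kwargs) -> file-like` and the extensions drive `compression="infer"`. A hedged sketch of adding one more mapping; the codec name "my-bz2" and the extension "bz2alt" are made up for illustration, and `bz2` plus fsspec's in-memory filesystem are used only because they ship with Python and fsspec respectively:

```python
from bz2 import BZ2File

import fsspec
from fsspec.compression import register_compression

# Map a hypothetical ".bz2alt" extension onto bz2 compression.
register_compression("my-bz2", BZ2File, "bz2alt", force=True)

with fsspec.open("memory://demo.bz2alt", "wb", compression="infer") as f:
    f.write(b"hello")

with fsspec.open("memory://demo.bz2alt", "rb", compression="infer") as f:
    assert f.read() == b"hello"
```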
|
from __future__ import annotations\n\nimport configparser\nimport json\nimport os\nimport warnings\nfrom typing import Any\n\nconf: dict[str, dict[str, Any]] = {}\ndefault_conf_dir = os.path.join(os.path.expanduser("~"), ".config/fsspec")\nconf_dir = os.environ.get("FSSPEC_CONFIG_DIR", default_conf_dir)\n\n\ndef set_conf_env(conf_dict, envdict=os.environ):\n """Set config values from environment variables\n\n Looks for variables of the form ``FSSPEC_<protocol>`` and\n ``FSSPEC_<protocol>_<kwarg>``. For ``FSSPEC_<protocol>`` the value is parsed\n as a json dictionary and used to ``update`` the config of the\n corresponding protocol. For ``FSSPEC_<protocol>_<kwarg>`` there is no\n attempt to convert the string value, but the kwarg keys will be lower-cased.\n\n The ``FSSPEC_<protocol>_<kwarg>`` variables are applied after the\n ``FSSPEC_<protocol>`` ones.\n\n Parameters\n ----------\n conf_dict : dict(str, dict)\n This dict will be mutated\n envdict : dict-like(str, str)\n Source for the values - usually the real environment\n """\n kwarg_keys = []\n for key in envdict:\n if key.startswith("FSSPEC_") and len(key) > 7 and key[7] != "_":\n if key.count("_") > 1:\n kwarg_keys.append(key)\n continue\n try:\n value = json.loads(envdict[key])\n except json.decoder.JSONDecodeError as ex:\n warnings.warn(\n f"Ignoring environment variable {key} due to a parse failure: {ex}"\n )\n else:\n if isinstance(value, dict):\n _, proto = key.split("_", 1)\n conf_dict.setdefault(proto.lower(), {}).update(value)\n else:\n warnings.warn(\n f"Ignoring environment variable {key} due to not being a dict:"\n f" {type(value)}"\n )\n elif key.startswith("FSSPEC"):\n warnings.warn(\n f"Ignoring environment variable {key} due to having an unexpected name"\n )\n\n for key in kwarg_keys:\n _, proto, kwarg = key.split("_", 2)\n conf_dict.setdefault(proto.lower(), {})[kwarg.lower()] = envdict[key]\n\n\ndef set_conf_files(cdir, conf_dict):\n """Set config values from files\n\n Scans for INI and JSON files in the given dictionary, and uses their\n contents to set the config. 
In case of repeated values, later values\n win.\n\n In the case of INI files, all values are strings, and these will not\n be converted.\n\n Parameters\n ----------\n cdir : str\n Directory to search\n conf_dict : dict(str, dict)\n This dict will be mutated\n """\n if not os.path.isdir(cdir):\n return\n allfiles = sorted(os.listdir(cdir))\n for fn in allfiles:\n if fn.endswith(".ini"):\n ini = configparser.ConfigParser()\n ini.read(os.path.join(cdir, fn))\n for key in ini:\n if key == "DEFAULT":\n continue\n conf_dict.setdefault(key, {}).update(dict(ini[key]))\n if fn.endswith(".json"):\n with open(os.path.join(cdir, fn)) as f:\n js = json.load(f)\n for key in js:\n conf_dict.setdefault(key, {}).update(dict(js[key]))\n\n\ndef apply_config(cls, kwargs, conf_dict=None):\n """Supply default values for kwargs when instantiating class\n\n Augments the passed kwargs, by finding entries in the config dict\n which match the classes ``.protocol`` attribute (one or more str)\n\n Parameters\n ----------\n cls : file system implementation\n kwargs : dict\n conf_dict : dict of dict\n Typically this is the global configuration\n\n Returns\n -------\n dict : the modified set of kwargs\n """\n if conf_dict is None:\n conf_dict = conf\n protos = cls.protocol if isinstance(cls.protocol, (tuple, list)) else [cls.protocol]\n kw = {}\n for proto in protos:\n # default kwargs from the current state of the config\n if proto in conf_dict:\n kw.update(conf_dict[proto])\n # explicit kwargs always win\n kw.update(**kwargs)\n kwargs = kw\n return kwargs\n\n\nset_conf_files(conf_dir, conf)\nset_conf_env(conf)\n
|
.venv\Lib\site-packages\fsspec\config.py
|
config.py
|
Python
| 4,279 | 0.95 | 0.19084 | 0.018018 |
python-kit
| 939 |
2024-10-07T14:48:21.359988
|
BSD-3-Clause
| false |
70c9445ee7c09b34493f4d027d07c779
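`fsspec/config.py` above reads `FSSPEC_<protocol>` variables as JSON dictionaries and `FSSPEC_<protocol>_<kwarg>` variables as plain strings, with the kwarg form applied last. Because `set_conf_env` accepts an explicit `envdict`, the parsing can be sketched without touching the real environment (the "gcs" protocol and the values below are purely illustrative):

```python
from fsspec.config import set_conf_env

conf = {}
fake_env = {
    "FSSPEC_GCS": '{"token": "anon"}',   # parsed as JSON, merged into the "gcs" section
    "FSSPEC_GCS_PROJECT": "my-project",  # kept as a string, key lower-cased, applied last
}
set_conf_env(conf, envdict=fake_env)

assert conf == {"gcs": {"token": "anon", "project": "my-project"}}
```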
|
import time\nfrom collections.abc import MutableMapping\nfrom functools import lru_cache\n\n\nclass DirCache(MutableMapping):\n """\n Caching of directory listings, in a structure like::\n\n {"path0": [\n {"name": "path0/file0",\n "size": 123,\n "type": "file",\n ...\n },\n {"name": "path0/file1",\n },\n ...\n ],\n "path1": [...]\n }\n\n Parameters to this class control listing expiry or indeed turn\n caching off\n """\n\n def __init__(\n self,\n use_listings_cache=True,\n listings_expiry_time=None,\n max_paths=None,\n **kwargs,\n ):\n """\n\n Parameters\n ----------\n use_listings_cache: bool\n If False, this cache never returns items, but always reports KeyError,\n and setting items has no effect\n listings_expiry_time: int or float (optional)\n Time in seconds that a listing is considered valid. If None,\n listings do not expire.\n max_paths: int (optional)\n The number of most recent listings that are considered valid; 'recent'\n refers to when the entry was set.\n """\n self._cache = {}\n self._times = {}\n if max_paths:\n self._q = lru_cache(max_paths + 1)(lambda key: self._cache.pop(key, None))\n self.use_listings_cache = use_listings_cache\n self.listings_expiry_time = listings_expiry_time\n self.max_paths = max_paths\n\n def __getitem__(self, item):\n if self.listings_expiry_time is not None:\n if self._times.get(item, 0) - time.time() < -self.listings_expiry_time:\n del self._cache[item]\n if self.max_paths:\n self._q(item)\n return self._cache[item] # maybe raises KeyError\n\n def clear(self):\n self._cache.clear()\n\n def __len__(self):\n return len(self._cache)\n\n def __contains__(self, item):\n try:\n self[item]\n return True\n except KeyError:\n return False\n\n def __setitem__(self, key, value):\n if not self.use_listings_cache:\n return\n if self.max_paths:\n self._q(key)\n self._cache[key] = value\n if self.listings_expiry_time is not None:\n self._times[key] = time.time()\n\n def __delitem__(self, key):\n del self._cache[key]\n\n def __iter__(self):\n entries = list(self._cache)\n\n return (k for k in entries if k in self)\n\n def __reduce__(self):\n return (\n DirCache,\n (self.use_listings_cache, self.listings_expiry_time, self.max_paths),\n )\n
|
.venv\Lib\site-packages\fsspec\dircache.py
|
dircache.py
|
Python
| 2,717 | 0.95 | 0.214286 | 0.012048 |
awesome-app
| 503 |
2023-10-30T04:53:51.727618
|
Apache-2.0
| false |
079f8de17db0a1cae80371e0f03c790b
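`DirCache` above behaves like a mapping of path -> listing, where entries silently expire after `listings_expiry_time` seconds. A small sketch of that expiry behaviour (the path and listing entry are made up; the sleep simply outlives the chosen expiry):

```python
import time

from fsspec.dircache import DirCache

cache = DirCache(listings_expiry_time=0.2)
cache["bucket/path"] = [{"name": "bucket/path/file0", "size": 123, "type": "file"}]
assert "bucket/path" in cache

time.sleep(0.3)
# The listing is now older than listings_expiry_time, so lookups miss.
assert "bucket/path" not in cache
```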
|
"""\nfsspec user-defined exception classes\n"""\n\nimport asyncio\n\n\nclass BlocksizeMismatchError(ValueError):\n """\n Raised when a cached file is opened with a different blocksize than it was\n written with\n """\n\n\nclass FSTimeoutError(asyncio.TimeoutError):\n """\n Raised when a fsspec function timed out occurs\n """\n
|
.venv\Lib\site-packages\fsspec\exceptions.py
|
exceptions.py
|
Python
| 331 | 0.85 | 0.166667 | 0 |
node-utils
| 923 |
2024-12-01T22:51:56.963166
|
Apache-2.0
| false |
22f1fa4928259ee1bad55e44f02ec2b7
|
import io\nimport json\nimport warnings\n\nfrom .core import url_to_fs\nfrom .utils import merge_offset_ranges\n\n# Parquet-Specific Utilities for fsspec\n#\n# Most of the functions defined in this module are NOT\n# intended for public consumption. The only exception\n# to this is `open_parquet_file`, which should be used\n# place of `fs.open()` to open parquet-formatted files\n# on remote file systems.\n\n\ndef open_parquet_file(\n path,\n mode="rb",\n fs=None,\n metadata=None,\n columns=None,\n row_groups=None,\n storage_options=None,\n strict=False,\n engine="auto",\n max_gap=64_000,\n max_block=256_000_000,\n footer_sample_size=1_000_000,\n **kwargs,\n):\n """\n Return a file-like object for a single Parquet file.\n\n The specified parquet `engine` will be used to parse the\n footer metadata, and determine the required byte ranges\n from the file. The target path will then be opened with\n the "parts" (`KnownPartsOfAFile`) caching strategy.\n\n Note that this method is intended for usage with remote\n file systems, and is unlikely to improve parquet-read\n performance on local file systems.\n\n Parameters\n ----------\n path: str\n Target file path.\n mode: str, optional\n Mode option to be passed through to `fs.open`. Default is "rb".\n metadata: Any, optional\n Parquet metadata object. Object type must be supported\n by the backend parquet engine. For now, only the "fastparquet"\n engine supports an explicit `ParquetFile` metadata object.\n If a metadata object is supplied, the remote footer metadata\n will not need to be transferred into local memory.\n fs: AbstractFileSystem, optional\n Filesystem object to use for opening the file. If nothing is\n specified, an `AbstractFileSystem` object will be inferred.\n engine : str, default "auto"\n Parquet engine to use for metadata parsing. Allowed options\n include "fastparquet", "pyarrow", and "auto". The specified\n engine must be installed in the current environment. If\n "auto" is specified, and both engines are installed,\n "fastparquet" will take precedence over "pyarrow".\n columns: list, optional\n List of all column names that may be read from the file.\n row_groups : list, optional\n List of all row-groups that may be read from the file. This\n may be a list of row-group indices (integers), or it may be\n a list of `RowGroup` metadata objects (if the "fastparquet"\n engine is used).\n storage_options : dict, optional\n Used to generate an `AbstractFileSystem` object if `fs` was\n not specified.\n strict : bool, optional\n Whether the resulting `KnownPartsOfAFile` cache should\n fetch reads that go beyond a known byte-range boundary.\n If `False` (the default), any read that ends outside a\n known part will be zero padded. Note that using\n `strict=True` may be useful for debugging.\n max_gap : int, optional\n Neighboring byte ranges will only be merged when their\n inter-range gap is <= `max_gap`. Default is 64KB.\n max_block : int, optional\n Neighboring byte ranges will only be merged when the size of\n the aggregated range is <= `max_block`. Default is 256MB.\n footer_sample_size : int, optional\n Number of bytes to read from the end of the path to look\n for the footer metadata. If the sampled bytes do not contain\n the footer, a second read request will be required, and\n performance will suffer. 
Default is 1MB.\n **kwargs :\n Optional key-word arguments to pass to `fs.open`\n """\n\n # Make sure we have an `AbstractFileSystem` object\n # to work with\n if fs is None:\n fs = url_to_fs(path, **(storage_options or {}))[0]\n\n # For now, `columns == []` not supported. Just use\n # default `open` command with `path` input\n if columns is not None and len(columns) == 0:\n return fs.open(path, mode=mode)\n\n # Set the engine\n engine = _set_engine(engine)\n\n # Fetch the known byte ranges needed to read\n # `columns` and/or `row_groups`\n data = _get_parquet_byte_ranges(\n [path],\n fs,\n metadata=metadata,\n columns=columns,\n row_groups=row_groups,\n engine=engine,\n max_gap=max_gap,\n max_block=max_block,\n footer_sample_size=footer_sample_size,\n )\n\n # Extract file name from `data`\n fn = next(iter(data)) if data else path\n\n # Call self.open with "parts" caching\n options = kwargs.pop("cache_options", {}).copy()\n return fs.open(\n fn,\n mode=mode,\n cache_type="parts",\n cache_options={\n **options,\n "data": data.get(fn, {}),\n "strict": strict,\n },\n **kwargs,\n )\n\n\ndef _get_parquet_byte_ranges(\n paths,\n fs,\n metadata=None,\n columns=None,\n row_groups=None,\n max_gap=64_000,\n max_block=256_000_000,\n footer_sample_size=1_000_000,\n engine="auto",\n):\n """Get a dictionary of the known byte ranges needed\n to read a specific column/row-group selection from a\n Parquet dataset. Each value in the output dictionary\n is intended for use as the `data` argument for the\n `KnownPartsOfAFile` caching strategy of a single path.\n """\n\n # Set engine if necessary\n if isinstance(engine, str):\n engine = _set_engine(engine)\n\n # Pass to specialized function if metadata is defined\n if metadata is not None:\n # Use the provided parquet metadata object\n # to avoid transferring/parsing footer metadata\n return _get_parquet_byte_ranges_from_metadata(\n metadata,\n fs,\n engine,\n columns=columns,\n row_groups=row_groups,\n max_gap=max_gap,\n max_block=max_block,\n )\n\n # Get file sizes asynchronously\n file_sizes = fs.sizes(paths)\n\n # Populate global paths, starts, & ends\n result = {}\n data_paths = []\n data_starts = []\n data_ends = []\n add_header_magic = True\n if columns is None and row_groups is None:\n # We are NOT selecting specific columns or row-groups.\n #\n # We can avoid sampling the footers, and just transfer\n # all file data with cat_ranges\n for i, path in enumerate(paths):\n result[path] = {}\n for b in range(0, file_sizes[i], max_block):\n data_paths.append(path)\n data_starts.append(b)\n data_ends.append(min(b + max_block, file_sizes[i]))\n add_header_magic = False # "Magic" should already be included\n else:\n # We ARE selecting specific columns or row-groups.\n #\n # Gather file footers.\n # We just take the last `footer_sample_size` bytes of each\n # file (or the entire file if it is smaller than that)\n footer_starts = []\n footer_ends = []\n for i, path in enumerate(paths):\n footer_ends.append(file_sizes[i])\n sample_size = max(0, file_sizes[i] - footer_sample_size)\n footer_starts.append(sample_size)\n footer_samples = fs.cat_ranges(paths, footer_starts, footer_ends)\n\n # Check our footer samples and re-sample if necessary.\n missing_footer_starts = footer_starts.copy()\n large_footer = 0\n for i, path in enumerate(paths):\n footer_size = int.from_bytes(footer_samples[i][-8:-4], "little")\n real_footer_start = file_sizes[i] - (footer_size + 8)\n if real_footer_start < footer_starts[i]:\n missing_footer_starts[i] = real_footer_start\n large_footer = 
max(large_footer, (footer_size + 8))\n if large_footer:\n warnings.warn(\n f"Not enough data was used to sample the parquet footer. "\n f"Try setting footer_sample_size >= {large_footer}."\n )\n for i, block in enumerate(\n fs.cat_ranges(\n paths,\n missing_footer_starts,\n footer_starts,\n )\n ):\n footer_samples[i] = block + footer_samples[i]\n footer_starts[i] = missing_footer_starts[i]\n\n # Calculate required byte ranges for each path\n for i, path in enumerate(paths):\n # Deal with small-file case.\n # Just include all remaining bytes of the file\n # in a single range.\n if file_sizes[i] < max_block:\n if footer_starts[i] > 0:\n # Only need to transfer the data if the\n # footer sample isn't already the whole file\n data_paths.append(path)\n data_starts.append(0)\n data_ends.append(footer_starts[i])\n continue\n\n # Use "engine" to collect data byte ranges\n path_data_starts, path_data_ends = engine._parquet_byte_ranges(\n columns,\n row_groups=row_groups,\n footer=footer_samples[i],\n footer_start=footer_starts[i],\n )\n\n data_paths += [path] * len(path_data_starts)\n data_starts += path_data_starts\n data_ends += path_data_ends\n\n # Merge adjacent offset ranges\n data_paths, data_starts, data_ends = merge_offset_ranges(\n data_paths,\n data_starts,\n data_ends,\n max_gap=max_gap,\n max_block=max_block,\n sort=False, # Should already be sorted\n )\n\n # Start by populating `result` with footer samples\n for i, path in enumerate(paths):\n result[path] = {(footer_starts[i], footer_ends[i]): footer_samples[i]}\n\n # Transfer the data byte-ranges into local memory\n _transfer_ranges(fs, result, data_paths, data_starts, data_ends)\n\n # Add b"PAR1" to header if necessary\n if add_header_magic:\n _add_header_magic(result)\n\n return result\n\n\ndef _get_parquet_byte_ranges_from_metadata(\n metadata,\n fs,\n engine,\n columns=None,\n row_groups=None,\n max_gap=64_000,\n max_block=256_000_000,\n):\n """Simplified version of `_get_parquet_byte_ranges` for\n the case that an engine-specific `metadata` object is\n provided, and the remote footer metadata does not need to\n be transferred before calculating the required byte ranges.\n """\n\n # Use "engine" to collect data byte ranges\n data_paths, data_starts, data_ends = engine._parquet_byte_ranges(\n columns,\n row_groups=row_groups,\n metadata=metadata,\n )\n\n # Merge adjacent offset ranges\n data_paths, data_starts, data_ends = merge_offset_ranges(\n data_paths,\n data_starts,\n data_ends,\n max_gap=max_gap,\n max_block=max_block,\n sort=False, # Should be sorted\n )\n\n # Transfer the data byte-ranges into local memory\n result = {fn: {} for fn in list(set(data_paths))}\n _transfer_ranges(fs, result, data_paths, data_starts, data_ends)\n\n # Add b"PAR1" to header\n _add_header_magic(result)\n\n return result\n\n\ndef _transfer_ranges(fs, blocks, paths, starts, ends):\n # Use cat_ranges to gather the data byte_ranges\n ranges = (paths, starts, ends)\n for path, start, stop, data in zip(*ranges, fs.cat_ranges(*ranges)):\n blocks[path][(start, stop)] = data\n\n\ndef _add_header_magic(data):\n # Add b"PAR1" to file headers\n for path in list(data.keys()):\n add_magic = True\n for k in data[path]:\n if k[0] == 0 and k[1] >= 4:\n add_magic = False\n break\n if add_magic:\n data[path][(0, 4)] = b"PAR1"\n\n\ndef _set_engine(engine_str):\n # Define a list of parquet engines to try\n if engine_str == "auto":\n try_engines = ("fastparquet", "pyarrow")\n elif not isinstance(engine_str, str):\n raise ValueError(\n "Failed to set parquet engine! 
"\n "Please pass 'fastparquet', 'pyarrow', or 'auto'"\n )\n elif engine_str not in ("fastparquet", "pyarrow"):\n raise ValueError(f"{engine_str} engine not supported by `fsspec.parquet`")\n else:\n try_engines = [engine_str]\n\n # Try importing the engines in `try_engines`,\n # and choose the first one that succeeds\n for engine in try_engines:\n try:\n if engine == "fastparquet":\n return FastparquetEngine()\n elif engine == "pyarrow":\n return PyarrowEngine()\n except ImportError:\n pass\n\n # Raise an error if a supported parquet engine\n # was not found\n raise ImportError(\n f"The following parquet engines are not installed "\n f"in your python environment: {try_engines}."\n f"Please install 'fastparquert' or 'pyarrow' to "\n f"utilize the `fsspec.parquet` module."\n )\n\n\nclass FastparquetEngine:\n # The purpose of the FastparquetEngine class is\n # to check if fastparquet can be imported (on initialization)\n # and to define a `_parquet_byte_ranges` method. In the\n # future, this class may also be used to define other\n # methods/logic that are specific to fastparquet.\n\n def __init__(self):\n import fastparquet as fp\n\n self.fp = fp\n\n def _row_group_filename(self, row_group, pf):\n return pf.row_group_filename(row_group)\n\n def _parquet_byte_ranges(\n self,\n columns,\n row_groups=None,\n metadata=None,\n footer=None,\n footer_start=None,\n ):\n # Initialize offset ranges and define ParqetFile metadata\n pf = metadata\n data_paths, data_starts, data_ends = [], [], []\n if pf is None:\n pf = self.fp.ParquetFile(io.BytesIO(footer))\n\n # Convert columns to a set and add any index columns\n # specified in the pandas metadata (just in case)\n column_set = None if columns is None else set(columns)\n if column_set is not None and hasattr(pf, "pandas_metadata"):\n md_index = [\n ind\n for ind in pf.pandas_metadata.get("index_columns", [])\n # Ignore RangeIndex information\n if not isinstance(ind, dict)\n ]\n column_set |= set(md_index)\n\n # Check if row_groups is a list of integers\n # or a list of row-group metadata\n if row_groups and not isinstance(row_groups[0], int):\n # Input row_groups contains row-group metadata\n row_group_indices = None\n else:\n # Input row_groups contains row-group indices\n row_group_indices = row_groups\n row_groups = pf.row_groups\n\n # Loop through column chunks to add required byte ranges\n for r, row_group in enumerate(row_groups):\n # Skip this row-group if we are targeting\n # specific row-groups\n if row_group_indices is None or r in row_group_indices:\n # Find the target parquet-file path for `row_group`\n fn = self._row_group_filename(row_group, pf)\n\n for column in row_group.columns:\n name = column.meta_data.path_in_schema[0]\n # Skip this column if we are targeting a\n # specific columns\n if column_set is None or name in column_set:\n file_offset0 = column.meta_data.dictionary_page_offset\n if file_offset0 is None:\n file_offset0 = column.meta_data.data_page_offset\n num_bytes = column.meta_data.total_compressed_size\n if footer_start is None or file_offset0 < footer_start:\n data_paths.append(fn)\n data_starts.append(file_offset0)\n data_ends.append(\n min(\n file_offset0 + num_bytes,\n footer_start or (file_offset0 + num_bytes),\n )\n )\n\n if metadata:\n # The metadata in this call may map to multiple\n # file paths. 
Need to include `data_paths`\n return data_paths, data_starts, data_ends\n return data_starts, data_ends\n\n\nclass PyarrowEngine:\n # The purpose of the PyarrowEngine class is\n # to check if pyarrow can be imported (on initialization)\n # and to define a `_parquet_byte_ranges` method. In the\n # future, this class may also be used to define other\n # methods/logic that are specific to pyarrow.\n\n def __init__(self):\n import pyarrow.parquet as pq\n\n self.pq = pq\n\n def _row_group_filename(self, row_group, metadata):\n raise NotImplementedError\n\n def _parquet_byte_ranges(\n self,\n columns,\n row_groups=None,\n metadata=None,\n footer=None,\n footer_start=None,\n ):\n if metadata is not None:\n raise ValueError("metadata input not supported for PyarrowEngine")\n\n data_starts, data_ends = [], []\n md = self.pq.ParquetFile(io.BytesIO(footer)).metadata\n\n # Convert columns to a set and add any index columns\n # specified in the pandas metadata (just in case)\n column_set = None if columns is None else set(columns)\n if column_set is not None:\n schema = md.schema.to_arrow_schema()\n has_pandas_metadata = (\n schema.metadata is not None and b"pandas" in schema.metadata\n )\n if has_pandas_metadata:\n md_index = [\n ind\n for ind in json.loads(\n schema.metadata[b"pandas"].decode("utf8")\n ).get("index_columns", [])\n # Ignore RangeIndex information\n if not isinstance(ind, dict)\n ]\n column_set |= set(md_index)\n\n # Loop through column chunks to add required byte ranges\n for r in range(md.num_row_groups):\n # Skip this row-group if we are targeting\n # specific row-groups\n if row_groups is None or r in row_groups:\n row_group = md.row_group(r)\n for c in range(row_group.num_columns):\n column = row_group.column(c)\n name = column.path_in_schema\n # Skip this column if we are targeting a\n # specific columns\n split_name = name.split(".")[0]\n if (\n column_set is None\n or name in column_set\n or split_name in column_set\n ):\n file_offset0 = column.dictionary_page_offset\n if file_offset0 is None:\n file_offset0 = column.data_page_offset\n num_bytes = column.total_compressed_size\n if file_offset0 < footer_start:\n data_starts.append(file_offset0)\n data_ends.append(\n min(file_offset0 + num_bytes, footer_start)\n )\n return data_starts, data_ends\n
|
.venv\Lib\site-packages\fsspec\parquet.py
|
parquet.py
|
Python
| 19,448 | 0.95 | 0.190388 | 0.192469 |
vue-tools
| 915 |
2023-11-01T14:30:41.988059
|
MIT
| false |
bf5714707984da5ddfc12998deb596df
|
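For context, the `open_parquet_file` helper whose body appears in the record above returns a file-like object whose "parts" cache already holds the byte ranges needed for the requested columns and row groups. A minimal, hedged usage sketch follows; the bucket path and column names are placeholders, and it assumes `pyarrow` is installed so the `auto` engine can resolve.

```python
# Hypothetical usage of fsspec.parquet.open_parquet_file; the path and
# column names below are placeholders, not real data.
import pyarrow.parquet as pq

from fsspec.parquet import open_parquet_file

with open_parquet_file(
    "s3://example-bucket/data.parquet",  # assumed remote dataset
    columns=["a", "b"],                  # only these column chunks are fetched
    row_groups=[0],                      # restrict transfers to row group 0
    engine="auto",                       # resolves to fastparquet or pyarrow
) as f:
    # Reads are served from the pre-populated "parts" cache where possible,
    # so no extra round trips are needed for the selected byte ranges.
    table = pq.read_table(f, columns=["a", "b"])
```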
from collections import deque\n\n\nclass Transaction:\n """Filesystem transaction write context\n\n Gathers files for deferred commit or discard, so that several write\n operations can be finalized semi-atomically. This works by having this\n instance as the ``.transaction`` attribute of the given filesystem\n """\n\n def __init__(self, fs, **kwargs):\n """\n Parameters\n ----------\n fs: FileSystem instance\n """\n self.fs = fs\n self.files = deque()\n\n def __enter__(self):\n self.start()\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n """End transaction and commit, if exit is not due to exception"""\n # only commit if there was no exception\n self.complete(commit=exc_type is None)\n if self.fs:\n self.fs._intrans = False\n self.fs._transaction = None\n self.fs = None\n\n def start(self):\n """Start a transaction on this FileSystem"""\n self.files = deque() # clean up after previous failed completions\n self.fs._intrans = True\n\n def complete(self, commit=True):\n """Finish transaction: commit or discard all deferred files"""\n while self.files:\n f = self.files.popleft()\n if commit:\n f.commit()\n else:\n f.discard()\n self.fs._intrans = False\n self.fs._transaction = None\n self.fs = None\n\n\nclass FileActor:\n def __init__(self):\n self.files = []\n\n def commit(self):\n for f in self.files:\n f.commit()\n self.files.clear()\n\n def discard(self):\n for f in self.files:\n f.discard()\n self.files.clear()\n\n def append(self, f):\n self.files.append(f)\n\n\nclass DaskTransaction(Transaction):\n def __init__(self, fs):\n """\n Parameters\n ----------\n fs: FileSystem instance\n """\n import distributed\n\n super().__init__(fs)\n client = distributed.default_client()\n self.files = client.submit(FileActor, actor=True).result()\n\n def complete(self, commit=True):\n """Finish transaction: commit or discard all deferred files"""\n if commit:\n self.files.commit().result()\n else:\n self.files.discard().result()\n self.fs._intrans = False\n self.fs = None\n
|
.venv\Lib\site-packages\fsspec\transaction.py
|
transaction.py
|
Python
| 2,398 | 0.95 | 0.255556 | 0.013699 |
awesome-app
| 500 |
2025-02-22T16:29:45.176941
|
Apache-2.0
| false |
e68be6a1d3bccd50f0af7a966eaf4581
|
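The `Transaction` context above backs `fs.transaction` on `AbstractFileSystem`: files opened for writing are queued and only committed when the block exits without an exception. A small sketch under the assumption that the in-memory filesystem is used (any writable fsspec filesystem would do; the paths are illustrative):

```python
# Sketch: deferred commit via the filesystem's transaction context.
# The "memory" protocol and paths are illustrative choices.
import fsspec

fs = fsspec.filesystem("memory")

with fs.transaction:  # Transaction.start() on entry; commit/discard on exit
    with fs.open("/staging/part-0.txt", "wb") as f:
        f.write(b"first file")
    with fs.open("/staging/part-1.txt", "wb") as f:
        f.write(b"second file")
# Both writes were committed together because no exception was raised.
print(fs.ls("/staging", detail=False))
```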
# file generated by setuptools-scm\n# don't change, don't track in version control\n\n__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]\n\nTYPE_CHECKING = False\nif TYPE_CHECKING:\n from typing import Tuple\n from typing import Union\n\n VERSION_TUPLE = Tuple[Union[int, str], ...]\nelse:\n VERSION_TUPLE = object\n\nversion: str\n__version__: str\n__version_tuple__: VERSION_TUPLE\nversion_tuple: VERSION_TUPLE\n\n__version__ = version = '2025.3.0'\n__version_tuple__ = version_tuple = (2025, 3, 0)\n
|
.venv\Lib\site-packages\fsspec\_version.py
|
_version.py
|
Python
| 517 | 0.95 | 0.047619 | 0.125 |
vue-tools
| 778 |
2023-09-08T10:02:16.959219
|
Apache-2.0
| false |
dffb41fd836090efd3b92d4c42d3a5d5
|
from importlib.metadata import entry_points\n\nfrom . import caching\nfrom ._version import __version__ # noqa: F401\nfrom .callbacks import Callback\nfrom .compression import available_compressions\nfrom .core import get_fs_token_paths, open, open_files, open_local, url_to_fs\nfrom .exceptions import FSTimeoutError\nfrom .mapping import FSMap, get_mapper\nfrom .registry import (\n available_protocols,\n filesystem,\n get_filesystem_class,\n register_implementation,\n registry,\n)\nfrom .spec import AbstractFileSystem\n\n__all__ = [\n "AbstractFileSystem",\n "FSTimeoutError",\n "FSMap",\n "filesystem",\n "register_implementation",\n "get_filesystem_class",\n "get_fs_token_paths",\n "get_mapper",\n "open",\n "open_files",\n "open_local",\n "registry",\n "caching",\n "Callback",\n "available_protocols",\n "available_compressions",\n "url_to_fs",\n]\n\n\ndef process_entries():\n if entry_points is not None:\n try:\n eps = entry_points()\n except TypeError:\n pass # importlib-metadata < 0.8\n else:\n if hasattr(eps, "select"): # Python 3.10+ / importlib_metadata >= 3.9.0\n specs = eps.select(group="fsspec.specs")\n else:\n specs = eps.get("fsspec.specs", [])\n registered_names = {}\n for spec in specs:\n err_msg = f"Unable to load filesystem from {spec}"\n name = spec.name\n if name in registered_names:\n continue\n registered_names[name] = True\n register_implementation(\n name,\n spec.value.replace(":", "."),\n errtxt=err_msg,\n # We take our implementations as the ones to overload with if\n # for some reason we encounter some, may be the same, already\n # registered\n clobber=True,\n )\n\n\nprocess_entries()\n
|
.venv\Lib\site-packages\fsspec\__init__.py
|
__init__.py
|
Python
| 1,998 | 0.95 | 0.115942 | 0.047619 |
node-utils
| 764 |
2025-04-27T18:31:47.778102
|
MIT
| false |
d92780d32526c9f76c98ea7d73ebf66a
|
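The `process_entries()` hook in the record above registers filesystem implementations advertised under the `fsspec.specs` entry-point group. The same effect can be achieved manually with `register_implementation`; the protocol name and import path below are hypothetical.

```python
# Sketch: manual equivalent of one entry-point registration.
# "myproto" and "mypackage.filesystems.MyProtoFileSystem" are made-up names.
from fsspec import register_implementation

register_implementation(
    "myproto",
    "mypackage.filesystems.MyProtoFileSystem",  # resolved lazily on first use
    errtxt="Unable to load filesystem for 'myproto'",
    clobber=True,  # mirror process_entries(), which overwrites duplicates
)

# fsspec.filesystem("myproto") would import the class only at that point and
# raise ImportError with `errtxt` if the backing package is missing.
```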
import asyncio\nimport functools\nimport inspect\n\nfrom fsspec.asyn import AsyncFileSystem, running_async\n\n\ndef async_wrapper(func, obj=None):\n """\n Wraps a synchronous function to make it awaitable.\n\n Parameters\n ----------\n func : callable\n The synchronous function to wrap.\n obj : object, optional\n The instance to bind the function to, if applicable.\n\n Returns\n -------\n coroutine\n An awaitable version of the function.\n """\n\n @functools.wraps(func)\n async def wrapper(*args, **kwargs):\n return await asyncio.to_thread(func, *args, **kwargs)\n\n return wrapper\n\n\nclass AsyncFileSystemWrapper(AsyncFileSystem):\n """\n A wrapper class to convert a synchronous filesystem into an asynchronous one.\n\n This class takes an existing synchronous filesystem implementation and wraps all\n its methods to provide an asynchronous interface.\n\n Parameters\n ----------\n sync_fs : AbstractFileSystem\n The synchronous filesystem instance to wrap.\n """\n\n protocol = "async_wrapper"\n cachable = False\n\n def __init__(self, fs, *args, asynchronous=None, **kwargs):\n if asynchronous is None:\n asynchronous = running_async()\n super().__init__(*args, asynchronous=asynchronous, **kwargs)\n self.sync_fs = fs\n self.protocol = self.sync_fs.protocol\n self._wrap_all_sync_methods()\n\n @property\n def fsid(self):\n return f"async_{self.sync_fs.fsid}"\n\n def _wrap_all_sync_methods(self):\n """\n Wrap all synchronous methods of the underlying filesystem with asynchronous versions.\n """\n excluded_methods = {"open"}\n for method_name in dir(self.sync_fs):\n if method_name.startswith("_") or method_name in excluded_methods:\n continue\n\n attr = inspect.getattr_static(self.sync_fs, method_name)\n if isinstance(attr, property):\n continue\n\n method = getattr(self.sync_fs, method_name)\n if callable(method) and not asyncio.iscoroutinefunction(method):\n async_method = async_wrapper(method, obj=self)\n setattr(self, f"_{method_name}", async_method)\n\n @classmethod\n def wrap_class(cls, sync_fs_class):\n """\n Create a new class that can be used to instantiate an AsyncFileSystemWrapper\n with lazy instantiation of the underlying synchronous filesystem.\n\n Parameters\n ----------\n sync_fs_class : type\n The class of the synchronous filesystem to wrap.\n\n Returns\n -------\n type\n A new class that wraps the provided synchronous filesystem class.\n """\n\n class GeneratedAsyncFileSystemWrapper(cls):\n def __init__(self, *args, **kwargs):\n sync_fs = sync_fs_class(*args, **kwargs)\n super().__init__(sync_fs)\n\n GeneratedAsyncFileSystemWrapper.__name__ = (\n f"Async{sync_fs_class.__name__}Wrapper"\n )\n return GeneratedAsyncFileSystemWrapper\n
|
.venv\Lib\site-packages\fsspec\implementations\asyn_wrapper.py
|
asyn_wrapper.py
|
Python
| 3,082 | 0.85 | 0.242718 | 0 |
react-lib
| 944 |
2023-09-23T14:41:52.890635
|
Apache-2.0
| false |
5dabd15ec88c26fe84014f87d76017a1
|
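`AsyncFileSystemWrapper` above exposes each synchronous method of the wrapped filesystem as an awaitable, underscore-prefixed coroutine that runs the blocking call in a worker thread. A hedged sketch using fsspec's local filesystem (the listed path is a placeholder):

```python
# Sketch: wrap a synchronous filesystem and await one generated coroutine.
import asyncio

from fsspec.implementations.asyn_wrapper import AsyncFileSystemWrapper
from fsspec.implementations.local import LocalFileSystem

async def main():
    afs = AsyncFileSystemWrapper(LocalFileSystem(), asynchronous=True)
    # `ls` on the sync filesystem is wrapped as `_ls`, which delegates to
    # asyncio.to_thread so the event loop is not blocked.
    print(await afs._ls(".", detail=False))

asyncio.run(main())
```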
from __future__ import annotations\n\nimport os\nimport pickle\nimport time\nfrom typing import TYPE_CHECKING\n\nfrom fsspec.utils import atomic_write\n\ntry:\n import ujson as json\nexcept ImportError:\n if not TYPE_CHECKING:\n import json\n\nif TYPE_CHECKING:\n from typing import Any, Dict, Iterator, Literal\n\n from typing_extensions import TypeAlias\n\n from .cached import CachingFileSystem\n\n Detail: TypeAlias = Dict[str, Any]\n\n\nclass CacheMetadata:\n """Cache metadata.\n\n All reading and writing of cache metadata is performed by this class,\n accessing the cached files and blocks is not.\n\n Metadata is stored in a single file per storage directory in JSON format.\n For backward compatibility, also reads metadata stored in pickle format\n which is converted to JSON when next saved.\n """\n\n def __init__(self, storage: list[str]):\n """\n\n Parameters\n ----------\n storage: list[str]\n Directories containing cached files, must be at least one. Metadata\n is stored in the last of these directories by convention.\n """\n if not storage:\n raise ValueError("CacheMetadata expects at least one storage location")\n\n self._storage = storage\n self.cached_files: list[Detail] = [{}]\n\n # Private attribute to force saving of metadata in pickle format rather than\n # JSON for use in tests to confirm can read both pickle and JSON formats.\n self._force_save_pickle = False\n\n def _load(self, fn: str) -> Detail:\n """Low-level function to load metadata from specific file"""\n try:\n with open(fn, "r") as f:\n loaded = json.load(f)\n except ValueError:\n with open(fn, "rb") as f:\n loaded = pickle.load(f)\n for c in loaded.values():\n if isinstance(c.get("blocks"), list):\n c["blocks"] = set(c["blocks"])\n return loaded\n\n def _save(self, metadata_to_save: Detail, fn: str) -> None:\n """Low-level function to save metadata to specific file"""\n if self._force_save_pickle:\n with atomic_write(fn) as f:\n pickle.dump(metadata_to_save, f)\n else:\n with atomic_write(fn, mode="w") as f:\n json.dump(metadata_to_save, f)\n\n def _scan_locations(\n self, writable_only: bool = False\n ) -> Iterator[tuple[str, str, bool]]:\n """Yield locations (filenames) where metadata is stored, and whether\n writable or not.\n\n Parameters\n ----------\n writable: bool\n Set to True to only yield writable locations.\n\n Returns\n -------\n Yields (str, str, bool)\n """\n n = len(self._storage)\n for i, storage in enumerate(self._storage):\n writable = i == n - 1\n if writable_only and not writable:\n continue\n yield os.path.join(storage, "cache"), storage, writable\n\n def check_file(\n self, path: str, cfs: CachingFileSystem | None\n ) -> Literal[False] | tuple[Detail, str]:\n """If path is in cache return its details, otherwise return ``False``.\n\n If the optional CachingFileSystem is specified then it is used to\n perform extra checks to reject possible matches, such as if they are\n too old.\n """\n for (fn, base, _), cache in zip(self._scan_locations(), self.cached_files):\n if path not in cache:\n continue\n detail = cache[path].copy()\n\n if cfs is not None:\n if cfs.check_files and detail["uid"] != cfs.fs.ukey(path):\n # Wrong file as determined by hash of file properties\n continue\n if cfs.expiry and time.time() - detail["time"] > cfs.expiry:\n # Cached file has expired\n continue\n\n fn = os.path.join(base, detail["fn"])\n if os.path.exists(fn):\n return detail, fn\n return False\n\n def clear_expired(self, expiry_time: int) -> tuple[list[str], bool]:\n """Remove expired metadata from the cache.\n\n 
Returns names of files corresponding to expired metadata and a boolean\n flag indicating whether the writable cache is empty. Caller is\n responsible for deleting the expired files.\n """\n expired_files = []\n for path, detail in self.cached_files[-1].copy().items():\n if time.time() - detail["time"] > expiry_time:\n fn = detail.get("fn", "")\n if not fn:\n raise RuntimeError(\n f"Cache metadata does not contain 'fn' for {path}"\n )\n fn = os.path.join(self._storage[-1], fn)\n expired_files.append(fn)\n self.cached_files[-1].pop(path)\n\n if self.cached_files[-1]:\n cache_path = os.path.join(self._storage[-1], "cache")\n self._save(self.cached_files[-1], cache_path)\n\n writable_cache_empty = not self.cached_files[-1]\n return expired_files, writable_cache_empty\n\n def load(self) -> None:\n """Load all metadata from disk and store in ``self.cached_files``"""\n cached_files = []\n for fn, _, _ in self._scan_locations():\n if os.path.exists(fn):\n # TODO: consolidate blocks here\n cached_files.append(self._load(fn))\n else:\n cached_files.append({})\n self.cached_files = cached_files or [{}]\n\n def on_close_cached_file(self, f: Any, path: str) -> None:\n """Perform side-effect actions on closing a cached file.\n\n The actual closing of the file is the responsibility of the caller.\n """\n # File must be writeble, so in self.cached_files[-1]\n c = self.cached_files[-1][path]\n if c["blocks"] is not True and len(c["blocks"]) * f.blocksize >= f.size:\n c["blocks"] = True\n\n def pop_file(self, path: str) -> str | None:\n """Remove metadata of cached file.\n\n If path is in the cache, return the filename of the cached file,\n otherwise return ``None``. Caller is responsible for deleting the\n cached file.\n """\n details = self.check_file(path, None)\n if not details:\n return None\n _, fn = details\n if fn.startswith(self._storage[-1]):\n self.cached_files[-1].pop(path)\n self.save()\n else:\n raise PermissionError(\n "Can only delete cached file in last, writable cache location"\n )\n return fn\n\n def save(self) -> None:\n """Save metadata to disk"""\n for (fn, _, writable), cache in zip(self._scan_locations(), self.cached_files):\n if not writable:\n continue\n\n if os.path.exists(fn):\n cached_files = self._load(fn)\n for k, c in cached_files.items():\n if k in cache:\n if c["blocks"] is True or cache[k]["blocks"] is True:\n c["blocks"] = True\n else:\n # self.cached_files[*][*]["blocks"] must continue to\n # point to the same set object so that updates\n # performed by MMapCache are propagated back to\n # self.cached_files.\n blocks = cache[k]["blocks"]\n blocks.update(c["blocks"])\n c["blocks"] = blocks\n c["time"] = max(c["time"], cache[k]["time"])\n c["uid"] = cache[k]["uid"]\n\n # Files can be added to cache after it was written once\n for k, c in cache.items():\n if k not in cached_files:\n cached_files[k] = c\n else:\n cached_files = cache\n cache = {k: v.copy() for k, v in cached_files.items()}\n for c in cache.values():\n if isinstance(c["blocks"], set):\n c["blocks"] = list(c["blocks"])\n self._save(cache, fn)\n self.cached_files[-1] = cached_files\n\n def update_file(self, path: str, detail: Detail) -> None:\n """Update metadata for specific file in memory, do not save"""\n self.cached_files[-1][path] = detail\n
|
.venv\Lib\site-packages\fsspec\implementations\cache_metadata.py
|
cache_metadata.py
|
Python
| 8,511 | 0.95 | 0.24569 | 0.05641 |
react-lib
| 598 |
2025-01-28T13:41:58.398804
|
GPL-3.0
| false |
642c264bdc3f7505a07a2b2da35757c0
|
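`CacheMetadata` is normally driven by `CachingFileSystem`, but its public methods can be exercised directly. The sketch below uses a temporary storage directory; the detail dictionary mimics the keys referenced in the class (`fn`, `blocks`, `time`, `uid`) and is an assumption rather than a documented schema.

```python
# Sketch: round-trip cache metadata through load/update_file/save.
import tempfile
import time

from fsspec.implementations.cache_metadata import CacheMetadata

storage = tempfile.mkdtemp()
md = CacheMetadata([storage])  # last (and only) location is writable
md.load()                      # nothing on disk yet -> starts empty

md.update_file(
    "memory://remote/file.bin",
    {"fn": "abc123", "blocks": True, "time": time.time(), "uid": "0"},
)
md.save()  # persisted as JSON in <storage>/cache

# check_file() returns False here because the cached data file "abc123"
# itself was never written into the storage directory.
print(md.check_file("memory://remote/file.bin", None))
```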
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\arrow.cpython-313.pyc
|
arrow.cpython-313.pyc
|
Other
| 13,741 | 0.95 | 0.047619 | 0 |
vue-tools
| 922 |
2023-07-22T21:44:55.145058
|
BSD-3-Clause
| false |
a66c74bcb6bcb6eeee5f3de1618a6c59
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\asyn_wrapper.cpython-313.pyc
|
asyn_wrapper.cpython-313.pyc
|
Other
| 4,789 | 0.95 | 0.152778 | 0.030769 |
python-kit
| 293 |
2024-11-24T14:01:15.721436
|
GPL-3.0
| false |
aeb27049aa01c7c48e6e74592ae29315
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\cached.cpython-313.pyc
|
cached.cpython-313.pyc
|
Other
| 43,907 | 0.95 | 0.037344 | 0.008969 |
react-lib
| 946 |
2023-09-05T00:30:03.259485
|
BSD-3-Clause
| false |
e3901586b2697d0a423511b05cf7ce7b
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\cache_mapper.cpython-313.pyc
|
cache_mapper.cpython-313.pyc
|
Other
| 4,141 | 0.95 | 0.078947 | 0 |
react-lib
| 405 |
2024-02-21T17:48:21.251321
|
BSD-3-Clause
| false |
c79259a6d0614758a4d9355962f95613
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\cache_metadata.cpython-313.pyc
|
cache_metadata.cpython-313.pyc
|
Other
| 11,215 | 0.95 | 0.073394 | 0 |
vue-tools
| 980 |
2024-09-08T06:01:04.908542
|
GPL-3.0
| false |
83b2f8e0370490b86d36ec8a1a4ee4b2
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\dask.cpython-313.pyc
|
dask.cpython-313.pyc
|
Other
| 7,315 | 0.8 | 0.017241 | 0.018182 |
vue-tools
| 748 |
2025-01-08T13:00:42.261038
|
BSD-3-Clause
| false |
65303df388bde7859603419763fa8c93
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\data.cpython-313.pyc
|
data.cpython-313.pyc
|
Other
| 3,091 | 0.8 | 0.064516 | 0 |
vue-tools
| 50 |
2025-05-08T17:12:35.346027
|
GPL-3.0
| false |
d5ca9a00867f8b771e4c99dd5d05508e
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\dbfs.cpython-313.pyc
|
dbfs.cpython-313.pyc
|
Other
| 17,308 | 0.95 | 0.057627 | 0 |
python-kit
| 242 |
2023-12-07T13:56:05.741803
|
BSD-3-Clause
| false |
bd86192c114a109b6efed442a2cb7c9d
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\dirfs.cpython-313.pyc
|
dirfs.cpython-313.pyc
|
Other
| 25,002 | 0.8 | 0.012579 | 0 |
python-kit
| 201 |
2025-04-16T22:42:51.712694
|
MIT
| false |
62dcf0d7c9618d02ab79170e7be12748
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\ftp.cpython-313.pyc
|
ftp.cpython-313.pyc
|
Other
| 17,782 | 0.8 | 0.038889 | 0.04878 |
python-kit
| 612 |
2024-07-25T11:00:58.535356
|
Apache-2.0
| false |
a69a90b03d7817520620c323db46dfd1
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\git.cpython-313.pyc
|
git.cpython-313.pyc
|
Other
| 6,026 | 0.95 | 0.070423 | 0 |
vue-tools
| 658 |
2024-01-22T10:59:05.255465
|
GPL-3.0
| false |
0181c4545bada6a057b433ac30fc6b5c
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\github.cpython-313.pyc
|
github.cpython-313.pyc
|
Other
| 11,113 | 0.95 | 0.019481 | 0.007143 |
python-kit
| 931 |
2025-06-20T17:41:27.715126
|
MIT
| false |
e857b66026980929ad80f73d3cd31eb9
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\http.cpython-313.pyc
|
http.cpython-313.pyc
|
Other
| 42,004 | 0.8 | 0.034803 | 0.002519 |
node-utils
| 435 |
2024-11-19T04:49:25.556481
|
Apache-2.0
| false |
4a7aae91a5fa2e62be40cc27075b9f3f
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\http_sync.cpython-313.pyc
|
http_sync.cpython-313.pyc
|
Other
| 40,493 | 0.8 | 0.038043 | 0.006024 |
awesome-app
| 64 |
2023-10-17T01:22:33.026772
|
GPL-3.0
| false |
7a0d50acf5ce20217aa16681e22c93f4
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\jupyter.cpython-313.pyc
|
jupyter.cpython-313.pyc
|
Other
| 6,540 | 0.8 | 0.016949 | 0 |
node-utils
| 704 |
2024-12-08T01:14:04.550526
|
GPL-3.0
| false |
3b38fbf8190e62d733500e0670d120e8
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\libarchive.cpython-313.pyc
|
libarchive.cpython-313.pyc
|
Other
| 8,689 | 0.95 | 0.037594 | 0.008333 |
node-utils
| 583 |
2024-12-08T01:41:51.121636
|
GPL-3.0
| false |
e076996b15ebc81ac61474ddd0c74e96
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\local.cpython-313.pyc
|
local.cpython-313.pyc
|
Other
| 24,845 | 0.8 | 0.035971 | 0 |
vue-tools
| 366 |
2025-06-01T15:53:13.093540
|
GPL-3.0
| false |
36137a68dae09492fac2574eed1dcbcd
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\memory.cpython-313.pyc
|
memory.cpython-313.pyc
|
Other
| 14,759 | 0.95 | 0.036036 | 0 |
python-kit
| 888 |
2024-04-01T15:07:53.155715
|
BSD-3-Clause
| false |
afec6f40a9657b58eb84a6360eb9bf55
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\reference.cpython-313.pyc
|
reference.cpython-313.pyc
|
Other
| 65,343 | 0.75 | 0.054101 | 0 |
react-lib
| 205 |
2023-08-16T03:53:44.021511
|
Apache-2.0
| false |
a09cc8c396bca6dee9d5d499a0e611ae
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\sftp.cpython-313.pyc
|
sftp.cpython-313.pyc
|
Other
| 9,835 | 0.8 | 0.058824 | 0.013514 |
awesome-app
| 749 |
2025-04-23T09:05:14.912069
|
GPL-3.0
| false |
6a583d67cc2f3583922760e34c6cb5de
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\smb.cpython-313.pyc
|
smb.cpython-313.pyc
|
Other
| 18,939 | 0.95 | 0.066079 | 0.024752 |
vue-tools
| 132 |
2025-06-23T11:14:03.347293
|
Apache-2.0
| false |
21a0b2eb2ea2c07e2bee9eea7a6750b1
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\tar.cpython-313.pyc
|
tar.cpython-313.pyc
|
Other
| 4,851 | 0.8 | 0 | 0 |
vue-tools
| 554 |
2025-05-17T14:34:44.491214
|
Apache-2.0
| false |
43b2caafc8c9e15836fc90352ecef1b9
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\webhdfs.cpython-313.pyc
|
webhdfs.cpython-313.pyc
|
Other
| 22,331 | 0.95 | 0.05948 | 0 |
react-lib
| 413 |
2025-05-18T03:16:03.362220
|
MIT
| false |
b7b3b3a493ead57cabfd968c43dd52f4
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\zip.cpython-313.pyc
|
zip.cpython-313.pyc
|
Other
| 8,393 | 0.95 | 0.054795 | 0 |
awesome-app
| 862 |
2025-05-17T02:49:06.055134
|
GPL-3.0
| false |
201588d68a1a3dc66b288ce7df2cc1fc
|
\n\n
|
.venv\Lib\site-packages\fsspec\implementations\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 197 | 0.7 | 0 | 0 |
node-utils
| 983 |
2023-07-27T19:51:27.521044
|
BSD-3-Clause
| false |
e62b816705d14644912bbfc8ad2980df
|