id
int64 0
843k
| repository_name
stringlengths 7
55
| file_path
stringlengths 9
332
| class_name
stringlengths 3
290
| human_written_code
stringlengths 12
4.36M
| class_skeleton
stringlengths 19
2.2M
| total_program_units
int64 1
9.57k
| total_doc_str
int64 0
4.2k
| AvgCountLine
float64 0
7.89k
| AvgCountLineBlank
float64 0
300
| AvgCountLineCode
float64 0
7.89k
| AvgCountLineComment
float64 0
7.89k
| AvgCyclomatic
float64 0
130
| CommentToCodeRatio
float64 0
176
| CountClassBase
float64 0
48
| CountClassCoupled
float64 0
589
| CountClassCoupledModified
float64 0
581
| CountClassDerived
float64 0
5.37k
| CountDeclInstanceMethod
float64 0
4.2k
| CountDeclInstanceVariable
float64 0
299
| CountDeclMethod
float64 0
4.2k
| CountDeclMethodAll
float64 0
4.2k
| CountLine
float64 1
115k
| CountLineBlank
float64 0
9.01k
| CountLineCode
float64 0
94.4k
| CountLineCodeDecl
float64 0
46.1k
| CountLineCodeExe
float64 0
91.3k
| CountLineComment
float64 0
27k
| CountStmt
float64 1
93.2k
| CountStmtDecl
float64 0
46.1k
| CountStmtExe
float64 0
90.2k
| MaxCyclomatic
float64 0
759
| MaxInheritanceTree
float64 0
16
| MaxNesting
float64 0
34
| SumCyclomatic
float64 0
6k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
146,148 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/docs/source/tutorial2_scripts/type_mangling.py
|
type_mangling.MyVar
|
class MyVar(MySymbol, Var):
def __init__(self, *args, **kwargs):
MySymbol.__init__(self, *args, **kwargs)
Var.__init__(self, *args, **kwargs)
|
class MyVar(MySymbol, Var):
def __init__(self, *args, **kwargs):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 4 | 5 | 1 | 4 | 2 | 2 | 0 | 4 | 2 | 2 | 1 | 2 | 0 | 1 |
146,149 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/docs/source/tutorial2_scripts/type_mangling.py
|
type_mangling.MySymbol
|
class MySymbol(Symbol):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def show_name(self):
return '.'.join(self.get_scope_names()) + '.' + self.name
def internal_name(self):
return '_'.join(self.get_scope_names()) + '.' + self.name
|
class MySymbol(Symbol):
def __init__(self, *args, **kwargs):
pass
def show_name(self):
pass
def internal_name(self):
pass
| 4 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 0 | 2 | 3 | 0 | 3 | 3 | 10 | 3 | 7 | 4 | 3 | 0 | 7 | 4 | 3 | 1 | 1 | 0 | 3 |
146,150 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/docs/source/tutorial2_scripts/type_mangling.py
|
type_mangling.MyFun
|
class MyFun(MySymbol, Fun):
def __init__(self, *args, **kwargs):
MySymbol.__init__(self, *args, **kwargs)
Fun.__init__(self, *args, **kwargs)
def show_name(self):
paramstr = ''
if self.tparams is not None:
paramstr = ', '.join(self.tparams)
return super().show_name() + '(' + paramstr + ')'
|
class MyFun(MySymbol, Fun):
def __init__(self, *args, **kwargs):
pass
def show_name(self):
pass
| 3 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 2 | 1 | 0 | 0 | 2 | 0 | 2 | 5 | 11 | 2 | 9 | 4 | 6 | 0 | 9 | 4 | 6 | 2 | 2 | 1 | 3 |
146,151 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Unary
|
class Unary(Expr):
def __init__(self, op: Operator, expr: Expr):
super().__init__(op, [expr])
def to_tl4t(self):
return fmt.sep("", [self.call_expr.to_tl4t(), self.p[0].to_tl4t()])
|
class Unary(Expr):
def __init__(self, op: Operator, expr: Expr):
pass
def to_tl4t(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 3 | 2 | 0 | 2 | 0 | 2 | 40 | 6 | 1 | 5 | 3 | 2 | 0 | 5 | 3 | 2 | 1 | 5 | 0 | 2 |
146,152 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_09_until.Until
|
class Until(grammar.Grammar):
entry = "test"
grammar = """
test = [ 'a'..'z'+ ->'A'..'Z' 'A'..'Z'+ ]
"""
|
class Until(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0 | 5 | 2 | 4 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,153 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_08_lookahead.LookAhead
|
class LookAhead(grammar.Grammar):
entry = "test"
grammar = """
test = [ !!["toto"| '0'..'9'] ["toto"| '0'..'9' ['0'..'9']+ ] | !'0'..'9' ~' ' ]
"""
|
class LookAhead(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0 | 5 | 2 | 4 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,154 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_07_string.String
|
class String(grammar.Grammar):
entry = "test"
grammar = """
test = [ '"' [ ~["\\\\"|'"'] | "\\\\" ~' ']* '"' ]
"""
|
class String(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0 | 5 | 2 | 4 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,155 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_06_complement.Complement
|
class Complement(grammar.Grammar):
entry = "test"
# grammar = """test = [ '"' [~"\\\\" | "\\\\" ~' ']* '"' ]
# """
grammar = """
test = [ [~'A']+ 'A' ]
"""
|
class Complement(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0 | 5 | 2 | 4 | 2 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,156 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_05_neg.Neg
|
class Neg(grammar.Grammar):
entry = "test"
grammar = """test = [ '=' !'=' ]
"""
|
class Neg(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,157 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/grammar_file.py
|
tests.grammar_file.GrammarFile_Test
|
class GrammarFile_Test(unittest.TestCase):
def test_01_dynparse(self):
txtbnf = grammar.from_string("""
plop =[ id:i #test_hook(_, i)]
""")
@meta.hook(txtbnf)
def test_hook(self, l, i):
self.test.assertEqual(self.value(i), "cool")
l.node = self.value(i)
return True
itxt = txtbnf()
itxt.test = self
res = itxt.parse("cool", "plop")
self.assertEqual(res.node, "cool")
def test_02_json(self):
"""
Test JSON
"""
JSON = grammar.from_file(os.getcwd() + "/tests/bnf/json.bnf", 'json')
# add hook to the dynamically created base class
@meta.hook(JSON)
def is_num(self, ast, n):
ast.node = float(self.value(n))
return True
@meta.hook(JSON)
def is_str(self, ast, s):
ast.node = self.value(s).strip('"')
return True
@meta.hook(JSON)
def is_bool(self, ast, b):
bval = self.value(b)
if bval == "true":
ast.node = True
if bval == "false":
ast.node = False
return True
@meta.hook(JSON)
def is_none(self, ast):
ast.node = None
return True
@meta.hook(JSON)
def is_pair(self, ast, s, v):
ast.node = (self.value(s).strip('"'), v.node)
return True
@meta.hook(JSON)
def is_array(self, ast):
ast.node = []
return True
@meta.hook(JSON)
def add_item(self, ast, item):
ast.node.append(item.node)
return True
@meta.hook(JSON)
def is_dict(self, ast):
ast.node = {}
return True
@meta.hook(JSON)
def add_kv(self, ast, item):
ast.node[item.node[0]] = item.node[1]
return True
json = JSON()
res = json.parse('{"test":12}')
self.assertEqual(res.node['test'], 12)
res = json.parse('{"test":12,"puf":[1,2,3]}')
self.assertEqual(res.node['puf'][1], 2)
res = json.parse('{"test":12,"puf":[1,2,3],"obj":{"flags":true}}')
self.assertTrue(res.node['obj']['flags'])
def test_03_tl4t_parse(self):
"""
Test TL4T
"""
test = TL4T()
res = test.parse("""
var a : int;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], DeclVar))
txt = res.to_tl4t()
self.assertEqual(str(txt), "var a : int;\n")
res = test.parse("""
fun a() : int;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], DeclFun))
self.assertTrue(res.body[0].t == 'int')
txt = res.to_tl4t()
self.assertEqual(str(txt), "fun a() : int;\n")
res = test.parse("""
fun a(x : str) : int;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], DeclFun))
self.assertTrue(isinstance(res.body[0].p[0], Param))
self.assertTrue(res.body[0].p[0].name, "x")
self.assertTrue(res.body[0].p[0].t, "int")
txt = res.to_tl4t()
self.assertEqual(str(txt), "fun a(x : str) : int;\n")
res = test.parse("""
fun a(x : str, y : int) : int;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], DeclFun))
self.assertTrue(isinstance(res.body[0].p[0], Param))
self.assertTrue(res.body[0].p[0].name, "x")
self.assertTrue(res.body[0].p[0].t, "int")
txt = res.to_tl4t()
self.assertEqual(str(txt), "fun a(x : str, y : int) : int;\n")
res = test.parse("""
fun a(x : str) : int
{
var z : toto;
}
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], DeclFun))
self.assertTrue(isinstance(res.body[0].p[0], Param))
self.assertTrue(res.body[0].p[0].name, "x")
self.assertTrue(res.body[0].p[0].t, "int")
self.assertTrue(isinstance(res.body[0].block.body[0], DeclVar))
self.assertTrue(res.body[0].block.body[0].name, "z")
self.assertTrue(res.body[0].block.body[0].t, "toto")
txt = res.to_tl4t()
self.assertEqual(
str(txt),
"fun a(x : str) : int\n{\n var z : toto;\n}"
)
res = test.parse("""
a = 42;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], ExprStmt))
txt = res.to_tl4t()
self.assertEqual(str(txt), "a = 42;\n")
res = test.parse("""
a = +--+42;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], ExprStmt))
txt = res.to_tl4t()
self.assertEqual(str(txt), "a = +--+42;\n")
res = test.parse("""
a = 12 - 42;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], ExprStmt))
txt = res.to_tl4t()
self.assertEqual(str(txt), "a = 12 - 42;\n")
res = test.parse("""
a = f(12, "blabla", z);
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], ExprStmt))
txt = res.to_tl4t()
self.assertEqual(str(txt), """a = f(12, "blabla", z);\n""")
res = test.parse("""
a = (7 - 8) * 43;
""")
self.assertTrue(res)
self.assertTrue(isinstance(res.body[0], ExprStmt))
txt = res.to_tl4t()
self.assertEqual(str(txt), """a = (7 - 8) * 43;\n""")
res = test.parse("""
a = (7 - 8) * 43 - 5;
""", "expr")
self.assertTrue(res)
res = test.parse("""
a = 1 < 2 || 3;
""", "expr")
self.assertTrue(res)
res = test.parse("""
a = 1 < 2 << 3;
""", "expr")
self.assertTrue(res)
def test_04_file_error(self):
"""
Test ERROR
"""
with self.assertRaises(error.Diagnostic) as pe:
T = grammar.from_file(
os.getcwd() + "/tests/bnf/error_bracket.bnf",
'source'
)
self.assertTrue(pe.exception, "Can't detect error in BNF")
self.assertEqual(
pe.exception.logs[0].msg,
"Expected ']'",
"Bad message in Error"
)
self.assertEqual(pe.exception.logs[0].location.line, 2, "Bad line")
self.assertEqual(pe.exception.logs[0].location.col, 7, "Bad col")
with self.assertRaises(error.Diagnostic) as pe:
T = grammar.from_file(
os.getcwd() + "/tests/bnf/error_bracket2.bnf",
'source'
)
self.assertTrue(pe.exception, "Can't detect error in BNF")
self.assertEqual(
pe.exception.logs[0].msg,
"Expected '['",
"Bad message in Error"
)
self.assertEqual(pe.exception.logs[0].location.line, 2, "Bad line")
self.assertEqual(pe.exception.logs[0].location.col, 1, "Bad col")
with self.assertRaises(error.Diagnostic) as pe:
T = grammar.from_file(
os.getcwd() + "/tests/bnf/error_rule.bnf",
'source'
)
self.assertTrue(pe.exception, "Can't detect error in BNF")
self.assertEqual(
pe.exception.logs[0].msg,
"Expected '='",
"Bad message in Error"
)
self.assertEqual(pe.exception.logs[0].location.line, 2, "Bad line")
self.assertEqual(pe.exception.logs[0].location.col, 1, "Bad col")
with self.assertRaises(error.Diagnostic) as pe:
T = grammar.from_file(
os.getcwd() + "/tests/bnf/error_bracket3.bnf",
'source'
)
self.assertTrue(pe.exception, "Can't detect error in BNF")
self.assertEqual(
pe.exception.logs[0].msg,
"Expected sequences",
"Bad message in Error"
)
self.assertEqual(pe.exception.logs[0].location.line, 1, "Bad line")
self.assertEqual(pe.exception.logs[0].location.col, 8, "Bad col")
with self.assertRaises(error.Diagnostic) as pe:
T = grammar.from_file(
os.getcwd() + "/tests/bnf/error_empty.bnf",
'source'
)
self.assertTrue(pe.exception, "Can't detect error in BNF")
self.assertEqual(
pe.exception.logs[0].msg,
"Parse error in 'directive' in EBNF bnf",
"Bad message in Error"
)
self.assertEqual(pe.exception.logs[0].location.line, 1, "Bad line")
self.assertEqual(pe.exception.logs[0].location.col, 7, "Bad col")
|
class GrammarFile_Test(unittest.TestCase):
def test_01_dynparse(self):
pass
@meta.hook(txtbnf)
def test_hook(self, l, i):
pass
def test_02_json(self):
'''
Test JSON
'''
pass
@meta.hook(JSON)
def is_num(self, ast, n):
pass
@meta.hook(JSON)
def is_str(self, ast, s):
pass
@meta.hook(JSON)
def is_bool(self, ast, b):
pass
@meta.hook(JSON)
def is_none(self, ast):
pass
@meta.hook(JSON)
def is_pair(self, ast, s, v):
pass
@meta.hook(JSON)
def is_array(self, ast):
pass
@meta.hook(JSON)
def add_item(self, ast, item):
pass
@meta.hook(JSON)
def is_dict(self, ast):
pass
@meta.hook(JSON)
def add_kv(self, ast, item):
pass
def test_03_tl4t_parse(self):
'''
Test TL4T
'''
pass
def test_04_file_error(self):
'''
Test ERROR
'''
pass
| 25 | 3 | 21 | 1 | 19 | 1 | 1 | 0.05 | 1 | 6 | 4 | 0 | 4 | 0 | 4 | 76 | 256 | 14 | 232 | 37 | 207 | 11 | 153 | 26 | 138 | 3 | 2 | 1 | 16 |
146,158 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_04_optional.Optional
|
class Optional(grammar.Grammar):
entry = "test"
grammar = """test = [ ['!']? 'A' | ['?']? 'B' ]
"""
|
class Optional(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,159 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/hooks.py
|
tests.hooks.Hooks_Test.test_01.parserExample
|
class parserExample(grammar.Grammar):
entry = "Example"
grammar = """
Example = [ id eof #setint(_, 12)
]
"""
|
class parserExample(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 1 | 6 | 2 | 5 | 1 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,160 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/hooks.py
|
tests.hooks.Hooks_Test.test_01.parserExample3
|
class parserExample3(grammar.Grammar):
entry = "Example"
grammar = """
Example = [ id:i eof #setcapture(_, i)
]
"""
|
class parserExample3(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 1 | 6 | 2 | 5 | 1 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,161 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/ast/match.py
|
match.Edge
|
class Edge:
""" Class that implement the state transition
used for state construction.
"""
def __init__(self, s: state.State):
self.s = s
self.next_edge = {}
def get_next_edge(self, m: MatchExpr):
# check if the MatchExpr is already handle by the state
if hasattr(m, 'is_in_state'):
sX = m.is_in_state(self.s)
if sX is not None:
# return the corresponding edge
return self.next_edge[id(sX)]
return None
|
class Edge:
''' Class that implement the state transition
used for state construction.
'''
def __init__(self, s: state.State):
pass
def get_next_edge(self, m: MatchExpr):
pass
| 3 | 1 | 6 | 0 | 5 | 1 | 2 | 0.5 | 0 | 2 | 2 | 0 | 2 | 2 | 2 | 2 | 16 | 1 | 10 | 6 | 7 | 5 | 10 | 6 | 7 | 3 | 0 | 2 | 4 |
146,162 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/codegen/c/cython.py
|
cython.GenState
|
class GenState:
def __init__(self):
self._lvlids = [0]
self._lvlid = 0
self._errids = [0]
self._errid = 0
def newScopeError(self):
self._lvlid += 1
self._lvlids.append(self._lvlid)
self._errid += 1
self._errids.append(self._errid)
def popScopeError(self):
self._lvlids.pop()
self._errids.pop()
def newScopeAlt(self):
self._lvlids.append(self.lvlid)
self._errid += 1
self._errids.append(self._errid)
def popScopeAlt(self):
self._lvlids.pop()
self._errids.pop()
@property
def lvlid(self):
if len(self._lvlids) > 1:
return self._lvlids[-1]
return 0
@property
def outid(self):
return self._lvlids[-2]
@property
def errid(self):
if len(self._errids) > 1:
return self._errids[-1]
return 0
@property
def outerrid(self):
return self._errids[-2]
def __repr__(self):
txt = "Lvl: [\n"
for l in self._lvlids:
txt += str(l) + ",\n"
txt += "]\nErr:[\n"
for e in self._errids:
txt += str(e) + ",\n"
txt += "]\n"
return txt
|
class GenState:
def __init__(self):
pass
def newScopeError(self):
pass
def popScopeError(self):
pass
def newScopeAlt(self):
pass
def popScopeAlt(self):
pass
@property
def lvlid(self):
pass
@property
def outid(self):
pass
@property
def errid(self):
pass
@property
def outerrid(self):
pass
def __repr__(self):
pass
| 15 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 10 | 4 | 10 | 10 | 55 | 9 | 46 | 22 | 31 | 0 | 42 | 18 | 31 | 3 | 0 | 1 | 14 |
146,163 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/codegen/c/cython.py
|
cython.CStub
|
class CStub:
def __init__(self):
self.psource = None
self.setup = None
self.pyx = None
self.pxd = None
self.cheader = None
self.csource = None
|
class CStub:
def __init__(self):
pass
| 2 | 0 | 7 | 0 | 7 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 6 | 1 | 1 | 8 | 0 | 8 | 8 | 6 | 0 | 8 | 8 | 6 | 1 | 0 | 0 | 1 |
146,164 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/csv.py
|
csv.CSV2
|
class CSV2(grammar.Grammar, CSV):
entry = "csv2"
# copy the result of CSV.csv as result of csv2
grammar = """
csv2 = [ CSV.csv:>_ ]
item = [ [CSV.item]?:>_ ]
"""
|
class CSV2(grammar.Grammar, CSV):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 52 | 8 | 1 | 6 | 3 | 5 | 1 | 3 | 3 | 2 | 0 | 7 | 0 | 0 |
146,165 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/csv.py
|
csv.CSV
|
class CSV(grammar.Grammar):
entry = "csv"
grammar = """
csv = [ [@ignore("null") line : l #add_line(_, l)]+ eof ]
line =
[
item : c #add_col(_, c)
[';' item : c #add_col(_, c)]*
eol
]
item = [ [id | num] : i #add_item(_, i) ]
"""
|
class CSV(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 52 | 14 | 2 | 12 | 3 | 11 | 4 | 3 | 3 | 2 | 0 | 6 | 0 | 0 |
146,166 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_parse.py
|
tests.internal_parse.InternalParse_Test.test_00_Directive.DummyDirective
|
class DummyDirective(parsing.DirectiveWrapper):
def begin(self, test, a: int, b: int):
test.assertEqual(a, 1)
test.assertEqual(b, 2)
return True
def end(self, test, a: int, b: int):
test.assertEqual(a, 1)
test.assertEqual(b, 2)
return True
|
class DummyDirective(parsing.DirectiveWrapper):
def begin(self, test, a: int, b: int):
pass
def end(self, test, a: int, b: int):
pass
| 3 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 10 | 1 | 9 | 3 | 6 | 0 | 9 | 3 | 6 | 1 | 1 | 0 | 2 |
146,167 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_parse.py
|
tests.internal_parse.InternalParse_Test
|
class InternalParse_Test(unittest.TestCase):
def test_000_Node(self):
d = parsing.Node()
d.tata = [1, 2, 3, 4, 5]
d["chuchu"] = 'bla'
d['papa'] = d
ndict = {}
res = d.check(ndict)
out = ""
for k, v in ndict.items():
out += "[%s]=%s\n" % (k, v)
self.assertTrue(res, "Failed detect recursive node:\n%s" % out)
d = parsing.Node()
d.tata = [1, 2, 3, 4, 5]
d["chuchu"] = 'bla'
ndict = {}
res = d.check(ndict)
out = ""
for k, v in ndict.items():
out += "[%s]=%s\n" % (k, v)
self.assertFalse(res, "Failed detect recursive node:\n%s" % out)
def test_00_Directive(self):
"""Test Directive/DirectiveWrapper
"""
class DummyDirective(parsing.DirectiveWrapper):
def begin(self, test, a: int, b: int):
test.assertEqual(a, 1)
test.assertEqual(b, 2)
return True
def end(self, test, a: int, b: int):
test.assertEqual(a, 1)
test.assertEqual(b, 2)
return True
def dummyParser(p):
return True
direct = parsing.Directive(DummyDirective(), [(1, int), (2, int)],
dummyParser)
direct(self)
def test_01_readIdentifier(self):
"""
Basic test for identifier parsing
"""
parser = parsing.Parser()
parser.parsed_stream("ceci est un test", name="root")
self.assertTrue(
parser.begin_tag('sujet') and
parser.read_identifier() and
parser.end_tag('sujet'),
'failed in read_identifier for sujet')
sujet = parser.get_tag('sujet')
parser.skip_ignore()
self.assertEqual(str(sujet), "ceci", "failed in capture sujet")
self.assertTrue(
parser.begin_tag('verbe') and
parser.read_identifier() and
parser.end_tag('verbe'),
'failed in read_identifier for verbe')
verbe = parser.get_tag('verbe')
parser.skip_ignore()
self.assertEqual(str(verbe), "est", "failed in capture verbe")
self.assertTrue(
parser.begin_tag('other') and
parser.read_until_eof() and
parser.end_tag('other'),
'failed in read_identifier for other')
reste = parser.get_tag('other')
self.assertEqual(str(reste), "un test", "failed in capture other")
def test_02_readInteger(self):
"""
Basic test for integer parsing
"""
parser = parsing.Parser()
parser.parsed_stream("12 333 44444444444444444444444444", name="root")
self.assertTrue(
parser.begin_tag('n1') and
parser.read_integer() and
parser.end_tag('n1'),
'failed in read_integer for n1')
n1 = parser.get_tag('n1')
parser.skip_ignore()
self.assertEqual(str(n1), "12", "failed in capture n1")
self.assertTrue(
parser.begin_tag('n2') and
parser.read_integer() and
parser.end_tag('n2'),
'failed in read_integer for n2')
n2 = parser.get_tag('n2')
parser.skip_ignore()
self.assertEqual(str(n2), "333", "failed in capture n2")
self.assertTrue(
parser.begin_tag('n3') and
parser.read_integer() and
parser.end_tag('n3'),
'failed in read_integer for n3')
n3 = parser.get_tag('n3')
self.assertEqual(str(n3), "44444444444444444444444444",
"failed in capture n3")
def test_04_readCChar(self):
"""
Basic test for read_cchar
"""
parser = parsing.Parser()
parser.parsed_stream(r"'c' '\t'", name="root")
self.assertTrue(
parser.begin_tag('c1') and
parser.read_cchar() and
parser.end_tag('c1'),
'failed in read_cchar for c1')
c1 = parser.get_tag('c1')
parser.skip_ignore()
self.assertEqual(str(c1), "'c'", "failed in capture c1")
self.assertTrue(
parser.begin_tag('c2') and
parser.read_cchar() and
parser.end_tag('c2'),
'failed in read_cchar for c2')
c2 = parser.get_tag('c2')
self.assertEqual(str(c2), r"'\t'", "failed in capture c2")
def test_05_readCString(self):
"""
Basic test for read_cstring
"""
parser = parsing.Parser()
parser.parsed_stream(
r'"premiere chaine"'
r'"deuxieme chaine\n"'
r'"troisieme chainee \"."',
name="root")
self.assertTrue(
parser.begin_tag('s1') and
parser.read_cstring() and
parser.end_tag('s1'),
'failed in read_cstring for s1')
s1 = parser.get_tag('s1')
parser.skip_ignore()
self.assertEqual(str(s1), '"premiere chaine"', "failed in capture s1")
self.assertTrue(
parser.begin_tag('s2') and
parser.read_cstring() and
parser.end_tag('s2'),
'failed in read_cstring for s2')
s2 = parser.get_tag('s2')
parser.skip_ignore()
self.assertEqual(str(s2), '"deuxieme chaine\\n"',
"failed in capture s2")
self.assertTrue(
parser.begin_tag('s3') and
parser.read_cstring() and
parser.end_tag('s3'),
'failed in read_cstring for s3')
s3 = parser.get_tag('s3')
self.assertEqual(str(s3), r'"troisieme chainee \"."',
"failed in capture s3")
def test_06_CallAndSeq(self):
"""
Basic test for call/clauses
"""
parser = parsing.Parser()
parser.parsed_stream("abc def ghg")
parseTree = parsing.Seq(
parsing.Call(parsing.Parser.begin_tag, 'i1'),
parsing.Parser.read_identifier,
parsing.Call(parsing.Parser.end_tag, 'i1'),
parsing.Call(parsing.Parser.begin_tag, 'i2'),
parsing.Parser.read_identifier,
parsing.Call(parsing.Parser.end_tag, 'i2'),
parsing.Call(parsing.Parser.begin_tag, 'i3'),
parsing.Parser.read_identifier,
parsing.Call(parsing.Parser.end_tag, 'i3'))
parseTree(parser)
# Warning! skip_ignore is called between each parsing.Seq
self.assertEqual(str(parser.get_tag("i1")), "abc ",
"failed in captured i1")
self.assertEqual(str(parser.get_tag("i2")), "def ",
"failed in captured i2")
self.assertEqual(str(parser.get_tag("i3")), "ghg",
"failed in captured i3")
def test_07_RepXN(self):
"""
Basic test for repeater operator
"""
parser = parsing.Parser()
parser.parsed_stream("12343 91219****1323 23")
parseTree = parsing.Seq(
parsing.Call(parsing.Parser.begin_tag, 'i1'),
parsing.Parser.read_integer,
parsing.Call(parsing.Parser.end_tag, 'i1'),
parsing.Rep0N(parsing.Call(parsing.Parser.read_char, '*')),
parsing.Call(parsing.Parser.begin_tag, 'i2'),
parsing.Rep1N(parsing.Call(parsing.Parser.read_range, '0', '9')),
parsing.Call(parsing.Parser.end_tag, 'i2'),
parsing.Rep0N(parsing.Call(parsing.Parser.read_char, '*')),
parsing.Call(parsing.Parser.begin_tag, 'i3'),
parsing.Parser.read_integer,
parsing.Call(parsing.Parser.end_tag, 'i3'),
parsing.Call(parsing.Parser.read_eof))
parseTree(parser)
# Warning! skip_ignore is called between each parsing.Seq
self.assertEqual(str(parser.get_tag("i1")), "12343 ",
"failed in captured i1")
self.assertEqual(str(parser.get_tag("i2")), "91219",
"failed in captured i2")
self.assertEqual(str(parser.get_tag("i3")), "1323 ",
"failed in captured i3")
def test_08_RepAlt(self):
"""
Basic test for alternatives
"""
parser = parsing.Parser()
parser.parsed_stream("_ad121dwdw ()[]")
parseTree = parsing.Seq(
parsing.Call(parsing.Parser.begin_tag, 'w1'),
parsing.Scope(
begin=parsing.Call(parsing.Parser.push_ignore,
parsing.Parser.ignore_null),
end=parsing.Call(parsing.Parser.pop_ignore),
pt=parsing.Seq(
parsing.Alt(
parsing.Call(parsing.Parser.read_char, '_'),
parsing.Call(parsing.Parser.read_range, 'a', 'z'),
parsing.Call(parsing.Parser.read_range, 'A', 'Z')
),
parsing.Rep0N(
parsing.Alt(
parsing.Call(parsing.Parser.read_char, '_'),
parsing.Call(parsing.Parser.read_range, 'a', 'z'),
parsing.Call(parsing.Parser.read_range, 'A', 'Z'),
parsing.Call(parsing.Parser.read_range,
'0', '9'))))),
parsing.Call(parsing.Parser.end_tag, 'w1'),
parsing.Capture(
'w2',
parsing.Rep1N(
parsing.Alt(
parsing.Call(parsing.Parser.read_char, '('),
parsing.Call(parsing.Parser.read_char, ')'),
parsing.Call(parsing.Parser.read_char, '['),
parsing.Call(parsing.Parser.read_char, ']'),
)
)),
parsing.Call(parsing.Parser.read_eof)
)
parseTree(parser)
# Warning! skip_ignore is called between each parsing.Seq
self.assertEqual(str(parser.get_tag("w1")), "_ad121dwdw ",
"failed in captured w1")
self.assertEqual(str(parser.get_tag("w2")), "()[]",
"failed in captured w2")
def test_09_RepRules(self):
"""
Basic test for Rules
"""
def check_word(parser, test, tutu):
test.assertIn(parser.value(tutu), ('asbga', 'njnj'))
return True
def check_int(parser, test, toto):
test.assertIn(parser.value(toto), ('12121', '89898'))
return True
check_word = mock.Mock(side_effect=check_word)
check_int = mock.Mock(side_effect=check_int)
parser = parsing.Parser()
parser.parsed_stream("asbga 12121 njnj 89898")
parser.rule_nodes['test'] = self
parser.set_hooks({'checkWord': check_word, 'checkInt': check_int})
parser.set_rules({
'main': parsing.Seq(
parsing.Rep0N(
parsing.Alt(
parsing.Seq(
parsing.Capture('tutu', parsing.Rule('word')),
parsing.Hook(
'checkWord',
[("test", parsing.Node),
("tutu", parsing.Node)])),
parsing.Rule('int'))),
parsing.Rule('Base.eof')),
'word': parsing.Scope(
parsing.Call(parsing.Parser.push_ignore,
parsing.Parser.ignore_null),
parsing.Call(parsing.Parser.pop_ignore),
parsing.Rep1N(
parsing.Alt(
parsing.Call(parsing.Parser.read_range, 'a', 'z'),
parsing.Call(parsing.Parser.read_range, 'A', 'Z')))),
'int': parsing.Seq(
parsing.Scope(
parsing.Call(parsing.Parser.push_ignore,
parsing.Parser.ignore_null),
parsing.Call(parsing.Parser.pop_ignore),
parsing.Capture(
'toto',
parsing.Rep1N(
parsing.Call(parsing.Parser.read_range,
'0', '9')))),
parsing.Hook(
'checkInt',
[("test", parsing.Node), ("toto", parsing.Node)]))})
res = parser.eval_rule('main')
self.assertTrue(res, "failed to parse")
self.assertEqual(2, check_word.call_count)
self.assertEqual(2, check_int.call_count)
def test_10_contextVariables(self):
"""
Basic test for context variables
"""
parser = parsing.Parser()
parser.rule_nodes.update({'coucou': 42,
'grigri': 666,
'toto': [12, 33]})
self.assertEqual(parser.rule_nodes['toto'], [12, 33],
"failed comparing list")
parser.push_rule_nodes()
parser.rule_nodes.update({'local1': 666, 'local2': 777})
parser.rule_nodes['toto'] = [1, 2, 3, 4]
self.assertEqual(parser.rule_nodes['coucou'], 42,
"failed outer scope not visible in local")
parser.push_rule_nodes()
self.assertEqual(parser.rule_nodes['grigri'], 666,
"failed outer scope not visible in local")
self.assertTrue('grigri' in parser.rule_nodes,
"failed outer scope not visible in local")
parser.pop_rule_nodes()
def test_11_namespaceRules(self):
"""
Test the namespace handling
"""
parser = parsing.Parser()
@meta.add_method(parsing.Parser)
def dummy(self):
res = parsing.Node()
res.text = "cool"
self.rule_nodes["_"] = res
return True
meta.set_one(parsing.Parser._rules, "A.B.C.test",
parsing.Call(parsing.Parser.dummy))
bRes = parser.eval_rule('test')
self.assertEqual(bRes.text, "cool",
"failed rule node in global namespace")
bRes = parser.eval_rule('C.test')
self.assertEqual(bRes.text, "cool",
"failed rule node in global namespace")
bRes = parser.eval_rule('B.C.test')
self.assertEqual(bRes.text, "cool",
"failed rule node in global namespace")
bRes = parser.eval_rule('A.B.C.test')
self.assertEqual(bRes.text, "cool",
"failed rule node in global namespace")
def test_12_Metabasicparser(self):
"""
Test the metaclass of BasicParser
"""
class FakeBasic(metaclass=parsing.MetaBasicParser):
_rules = collections.ChainMap()
_hooks = collections.ChainMap()
pass
class A(FakeBasic):
pass
class B(FakeBasic):
_rules = {'key': 'value'}
_hooks = {'key': 'value'}
self.assertTrue('_rules' in dir(A))
self.assertIsInstance(A._rules, collections.ChainMap)
self.assertTrue('_hooks' in dir(A))
self.assertIsInstance(A._hooks, collections.ChainMap)
self.assertEqual(id(A), id(parsing.base._MetaBasicParser['A']),
"failed to found metaclass A in global registry")
self.assertEqual(id(B._rules.maps[1]), id(FakeBasic._rules.maps[0]),
"failed to chain FakeBasic._rules and B._rules")
self.assertEqual(id(B._hooks.maps[1]), id(FakeBasic._hooks.maps[0]),
"failed to chain FakeBasic._hooks and B._hooks")
FakeBasic._rules['newrule'] = 'oldvalue'
FakeBasic._hooks['newhook'] = 'oldvalue'
self.assertIn('newrule', B._rules,
"failed global modification in FakeBasic._rules"
" not impacted in B._rules")
self.assertIn('newhook', B._hooks,
"failed global modification in FakeBasic._hooks"
" not impacted in B._hooks")
B._rules['newrule'] = 'newvalue'
B._hooks['newhook'] = 'newvalue'
self.assertEqual(B._rules['newrule'], 'newvalue',
"failed in local rules modification")
self.assertEqual(B._hooks['newhook'], 'newvalue',
"failed in local hooks modification")
self.assertEqual(FakeBasic._rules['newrule'], 'oldvalue',
"failed local rules modification must be local")
self.assertEqual(FakeBasic._hooks['newhook'], 'oldvalue',
"failed local hooks modification must be local")
def test_13_defaultRules(self):
"""
Test the presence of default rules
"""
parser = parsing.Parser()
self.assertTrue("num" in parser._rules, "failed no found Base.num")
self.assertTrue("Base.num" in parser._rules,
"failed no found Base.num")
self.assertTrue("string" in parser._rules,
"failed no found Base.string")
self.assertTrue("Base.string" in parser._rules,
"failed no found Base.string")
self.assertTrue("hex_num" in parser._rules,
"failed no found Base.hex_num")
self.assertTrue("Base.hex_num" in parser._rules,
"failed no found Base.hex_num")
self.assertTrue("oct_num" in parser._rules,
"failed no found Base.oct_num")
self.assertTrue("Base.oct_num" in parser._rules,
"failed no found Base.oct_num")
def test_14_MetaGrammar(self):
"""
Test the metaclass of Grammar
"""
class FakeGrammar(metaclass=grammar.MetaGrammar):
pass
class SubGrammar(parsing.Parser, FakeGrammar):
pass
# TODO:
# print("Test Grammar")
def test_15_error_index(self):
"""
Test error index
"""
parser = parsing.Parser()
parser.parsed_stream("bla\n12abcd\nblu")
self.assertTrue(parser.read_text("bla\n") and parser.read_integer(),
"failed to parse begin of stream")
self.assertTrue(not parser.read_text("abcde"),
"failed to not parse abcde")
self.assertEqual(parser._stream[parser._stream._cursor
.max_readed_position.index],
'a',
"failed when checking the correction position of last"
" readed character")
self.assertEqual(parser._stream.last_readed_line, "12abcd",
"failed to get the correct last readed line")
def test_16_Negation(self):
"""
Basic test for negation !R
"""
parser = parsing.Parser()
parser.parsed_stream("==")
parseTree = \
parsing.Seq(parsing.Call(parsing.Parser.read_char, '='),
parsing.Neg(parsing.Call(
parsing.Parser.read_char,
'=')))
res = parseTree(parser)
self.assertEqual(res, False, "failed to get the correct final value")
self.assertEqual(parser._stream._cursor._index, 0,
"failed to get the correct index after a negation")
def test_17_Lookahead(self):
"""
Basic test for lookahead !!R
"""
parser = parsing.Parser()
parser.parsed_stream("==")
parseTree = \
parsing.Seq(parsing.Call(parsing.Parser.read_char, '='),
parsing.LookAhead(parsing.Call(
parsing.Parser.read_char,
'=')),
)
res = parseTree(parser)
self.assertEqual(res, True, "failed to get the correct final value")
self.assertEqual(parser._stream._cursor._index, 1,
"failed to get the correct index after a lookahead")
def test_18_Complement(self):
"""
Basic test for complement ~R
"""
parser = parsing.Parser()
parser.parsed_stream("==")
parseTree = parsing.Seq(
parsing.Call(parsing.Parser.read_char, '='),
parsing.Complement(
parsing.Call(parsing.Parser.read_char, '=')
)
)
res = parseTree(parser)
self.assertEqual(res, False, "failed to get the correct final value")
self.assertEqual(parser._stream._cursor._index, 0,
"failed to get the correct index after a lookahead")
parser.parsed_stream("=+")
res = parseTree(parser)
self.assertEqual(res, True, "failed to get the correct final value")
self.assertEqual(parser._stream._cursor._index, 2,
"failed to get the correct index after a lookahead")
def test_19_Until(self):
"""
Basic test for until ->R
"""
parser = parsing.Parser()
parser.parsed_stream("==|=|==tutu")
parseTree = parsing.Seq(
parsing.Until(
parsing.Call(
parsing.Parser.read_text, '|==')
),
parsing.Call(parsing.Parser.read_text, 'tutu'),
)
res = parseTree(parser)
self.assertEqual(res, True, "failed to get the correct final value")
|
class InternalParse_Test(unittest.TestCase):
def test_000_Node(self):
pass
def test_00_Directive(self):
'''Test Directive/DirectiveWrapper
'''
pass
class DummyDirective(parsing.DirectiveWrapper):
def begin(self, test, a: int, b: int):
pass
def end(self, test, a: int, b: int):
pass
def dummyParser(p):
pass
def test_01_readIdentifier(self):
'''
Basic test for identifier parsing
'''
pass
def test_02_readInteger(self):
'''
Basic test for integer parsing
'''
pass
def test_04_readCChar(self):
'''
Basic test for read_cchar
'''
pass
def test_05_readCString(self):
'''
Basic test for read_cstring
'''
pass
def test_06_CallAndSeq(self):
'''
Basic test for call/clauses
'''
pass
def test_07_RepXN(self):
'''
Basic test for repeater operator
'''
pass
def test_08_RepAlt(self):
'''
Basic test for alternatives
'''
pass
def test_09_RepRules(self):
'''
Basic test for Rules
'''
pass
def check_word(parser, test, tutu):
pass
def check_int(parser, test, toto):
pass
def test_10_contextVariables(self):
'''
Basic test for context variables
'''
pass
def test_11_namespaceRules(self):
'''
Test the namespace handling
'''
pass
@meta.add_method(parsing.Parser)
def dummyParser(p):
pass
def test_12_Metabasicparser(self):
'''
Test the metaclass of BasicParser
'''
pass
class FakeBasic(metaclass=parsing.MetaBasicParser):
class A(FakeBasic):
class B(FakeBasic):
def test_13_defaultRules(self):
'''
Test the presence of default rules
'''
pass
def test_14_MetaGrammar(self):
'''
Test the metaclass of Grammar
'''
pass
class FakeGrammar(metaclass=grammar.MetaGrammar):
class SubGrammar(parsing.Parser, FakeGrammar):
def test_15_error_index(self):
'''
Test error index
'''
pass
def test_16_Negation(self):
'''
Basic test for negation !R
'''
pass
def test_17_Lookahead(self):
'''
Basic test for lookahead !!R
'''
pass
def test_18_Complement(self):
'''
Basic test for complement ~R
'''
pass
def test_19_Until(self):
'''
Basic test for until ->R
'''
pass
| 34 | 19 | 20 | 0 | 18 | 2 | 1 | 0.14 | 1 | 8 | 4 | 0 | 20 | 0 | 20 | 92 | 533 | 31 | 441 | 85 | 407 | 61 | 236 | 84 | 203 | 3 | 2 | 1 | 28 |
146,168 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_error.py
|
tests.internal_error.InternalError_Test.test_end_file_03.Exemple
|
class Exemple(Grammar):
entry = 'exemple'
grammar = """
exemple = [ id #false ]
"""
|
class Exemple(Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 52 | 5 | 0 | 5 | 3 | 4 | 1 | 3 | 3 | 2 | 0 | 6 | 0 | 0 |
146,169 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_dsl.py
|
tests.internal_dsl.InternalDsl_Test.test_26_set.dummyList
|
class dummyList(parsing.Node):
def __init__(self):
self._ls = []
def append(self, x):
self._ls.append(x)
def __getitem__(self, n):
return self._ls[n]
|
class dummyList(parsing.Node):
def __init__(self):
pass
def append(self, x):
pass
def __getitem__(self, n):
pass
| 4 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 1 | 3 | 3 | 9 | 2 | 7 | 5 | 3 | 0 | 7 | 5 | 3 | 1 | 1 | 0 | 3 |
146,170 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_type.py
|
tests.internal_type.InternalType_Test.test_symbol_01_symbolpatch.MySymbol
|
class MySymbol(Symbol):
def show_name(self):
return "cool " + self.name
def internal_name(self):
return "tjrs "
|
class MySymbol(Symbol):
def show_name(self):
pass
def internal_name(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 2 | 0 | 2 | 2 | 6 | 1 | 5 | 3 | 2 | 0 | 5 | 3 | 2 | 1 | 1 | 0 | 2 |
146,171 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/pyrser/parsing/test_capture.py
|
tests.pyrser.parsing.test_capture.TestCapture
|
class TestCapture(unittest.TestCase):
def test_it_returns_clause_result(self):
res = mock.Mock()
# if __len__ == 0, bool(res) is False
# overrides with __bool__
res.__len__ = lambda x: 0
res.__bool__ = lambda x: True
parser = mock.Mock(rule_nodes=collections.ChainMap(),
**{'begin_tag.return_value': True})
clause = mock.Mock(return_value=res)
capture = parsing.Capture('tagname', clause)
self.assertIs(capture(parser), res)
clause.assert_called_once_with(parser)
def test_it_wraps_boolean_result_in_node(self):
res = mock.Mock()
parser = mock.Mock(rule_nodes=collections.ChainMap(),
**{'get_tag.return_value': res})
clause = mock.Mock(return_value=True)
capture = parsing.Capture('tagname', clause)
expected_res = parsing.Node(True)
expected_res.value = res
self.assertEqual(capture(parser), expected_res)
clause.assert_called_once_with(parser)
def test_it_is_false_when_begintag_is_false(self):
parser = mock.Mock(**{'begin_tag.return_value': False})
capture = parsing.Capture('tagname', None)
self.assertFalse(capture(parser))
parser.begin_tag.assert_called_once_with('tagname')
def test_it_is_false_when_clause_is_false(self):
parser = mock.Mock(rule_nodes=collections.ChainMap(),
**{'begin_tag.return_value': True})
clause = mock.Mock(return_value=False)
capture = parsing.Capture('tagname', clause)
self.assertFalse(capture(parser))
clause.assert_called_once_with(parser)
@unittest.skip('fix it')
def test_it_is_false_when_undoIgnore_is_false(self):
parser = mock.Mock(rule_nodes={},
**{'begin_tag.return_value': True,
'undo_ignore.return_value': False})
clause = mock.Mock(return_value=True)
capture = parsing.Capture('tagname', clause)
self.assertFalse(capture(parser))
def test_it_is_false_when_endtag_is_false(self):
parser = mock.Mock(rule_nodes=collections.ChainMap(),
**{'begin_tag.return_value': True,
'undo_ignore.return_value': True,
'end_tag.return_value': False})
clause = mock.Mock(return_value=True)
capture = parsing.Capture('tagname', clause)
self.assertFalse(capture(parser))
def test_it_raises_typeerror_if_tagname_is_not_a_str(self):
tagname, clause = None, None
with self.assertRaises(TypeError):
parsing.Capture(tagname, clause)
def test_it_raises_typeerror_with_an_empty_tagname(self):
clause = None
with self.assertRaises(TypeError):
parsing.Capture('', clause)
|
class TestCapture(unittest.TestCase):
def test_it_returns_clause_result(self):
pass
def test_it_wraps_boolean_result_in_node(self):
pass
def test_it_is_false_when_begintag_is_false(self):
pass
def test_it_is_false_when_clause_is_false(self):
pass
@unittest.skip('fix it')
def test_it_is_false_when_undoIgnore_is_false(self):
pass
def test_it_is_false_when_endtag_is_false(self):
pass
def test_it_raises_typeerror_if_tagname_is_not_a_str(self):
pass
def test_it_raises_typeerror_with_an_empty_tagname(self):
pass
| 10 | 0 | 7 | 0 | 7 | 0 | 1 | 0.04 | 1 | 5 | 2 | 0 | 8 | 0 | 8 | 80 | 66 | 7 | 57 | 32 | 47 | 2 | 48 | 31 | 39 | 1 | 2 | 1 | 8 |
146,172 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/pyrser/test_grammar.py
|
tests.pyrser.test_grammar.TestGrammar.test_it_parses_a_grammar_and_attach_parsing_rules.Grammar
|
class Grammar(pyrser.Grammar):
grammar = bnf
dsl_parser = dsl
|
class Grammar(pyrser.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
146,173 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/pyrser/test_grammar.py
|
tests.pyrser.test_grammar.TestGrammar.test_it_parses_source_using_rules.StubGrammar
|
class StubGrammar(pyrser.Grammar):
entry = 'rulename'
dsl_parser = dsl
|
class StubGrammar(pyrser.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
146,174 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_dsl.py
|
tests.internal_dsl.InternalDsl_Test.test_25_directive.dummyDir
|
class dummyDir(parsing.DirectiveWrapper):
def begin(self, parser, a: int, b: int, c: int):
parser.test.assertTrue(a == 1)
parser.test.assertTrue(b == 2)
parser.test.assertTrue(c == 3)
# for workflow checking
parser.workflow = 1
return True
def end(self, parser, a: int, b: int, c: int):
parser.test.assertTrue(a == 1)
parser.test.assertTrue(b == 2)
parser.test.assertTrue(c == 3)
# for workflow checking
parser.test.assertTrue(parser.workflow == 2)
return True
|
class dummyDir(parsing.DirectiveWrapper):
def begin(self, parser, a: int, b: int, c: int):
pass
def end(self, parser, a: int, b: int, c: int):
pass
| 3 | 0 | 7 | 0 | 6 | 1 | 1 | 0.15 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 16 | 1 | 13 | 3 | 10 | 2 | 13 | 3 | 10 | 1 | 1 | 0 | 2 |
146,175 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_dsl.py
|
tests.internal_dsl.InternalDsl_Test
|
class InternalDsl_Test(unittest.TestCase):
def test_01_one_rule(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ a ]
""")
res = bnf.get_rules()
self.assertIn('the_rule', res)
self.assertIsInstance(res['the_rule'], parsing.Rule)
self.assertEqual(res['the_rule'].name, 'a')
def test_02_two_rules(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule=[a b c]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res, "failed to fetch the rule name")
self.assertIsInstance(res['the_rule'], parsing.Seq,
"failed in ParserTree type for node Seq")
self.assertIsInstance(res['the_rule'][0], parsing.Rule,
"failed in ParserTree type for node Rule")
self.assertTrue(res['the_rule'][0].name == "a",
"failed in name of rule 'a'")
self.assertIsInstance(res['the_rule'][1], parsing.Rule,
"failed in ParserTree type for node Rule")
self.assertTrue(res['the_rule'][1].name == "b",
"failed in name of rule 'b'")
def test_03_more_rules(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ a b c]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Seq)
self.assertIsInstance(res['the_rule'][0], parsing.Rule)
self.assertTrue(res['the_rule'][0].name == "a")
self.assertIsInstance(res['the_rule'][1], parsing.Rule)
self.assertTrue(res['the_rule'][1].name == "b")
self.assertIsInstance(res['the_rule'][2], parsing.Rule)
self.assertTrue(res['the_rule'][2].name == "c")
def test_04_one_alt(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ a | b ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Alt)
self.assertIsInstance(res['the_rule'][0], parsing.Rule)
self.assertTrue(res['the_rule'][0].name == "a")
self.assertIsInstance(res['the_rule'][1], parsing.Rule)
self.assertTrue(res['the_rule'][1].name == "b")
def test_05_two_alt(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ a | b | c ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Alt)
self.assertIsInstance(res['the_rule'][0], parsing.Rule)
self.assertTrue(res['the_rule'][0].name == "a")
self.assertIsInstance(res['the_rule'][1], parsing.Rule)
self.assertTrue(res['the_rule'][1].name == "b")
self.assertIsInstance(res['the_rule'][2], parsing.Rule)
self.assertTrue(res['the_rule'][2].name == "c")
def test_06_char(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ 'a' ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Char)
self.assertTrue(res['the_rule'].char == 'a')
def test_07_string(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ "bonjour le monde" ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Text)
self.assertTrue(res['the_rule'].text == "bonjour le monde")
def test_08_range(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ 'a'..'z' ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Range)
self.assertTrue(res['the_rule'].begin == 'a')
self.assertTrue(res['the_rule'].end == 'z')
def test_09_complexe(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ 'a'..'z' "tutu" 'a' | a b | z ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Alt)
self.assertIsInstance(res['the_rule'][0], parsing.Seq)
self.assertIsInstance(res['the_rule'][0][0],
parsing.Range)
self.assertTrue(res['the_rule'][0][0].begin == 'a')
self.assertTrue(res['the_rule'][0][0].end == 'z')
self.assertIsInstance(res['the_rule'][0][1],
parsing.Text)
self.assertEqual(res['the_rule'][0][1].text, "tutu")
self.assertIsInstance(res['the_rule'][0][2],
parsing.Char)
self.assertTrue(res['the_rule'][0][2].char == 'a')
self.assertIsInstance(res['the_rule'][1], parsing.Seq)
self.assertIsInstance(res['the_rule'][1][0],
parsing.Rule)
self.assertTrue(res['the_rule'][1][0].name == "a")
self.assertTrue(res['the_rule'][1][1].name == "b")
self.assertIsInstance(res['the_rule'][2], parsing.Rule)
self.assertTrue(res['the_rule'][2].name == "z")
def test_10_repoption(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ a? ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.RepOptional)
self.assertTrue(res['the_rule'].pt.name == 'a')
def test_11_rep0N(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ a* ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res, "failed to fetch the rule name")
self.assertIsInstance(res['the_rule'], parsing.Rep0N)
self.assertIsInstance(res['the_rule'].pt, parsing.Rule)
self.assertTrue(res['the_rule'].pt.name == 'a')
def test_12_rep1N(self):
"""
Test default
"""
bnf = dsl.EBNF("""
the_rule = [ [a "toto"]+ ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Rep1N)
self.assertTrue(res['the_rule'].pt[0].name == 'a')
self.assertTrue(res['the_rule'].pt[1].text == "toto")
def test_13_complementedRepeatedRule(self):
bnf = dsl.EBNF("""
the_rule = [ ~a+ ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Rep1N)
self.assertIsInstance(res['the_rule'].pt, parsing.Complement)
self.assertEqual(res['the_rule'].pt.pt.name, 'a')
def test_14_negatedRule(self):
bnf = dsl.EBNF("""
the_rule = [ !a ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Neg)
self.assertEqual(res['the_rule'].pt.name, 'a')
def test_15_negatedRepeatedRule(self):
bnf = dsl.EBNF("""
the_rule = [ !a+ ]
""")
with self.assertRaises(error.Diagnostic) as pe:
r = bnf.get_rules()
self.assertEqual(
pe.exception.logs[0].msg,
"Cannot repeat a negated rule",
"Bad message"
)
def test_16_lookaheadRule(self):
bnf = dsl.EBNF("""
the_rule = [ !!a ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.LookAhead)
self.assertEqual(res['the_rule'].pt.name, 'a')
def test_17_lookaheadRepeatedRule(self):
bnf = dsl.EBNF("""
the_rule = [ !!a+ ]
""")
with self.assertRaises(error.Diagnostic) as pe:
r = bnf.get_rules()
self.assertEqual(
pe.exception.logs[0].msg,
"Cannot repeat a lookahead rule",
"Bad message"
)
def test_18_hookNoParam(self):
@meta.hook(parsing.Parser)
def my_hook(self):
self.the_hook = True
return True
bnf = dsl.EBNF("""
the_rule = [ #my_hook ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Hook)
self.assertTrue(res['the_rule'].name == "my_hook")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.the_hook = False
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
self.assertTrue(dummyData.the_hook)
bnf = dsl.EBNF("""
the_rule = [ #my_hook() ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Hook)
self.assertTrue(res['the_rule'].name == "my_hook")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.the_hook = False
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
self.assertTrue(dummyData.the_hook)
def test_19_hookOneParamStr(self):
@meta.hook(parsing.Parser)
def my_hook_txt(self, txt):
self.test.assertEqual(txt, "cool",
'failed to receive "cool" in hook')
self.test.assertTrue(txt == "cool")
return True
bnf = dsl.EBNF("""
the_rule = [ #my_hook_txt("cool") ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res, "failed to fetch the rule name")
self.assertIsInstance(res['the_rule'], parsing.Hook)
self.assertTrue(res['the_rule'].name == "my_hook_txt")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
def test_20_hookOneParamChar(self):
@meta.hook(parsing.Parser)
def my_hook_char(self, txt):
self.test.assertEqual(txt, "\t", 'failed to receive "\t" in hook')
return True
bnf = dsl.EBNF("""
the_rule = [ #my_hook_char('\t') ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res, "failed to fetch the rule name")
self.assertIsInstance(res['the_rule'], parsing.Hook)
self.assertTrue(res['the_rule'].name == "my_hook_char")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
def test_21_hookOneParamNum(self):
@meta.hook(parsing.Parser)
def my_hook_num(self, num):
self.test.assertEqual(num, 123456,
'failed to receive 123456 in hook')
self.test.assertTrue(num == 123456)
return True
bnf = dsl.EBNF("""
the_rule = [ #my_hook_num(123456) ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res, "failed to fetch the rule name")
self.assertIsInstance(res['the_rule'], parsing.Hook)
self.assertTrue(res['the_rule'].name == "my_hook_num")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
def test_22_hookOneParamId(self):
@meta.hook(parsing.Parser)
def my_hook_id(self, n):
self.test.assertIsInstance(n, parsing.Node)
return True
bnf = dsl.EBNF("""
the_rule = [ #my_hook_id(_) ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Hook)
self.assertTrue(res['the_rule'].name == "my_hook_id")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
def test_23_hookParams(self):
@meta.hook(parsing.Parser)
def my_hook_params(self, n, num, txt):
self.test.assertIsInstance(n, parsing.Node)
self.test.assertTrue(num == 123456)
self.test.assertTrue(txt == "cool")
return True
bnf = dsl.EBNF("""
the_rule = [ #my_hook_params(_, 123456, "cool") ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Hook)
self.assertTrue(res['the_rule'].name == "my_hook_params")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
def test_24_hookAndCapture(self):
@meta.hook(parsing.Parser)
def my_hook_multi(self, n1, n2, n3, n4, n5):
self.test.assertTrue(self.value(n1) == "456")
self.test.assertTrue(self.value(n2) == '"toto"')
self.test.assertTrue(self.value(n3) == "blabla")
self.test.assertTrue(self.value(n4) == "cafebabe12CFE")
self.test.assertTrue(self.value(n5) == "0755")
return True
bnf = dsl.EBNF("""
N = [ Base.num ]
S = [ Base.string ]
I = [ Base.id ]
H = [ Base.hex_num ]
O = [ Base.oct_num ]
the_rule = [ N:nth S:t I:i H:h O:o
#my_hook_multi(nth, t, i, h, o)
]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res)
self.assertIsInstance(res['the_rule'], parsing.Seq)
self.assertTrue(res['the_rule'][-1].name == "my_hook_multi")
dummyData = parsing.Parser("""
456 "toto" blabla cafebabe12CFE 0755
""")
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
eval_res = dummyData.eval_rule('the_rule')
self.assertTrue(eval_res)
def test_25_directive(self):
class dummyDir(parsing.DirectiveWrapper):
def begin(self, parser, a: int, b: int, c: int):
parser.test.assertTrue(a == 1)
parser.test.assertTrue(b == 2)
parser.test.assertTrue(c == 3)
# for workflow checking
parser.workflow = 1
return True
def end(self, parser, a: int, b: int, c: int):
parser.test.assertTrue(a == 1)
parser.test.assertTrue(b == 2)
parser.test.assertTrue(c == 3)
# for workflow checking
parser.test.assertTrue(parser.workflow == 2)
return True
@meta.hook(parsing.Parser, erase=True)
def my_hook(self):
# for workflow checking
self.test.assertTrue(self.workflow == 1)
self.workflow = 2
return True
dsl.EBNF.set_directives({'toto.dummyDir': dummyDir})
bnf = dsl.EBNF("""
the_rule = [ @toto.dummyDir(1, 2, 3) test ]
test = [ #my_hook Base.eof ]
""")
res = bnf.get_rules()
self.assertTrue('the_rule' in res, "failed to fetch the rule name")
dummyData = parsing.Parser()
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
res = dummyData.eval_rule('the_rule')
self.assertTrue(res)
def test_25_list_id(self):
@meta.hook(parsing.Parser)
def in_list(self, ls, ident):
if not hasattr(ls, 'list'):
ls.list = []
ls.list.append(self.value(ident))
return True
bnf = dsl.EBNF("""
I = [ id ]
list = [ [I : i #in_list(_, i) ]+ ]
""")
res = bnf.get_rules()
self.assertTrue('list' in res)
dummyData = parsing.Parser("""
a b c d e f
""")
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
eval_res = dummyData.eval_rule('list')
self.assertTrue(eval_res)
self.assertTrue(eval_res.list[0] == "a")
self.assertTrue(eval_res.list[1] == "b")
self.assertTrue(eval_res.list[2] == "c")
self.assertTrue(eval_res.list[3] == "d")
self.assertTrue(eval_res.list[4] == "e")
self.assertTrue(eval_res.list[5] == "f")
def test_26_set(self):
class dummyList(parsing.Node):
def __init__(self):
self._ls = []
def append(self, x):
self._ls.append(x)
def __getitem__(self, n):
return self._ls[n]
@meta.hook(parsing.Parser, erase=True)
def in_list(self, ls, ident):
if type(ls) is parsing.Node:
ls.set(dummyList())
ls.append(self.value(ident))
return True
bnf = dsl.EBNF("""
I = [ id ]
list = [ [I : i #in_list(_, i) ]+ ]
""")
res = bnf.get_rules()
self.assertTrue('list' in res)
self.assertTrue('I' in res)
dummyData = parsing.Parser("""
a b c d e f
""")
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
eval_res = dummyData.eval_rule('list')
self.assertTrue(eval_res)
self.assertTrue(eval_res[0] == "a")
self.assertTrue(eval_res[1] == "b")
self.assertTrue(eval_res[2] == "c")
self.assertTrue(eval_res[3] == "d")
self.assertTrue(eval_res[4] == "e")
self.assertTrue(eval_res[5] == "f")
def test_27_nodescope(self):
@meta.hook(parsing.Parser)
def put(self, ast):
# A.put visible in subrules
ast.put = True
return True
@meta.hook(parsing.Parser)
def check1(self):
self.test.assertTrue('A' in self.rule_nodes)
# _ is from rule1, not main
self.test.assertFalse(hasattr(self.rule_nodes['_'], 'put'))
# return of rule1 with .toto == True
self.rule_nodes['_'].toto = True
return True
@meta.hook(parsing.Parser)
def check2(self):
self.test.assertTrue('A' in self.rule_nodes)
self.test.assertTrue('B' in self.rule_nodes)
return False
@meta.hook(parsing.Parser)
def check3(self):
self.test.assertTrue('A' in self.rule_nodes)
# B no more living (alternative)
self.test.assertFalse('B' in self.rule_nodes)
return True
@meta.hook(parsing.Parser)
def toto(self):
self.test.assertTrue(hasattr(self.rule_nodes['r'], 'toto'))
self.test.assertTrue(hasattr(self.rule_nodes['r'], 'bla'))
return True
@meta.hook(parsing.Parser)
def check4(self):
self.rule_nodes['_'].bla = True
return True
bnf = dsl.EBNF("""
main =
[ __scope__:A #put(_)
rule1:r #toto eof
]
rule1 =
[
#check1 __scope__:B #check2
| #check3 #check4
]
""")
res = bnf.get_rules()
self.assertTrue('main' in res)
self.assertTrue('rule1' in res)
dummyData = parsing.Parser("")
dummyData.set_rules(res)
dummyData.test = self
# with dummyData as s:
eval_res = dummyData.eval_rule('main')
def test_28_errors(self):
with self.assertRaises(TypeError):
@meta.hook(parsing.Parser, "num")
def check5(self):
pass
with self.assertRaises(TypeError):
@meta.rule(parsing.Parser, "Base.read_char")
def check6(self):
pass
with self.assertRaises(TypeError):
@meta.hook(parsing.Parser, "plop")
def check7(self):
pass
bnf = dsl.EBNF("""
main = [ #plop ]
""")
res = bnf.get_rules()
p = parsing.Parser("")
p.set_rules(res)
p.eval_rule('main')
|
class InternalDsl_Test(unittest.TestCase):
def test_01_one_rule(self):
'''
Test default
'''
pass
def test_02_two_rules(self):
'''
Test default
'''
pass
def test_03_more_rules(self):
'''
Test default
'''
pass
def test_04_one_alt(self):
'''
Test default
'''
pass
def test_05_two_alt(self):
'''
Test default
'''
pass
def test_06_char(self):
'''
Test default
'''
pass
def test_07_string(self):
'''
Test default
'''
pass
def test_08_range(self):
'''
Test default
'''
pass
def test_09_complexe(self):
'''
Test default
'''
pass
def test_10_repoption(self):
'''
Test default
'''
pass
def test_11_rep0N(self):
'''
Test default
'''
pass
def test_12_rep1N(self):
'''
Test default
'''
pass
def test_13_complementedRepeatedRule(self):
pass
def test_14_negatedRule(self):
pass
def test_15_negatedRepeatedRule(self):
pass
def test_16_lookaheadRule(self):
pass
def test_17_lookaheadRepeatedRule(self):
pass
def test_18_hookNoParam(self):
pass
@meta.hook(parsing.Parser)
def my_hook(self):
pass
def test_19_hookOneParamStr(self):
pass
@meta.hook(parsing.Parser)
def my_hook_txt(self, txt):
pass
def test_20_hookOneParamChar(self):
pass
@meta.hook(parsing.Parser)
def my_hook_char(self, txt):
pass
def test_21_hookOneParamNum(self):
pass
@meta.hook(parsing.Parser)
def my_hook_num(self, num):
pass
def test_22_hookOneParamId(self):
pass
@meta.hook(parsing.Parser)
def my_hook_id(self, n):
pass
def test_23_hookParams(self):
pass
@meta.hook(parsing.Parser)
def my_hook_params(self, n, num, txt):
pass
def test_24_hookAndCapture(self):
pass
@meta.hook(parsing.Parser)
def my_hook_multi(self, n1, n2, n3, n4, n5):
pass
def test_25_directive(self):
pass
class dummyDir(parsing.DirectiveWrapper):
def begin(self, parser, a: int, b: int, c: int):
pass
def end(self, parser, a: int, b: int, c: int):
pass
@meta.hook(parsing.Parser, erase=True)
def my_hook(self):
pass
def test_25_list_id(self):
pass
@meta.hook(parsing.Parser)
def in_list(self, ls, ident):
pass
def test_26_set(self):
pass
class dummyList(parsing.Node):
def __init__(self):
pass
def append(self, x):
pass
def __getitem__(self, n):
pass
@meta.hook(parsing.Parser, erase=True)
def in_list(self, ls, ident):
pass
def test_27_nodescope(self):
pass
@meta.hook(parsing.Parser)
def put(self, ast):
pass
@meta.hook(parsing.Parser)
def check1(self):
pass
@meta.hook(parsing.Parser)
def check2(self):
pass
@meta.hook(parsing.Parser)
def check3(self):
pass
@meta.hook(parsing.Parser)
def toto(self):
pass
@meta.hook(parsing.Parser)
def check4(self):
pass
def test_28_errors(self):
pass
@meta.hook(parsing.Parser, "num")
def check5(self):
pass
@meta.rule(parsing.Parser, "Base.read_char")
def check6(self):
pass
@meta.hook(parsing.Parser, "plop")
def check7(self):
pass
| 75 | 12 | 13 | 1 | 11 | 1 | 1 | 0.14 | 1 | 4 | 2 | 0 | 29 | 2 | 29 | 101 | 609 | 60 | 494 | 154 | 419 | 69 | 374 | 133 | 318 | 2 | 2 | 1 | 55 |
146,176 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/hooks.py
|
tests.hooks.Hooks_Test.test_01.parserExample2
|
class parserExample2(grammar.Grammar):
entry = "Example"
grammar = """
Example = [ id eof #setstr(_, 'toto')
]
"""
|
class parserExample2(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 1 | 6 | 2 | 5 | 1 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,177 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_03_number2.Number2
|
class Number2(grammar.Grammar):
entry = "test"
grammar = """test = [ ['0'..'9' | '_']+ ]
"""
|
class Number2(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,178 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Terminal
|
class Terminal(NodeInfo):
def __init__(self, value):
super().__init__()
self.value = value
def to_tl4t(self) -> fmt.indentable:
return self.value
def walk(self) -> Node:
"""
TD descent
"""
yield ('term', self, self.value)
|
class Terminal(NodeInfo):
def __init__(self, value):
pass
def to_tl4t(self) -> fmt.indentable:
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 4 | 1 | 3 | 0 | 2 | 1 | 1 | 0.38 | 1 | 2 | 1 | 3 | 3 | 1 | 3 | 37 | 13 | 2 | 8 | 5 | 4 | 3 | 8 | 5 | 4 | 1 | 4 | 0 | 3 |
146,179 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Paren
|
class Paren(Expr):
def __init__(self, expr: Expr):
super().__init__(None, [expr])
def to_tl4t(self):
return fmt.block("(", ")", [self.p[0].to_tl4t()])
|
class Paren(Expr):
def __init__(self, expr: Expr):
pass
def to_tl4t(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 2 | 0 | 2 | 40 | 6 | 1 | 5 | 3 | 2 | 0 | 5 | 3 | 2 | 1 | 5 | 0 | 2 |
146,180 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_parse.py
|
tests.internal_parse.InternalParse_Test.test_12_Metabasicparser.A
|
class A(FakeBasic):
pass
|
class A(FakeBasic):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
146,181 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/internal_parse.py
|
tests.internal_parse.InternalParse_Test.test_12_Metabasicparser.B
|
class B(FakeBasic):
_rules = {'key': 'value'}
_hooks = {'key': 'value'}
|
class B(FakeBasic):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 2 | 0 | 0 |
146,182 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.TypeExprComponent
|
class TypeExprComponent(list):
contains = None
minarity = None
def __init__(self, *deflist):
if self.minarity is not None and len(deflist) < self.minarity:
raise TypeError("%s take minimum %d parameters" % (type(self).__name__, self.minarity))
for t in deflist:
if self.contains is not None and type(t).__name__ not in self.contains:
raise TypeError("%s can't be put in %s" % (type(t), type(self)))
list.__init__(self, deflist)
@property
def reprlist(self):
r = []
for it in self:
t = str(it)
if isinstance(it, list) and type(self) is not Overload and len(it) > 1:
t = '(' + t + ')'
r.append(t)
return r
def __repr__(self) -> str:
return str(self)
|
class TypeExprComponent(list):
def __init__(self, *deflist):
pass
@property
def reprlist(self):
pass
def __repr__(self) -> str:
pass
| 5 | 0 | 6 | 0 | 6 | 0 | 3 | 0 | 1 | 4 | 1 | 5 | 3 | 0 | 3 | 36 | 23 | 2 | 21 | 10 | 16 | 0 | 20 | 9 | 16 | 4 | 2 | 2 | 8 |
146,183 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Tuple
|
class Tuple(TypeExprComponent):
contains = {'Fun', 'Union', 'Tuple', 'T', 'N'}
minarity = 2
def __str__(self) -> str:
return ", ".join(self.reprlist)
|
class Tuple(TypeExprComponent):
def __str__(self) -> str:
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 37 | 6 | 1 | 5 | 4 | 3 | 0 | 5 | 4 | 3 | 1 | 3 | 0 | 1 |
146,184 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Overload
|
class Overload(TypeExprComponent):
contains = {'Fun', 'T', 'N', 'UnknownName'}
minarity = 0
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
"""
When we unify an overload vs another type def we match each fun and we return the rest.
t1 & t2 ?? t1 match and we return t1
t1 & t2 ?? t2 match and we return t2
"""
print("OV TRY TO unify %s ?? %s" % (self, oth_type_def))
ovres = Overload()
for ov in self:
if not ov.unify(oth_type_def, blhs, brhs):
return None
ovres.append(oth_type_def)
return ovres
def __str__(self) -> str:
return "\n& ".join(self.reprlist)
|
class Overload(TypeExprComponent):
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
'''
When we unify an overload vs another type def we match each fun and we return the rest.
t1 & t2 ?? t1 match and we return t1
t1 & t2 ?? t2 match and we return t2
'''
pass
def __str__(self) -> str:
pass
| 3 | 1 | 8 | 1 | 5 | 3 | 2 | 0.38 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 38 | 21 | 3 | 13 | 7 | 10 | 5 | 13 | 7 | 10 | 3 | 3 | 2 | 4 |
146,185 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.N
|
class N(TypeExprComponent):
"""N for Namespaces"""
contains = {'T'}
minarity = 2
def __str__(self) -> str:
return ".".join(self.reprlist)
|
class N(TypeExprComponent):
'''N for Namespaces'''
def __str__(self) -> str:
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 0.2 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 37 | 7 | 1 | 5 | 4 | 3 | 1 | 5 | 4 | 3 | 1 | 3 | 0 | 1 |
146,186 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Fun
|
class Fun(TypeExprComponent):
contains = {'Fun', 'Union', 'Tuple', 'UnknownName', 'T', 'N'}
minarity = 1
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
"""
When we unify a function vs another type def we match each term and we return the rest.
t1 -> t2 -> t3 ?? t1 -> t2 match and we return t3
t1 -> t2 ?? t1 -> t2 -> t3 didn't match
Note: the first element is the return type
"""
print("FUN TRY TO unify %s ?? %s" % (self, oth_type_def))
if type(oth_type_def) is not Fun:
if type(oth_type_def) is T:
return self[0].unify(oth_type_def, blhs, brhs)
raise "not implemented"
diff_len = len(self) - len(oth_type_def)
if diff_len < 0: ## TODO: ADD ELLIPSIS
return None
for a, b in zip(reversed(self), reversed(oth_type_def)):
if not a.unify(b, blhs, brhs):
return None
# TODO: what to do with the rest
return Fun(*self[:diff_len])
def __str__(self) -> str:
return " -> ".join(reversed(self.reprlist))
|
class Fun(TypeExprComponent):
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
'''
When we unify a function vs another type def we match each term and we return the rest.
t1 -> t2 -> t3 ?? t1 -> t2 match and we return t3
t1 -> t2 ?? t1 -> t2 -> t3 didn't match
Note: the first element is the return type
'''
pass
def __str__(self) -> str:
pass
| 3 | 1 | 12 | 1 | 8 | 4 | 4 | 0.44 | 1 | 5 | 1 | 0 | 2 | 0 | 2 | 38 | 29 | 4 | 18 | 7 | 15 | 8 | 18 | 7 | 15 | 6 | 3 | 2 | 7 |
146,187 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Define
|
class Define:
def __init__(self, name: str, type_def: TypeExprComponent):
self.name = name
self.type_def = type_def
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
"""
unify a definition with a correspondant type_def
"""
print("DEFINE TRY TO TYPE %s ?? %s" % (type(self.type_def), type(oth_type_def)))
print("TRY TO unify %s ?? %s" % (self.type_def, oth_type_def))
return self.type_def.unify(oth_type_def, blhs, brhs)
def __len__(self) -> int:
return len(self.type_def)
def __getitem__(self, idx) -> TypeExprComponent:
return self.type_def[idx]
def __str__(self) -> str:
return "%s: %s" % (self.name, self.type_def)
def __repr__(self) -> str:
return str(self)
|
class Define:
def __init__(self, name: str, type_def: TypeExprComponent):
pass
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
'''
unify a definition with a correspondant type_def
'''
pass
def __len__(self) -> int:
pass
def __getitem__(self, idx) -> TypeExprComponent:
pass
def __str__(self) -> str:
pass
def __repr__(self) -> str:
pass
| 7 | 1 | 3 | 0 | 3 | 1 | 1 | 0.19 | 0 | 4 | 1 | 1 | 6 | 2 | 6 | 6 | 24 | 5 | 16 | 9 | 9 | 3 | 16 | 9 | 9 | 1 | 0 | 0 | 6 |
146,188 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Constraints
|
class Constraints:
def __init__(self, initial_defs: [Define]=None):
# store definitions
self.defs = []
# store bind->ast_node
self.bind2node = {}
# map name -> idx in defs
self.name2id = ChainMap()
if initial_defs is not None:
self.add_defines(initial_defs)
# ...
self.top_down = []
self.bottom_up = []
self.mapbind = {}
def add_defines(self, defs: [Define]):
if defs is not None and type(defs) is not list:
raise TypeError("Constraints took a list of Define.")
for idx, it in enumerate(defs):
if not issubclass(type(it), Define):
raise TypeError("Param %d is not a define" % idx)
nodef = len(self.defs)
ndef = len(defs)
self.defs += defs
for it, idx in zip(defs, range(ndef)):
self.name2id[it.name] = nodef + idx
def push_context(self):
self.name2id = self.name2id.new_child()
def pop_context(self):
for idx in sorted(self.name2id.maps[0].values(), reverse=True):
self.defs.pop(idx)
self.name2id = self.name2id.parents
def __str__(self) -> str:
return self.to_dot()
def to_dot(self) -> str:
r = 'digraph {'
r += '\n\tlabel="%s";' % str(self.defs)
if len(self.mapbind) > 0:
for k in sorted(self.mapbind.keys()):
f = self.mapbind[k].flag
node_ast = self.mapbind[k].ast_node
t = "..."
if node_ast is not None:
t = node_ast.to_tl4t()
r += '\n\tnode[shape="box", style="rounded", label="{idnode}: {txt} <{flag}>"] _{idnode};'.format(idnode=k, txt=t, flag=f)
for k in sorted(self.mapbind.keys()):
td = self.mapbind[k].td_depend
bu = self.mapbind[k].bu_depend
if td is not None:
r += '\n\t_%d -> _%d [label="TU"];' % (k, td)
if bu is not None:
r += '\n\t_%d -> _%d [label="BU"];' % (k, bu)
r += '\n}'
return r
def get_def(self, name: str) -> Define:
return self.defs[self.name2id[name]]
def get_bind_by_id(self, bid: int) -> Bind:
return self.mapbind[bid]
def get_id_by_bind(self, src) -> int:
bid = id(src)
if bid not in self.mapbind:
self.mapbind[bid] = src
return bid
def add_BU_cnt(self, src) -> int:
id_src = self.get_id_by_bind(src)
self.bottom_up.append(id_src)
return id_src
def add_TD_cnt(self, src):
id_src = self.get_id_by_bind(src)
self.top_down.append(id_src)
return id_src
def resolve(self):
while True:
done_something = False
# BU
while True:
if len(self.bottom_up) == 0:
break
it = self.bottom_up.pop()
b = self.mapbind[it]
if 'to_resolve' in b.flag:
if b.unify_here():
if b.bu_depend is not None:
self.bottom_up.append(b.bu_depend)
b.flag = {'to_resolve'}
done_something = True
# TD
while True:
if len(self.top_down) == 0:
break
it = self.top_down.pop()
b = self.mapbind[it]
if 'to_propagate' in b.flag:
# not unify but fit to type
if b.fit_here():
if b.td_depend is not None:
self.top_down.append(b.td_depend)
b.flag = {'to_propagate'}
done_something = True
if not done_something:
break
|
class Constraints:
def __init__(self, initial_defs: [Define]=None):
pass
def add_defines(self, defs: [Define]):
pass
def push_context(self):
pass
def pop_context(self):
pass
def __str__(self) -> str:
pass
def to_dot(self) -> str:
pass
def get_def(self, name: str) -> Define:
pass
def get_bind_by_id(self, bid: int) -> Bind:
pass
def get_id_by_bind(self, src) -> int:
pass
def add_BU_cnt(self, src) -> int:
pass
def add_TD_cnt(self, src):
pass
def resolve(self):
pass
| 13 | 0 | 8 | 0 | 8 | 1 | 3 | 0.08 | 0 | 11 | 2 | 0 | 12 | 6 | 12 | 12 | 111 | 11 | 93 | 35 | 80 | 7 | 93 | 35 | 80 | 13 | 0 | 5 | 37 |
146,189 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Bind
|
class Bind:
def __init__(self, ast_node, cnt: 'Constraints'):
self._ast_node = None
self.ref_ast_node(ast_node)
self.cnt = cnt
# top-down dependence on another Bind object
self.td_depend = None
# bottom-up dependence on another Bind object
self.bu_depend = None
# by default we need to resolve binds, after we need to propagate modification
self.flag = {'to_resolve'}
# the final type (a Define instance)
# by default a polymorphical type: unknownType
self.final_type = AnonDefine(cnt)
if type(self.final_type) is Bind:
raise "C'est la merde"
self.unify_algo = None
def ref_ast_node(self, ast_node):
if ast_node is not None and self._ast_node is None:
bid = id(self)
if bid not in self.cnt.bind2node:
self.cnt.bind2node[bid] = ast_node
self._ast_node = ast_node
@property
def ast_node(self) -> object:
return self._ast_node
@staticmethod
def createList(cnt: 'Constraints', parent_bind: 'Bind', size: int) -> ['Bind']:
res = []
lastid = cnt.get_id_by_bind(parent_bind)
for it in range(size):
b = Bind(None, cnt)
b.bu_depend = lastid
bid = cnt.get_id_by_bind(b)
cnt.get_bind_by_id(lastid).td_depend = bid
lastid = bid
res.append(b)
return res
@staticmethod
def createListNodeItem(cnt: 'Constraints', parent_list_node: node.ListNodeItem = None) -> ['Bind']:
b = Bind(None, cnt)
if parent_list_node is not None:
parent_bind = parent_list_node.data
lastid = cnt.get_id_by_bind(parent_bind)
b.bu_depend = lastid
bid = cnt.get_id_by_bind(b)
parent_bind.td_depend = bid
return parent_list_node.append(b)
return node.ListNodeItem(b)
@staticmethod
def bu_walk(b: 'Bind'):
cnt = b.cnt
bid = cnt.get_id_by_bind(b)
nid = b.bu_depend
while True:
yield (bid, nid)
if nid is not None:
b = cnt.get_bind_by_id(nid)
bid = id(b)
nid = b.bu_depend
else:
break
@staticmethod
def td_walk(b: 'Bind'):
cnt = b.cnt
bid = cnt.get_id_by_bind(b)
nid = b.td_depend
while True:
yield (bid, nid)
if nid is not None:
b = cnt.get_bind_by_id(nid)
bid = id(b)
nid = b.td_depend
else:
break
def __str__(self) -> str:
r = "\nid: %d" % id(self)
r += "\nflags: %s" % str(self.flag)
if self.bu_depend is not None:
r += "\nbu_depend: %d" % self.bu_depend
if self.td_depend is not None:
r += "\ntd_depend: %d" % self.td_depend
print("\n")
return r
def __repr__(self) -> str:
return str(self)
def unify_here(self):
print("Unify %s" % self.final_type)
n = self.ast_node
ft = self.final_type
if self.unify_algo is not None:
ft = self.unify_algo()
if type(ft) is Bind:
raise "C'est la merde"
# ast && bind contain final_type
print("FT T %s" % type(ft))
print("FINAL TYPE for %s is <%s>" % (self.ast_node.to_tl4t(), ft))
if type(ft) is AnonDefine:
print("INSTANCIATE AS %s" % ft.defname.type_def)
n.final_type = ft
self.final_type = ft
return True
def fit_here(self):
print("Fit to %s" % self.final_type)
return True
|
class Bind:
def __init__(self, ast_node, cnt: 'Constraints'):
pass
def ref_ast_node(self, ast_node):
pass
@property
def ast_node(self) -> object:
pass
@staticmethod
def createList(cnt: 'Constraints', parent_bind: 'Bind', size: int) -> ['Bind']:
pass
@staticmethod
def createListNodeItem(cnt: 'Constraints', parent_list_node: node.ListNodeItem = None) -> ['Bind']:
pass
@staticmethod
def bu_walk(b: 'Bind'):
pass
@staticmethod
def td_walk(b: 'Bind'):
pass
def __str__(self) -> str:
pass
def __repr__(self) -> str:
pass
def unify_here(self):
pass
def fit_here(self):
pass
| 17 | 0 | 9 | 0 | 8 | 1 | 2 | 0.06 | 0 | 7 | 2 | 0 | 7 | 7 | 11 | 11 | 115 | 10 | 99 | 43 | 82 | 6 | 92 | 38 | 80 | 4 | 0 | 2 | 25 |
146,190 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.AnonDefine
|
class AnonDefine(Define):
"""
Implement the TypeVar: ?0, ?1, ?2, ...
"""
def __init__(self, cnt: 'Constraints' = None):
"""
TODO: not really the final version
at begin, we create circular type
"""
self.defname = UnknownName()
self.defs = self.defname
Define.__init__(self, self.defname, Overload(self.defs))
if cnt is not None:
self.cnt = cnt
self.cnt.add_defines([self])
|
class AnonDefine(Define):
'''
Implement the TypeVar: ?0, ?1, ?2, ...
'''
def __init__(self, cnt: 'Constraints' = None):
'''
TODO: not really the final version
at begin, we create circular type
'''
pass
| 2 | 2 | 11 | 0 | 7 | 4 | 2 | 0.88 | 1 | 2 | 2 | 0 | 1 | 3 | 1 | 7 | 15 | 0 | 8 | 5 | 6 | 7 | 8 | 5 | 6 | 2 | 1 | 1 | 2 |
146,191 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/test_meta.py
|
tests.pyrser.test_meta.TestRule
|
class TestRule(unittest.TestCase):
def test_it_attach_method_as_rule_to_class(self):
functioname = mock.Mock(__name__='functioname')
cls = mock.Mock(**{'set_one.return_value': functioname,
'_rules': {}, '__name__': 'classname'})
del cls.functioname
meta.rule(cls)(functioname)
self.assertIs(functioname, cls.functioname)
cls.set_one.assert_call_once_with(
cls._rules, 'functioname', functioname)
def test_it_attach_method_as_rule_to_class_with_rulename(self):
method = mock.Mock(__name__='method')
cls = mock.Mock(**{'set_one.return_value': method,
'_rules': {'rulename': 42},
'__name__': 'classname'})
del cls.method
meta.rule(cls, 'rulename')(method)
self.assertIs(method, cls.method)
cls.set_one.assert_call_once_with(42, 'method', method)
def test_it_does_not_attach_a_rule_if_method_already_exist(self):
class cls:
_rules = {}
def method(self):
pass
method = mock.Mock(__name__='method')
with self.assertRaises(AttributeError):
meta.rule(cls, 'rulename')(method)
|
class TestRule(unittest.TestCase):
def test_it_attach_method_as_rule_to_class(self):
pass
def test_it_attach_method_as_rule_to_class_with_rulename(self):
pass
def test_it_does_not_attach_a_rule_if_method_already_exist(self):
pass
class cls:
def method(self):
pass
| 6 | 0 | 8 | 1 | 7 | 0 | 1 | 0 | 1 | 3 | 1 | 0 | 3 | 0 | 3 | 75 | 31 | 4 | 27 | 12 | 21 | 0 | 23 | 12 | 17 | 1 | 2 | 1 | 4 |
146,192 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/ast/match.py
|
match.MatchBlock
|
class MatchBlock(MatchExpr):
""" Ast Node for a block of PSL statement. """
def __init__(self, stmts: [MatchExpr]):
self.stmts = stmts
self.root_edge = None
def build_state_tree(self, tree: list, sr: state.StateRegister):
""" main function for creating a bottom-up tree automata
for a block of matching statements.
"""
all_seq = []
# for all statements populate a list
# from deeper to nearer of MatchExpr instances.
for stmt in self.stmts:
part_seq = list()
stmt.build_state_tree(part_seq)
all_seq.append(part_seq)
# Walk on all MatchExpr instance
# and create State instance into the StateRegister
self.root_edge = populate_state_register(all_seq, sr)
def to_fmt(self) -> fmt.indentable:
res = fmt.block('{\n', '}', [fmt.tab([])])
lines = res.lsdata[0].lsdata
for stmt in self.stmts:
lines.append(fmt.end('\n', stmt.to_fmt()))
return res
def __repr__(self) -> str:
return str(self.to_fmt())
|
class MatchBlock(MatchExpr):
''' Ast Node for a block of PSL statement. '''
def __init__(self, stmts: [MatchExpr]):
pass
def build_state_tree(self, tree: list, sr: state.StateRegister):
''' main function for creating a bottom-up tree automata
for a block of matching statements.
'''
pass
def to_fmt(self) -> fmt.indentable:
pass
def __repr__(self) -> str:
pass
| 5 | 2 | 6 | 0 | 5 | 2 | 2 | 0.42 | 1 | 7 | 5 | 0 | 4 | 2 | 4 | 4 | 30 | 3 | 19 | 13 | 14 | 8 | 19 | 13 | 14 | 2 | 1 | 1 | 6 |
146,193 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/type_system/type_expr.py
|
pyrser.type_system.type_expr.TypeExprParser
|
class TypeExprParser(Grammar):
entry = 'atype'
grammar = """
atype = [ component ['.' component]* ]
component = [ typename [params]?]
params = [ '<' typename [',' typename]* '>']
typename = ['?'? ~' '+ [attr]? ]
attr = [ '[' kv [',' kv]* ']' ]
kv = [ id ['=' value]?]
"""
|
class TypeExprParser(Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 52 | 15 | 5 | 10 | 3 | 9 | 0 | 3 | 3 | 2 | 0 | 6 | 0 | 0 |
146,194 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/type_system/type_expr.py
|
pyrser.type_system.type_expr.TypeName
|
class TypeName:
def __init__(self, name: str=None):
self.name = name
self.attributes = {}
def set_attr(self, name: str, value=None):
self.attributes[name] = value
def get_attr(self, name: str) -> object:
return self.attributes[name]
def to_fmt(self) -> fmt.indentable:
txt = fmt.sep("", [self.name])
if len(self.attributes) > 0:
lsattr = fmt.sep(", ", [])
lkey = sorted(self.attributes.keys())
for k in lkey:
t = k
if self.attributes[k] is not None:
t += '=' + str(self.attributes[k])
lsattr.lsdata.append(t)
txt.lsdata.append(fmt.block("[", "]", lsattr))
return txt
|
class TypeName:
def __init__(self, name: str=None):
pass
def set_attr(self, name: str, value=None):
pass
def get_attr(self, name: str) -> object:
pass
def to_fmt(self) -> fmt.indentable:
pass
| 5 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 0 | 5 | 3 | 2 | 4 | 2 | 4 | 4 | 23 | 3 | 20 | 12 | 15 | 0 | 20 | 12 | 15 | 4 | 0 | 3 | 7 |
146,195 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/pyrser/type_system/type_expr.py
|
pyrser.type_system.type_expr.DeltaComponentTypeName.add.D
|
class D(dict):
pass
|
class D(dict):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
146,196 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_00_seqchar.SeqChar
|
class SeqChar(grammar.Grammar):
entry = "test"
grammar = """test = [ 'a' 'c' 'b' 'e' ]
"""
|
class SeqChar(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,197 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_01_altchar.AltChar
|
class AltChar(grammar.Grammar):
entry = "test"
grammar = """test = [ 'a' ['c' | 'b' ['e' | 'z'] ] 'd']
"""
|
class AltChar(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,198 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_02_text.Text
|
class Text(grammar.Grammar):
entry = "test"
grammar = """test = ["hello"|"world"]
"""
|
class Text(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,199 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Param
|
class Param(NodeInfo):
def __init__(self, n: str, t: str):
super().__init__()
self.name = n
self.t = t
def to_tl4t(self):
return fmt.sep(" ", [self.name, ':', self.t])
def walk(self) -> Node:
"""
TD descent
"""
yield ('term', self, self.name)
|
class Param(NodeInfo):
def __init__(self, n: str, t: str):
pass
def to_tl4t(self):
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 4 | 1 | 4 | 0 | 3 | 1 | 1 | 0.33 | 1 | 3 | 1 | 0 | 3 | 2 | 3 | 37 | 14 | 2 | 9 | 6 | 5 | 3 | 9 | 6 | 5 | 1 | 4 | 0 | 3 |
146,200 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Operator
|
class Operator(Terminal):
# to connect Inference
def type_algos(self):
return (self.infer_id, self.value, self.feedback_leaf)
|
class Operator(Terminal):
def type_algos(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 38 | 4 | 0 | 3 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 5 | 0 | 1 |
146,201 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.NodeInfo
|
class NodeInfo(Node, Inference):
def __init__(self):
self.info = None
def walk(self) -> Node:
raise TypeError("Not implemented!")
|
class NodeInfo(Node, Inference):
def __init__(self):
pass
def walk(self) -> Node:
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 2 | 1 | 0 | 6 | 2 | 1 | 2 | 34 | 6 | 1 | 5 | 4 | 2 | 0 | 5 | 4 | 2 | 1 | 3 | 0 | 2 |
146,202 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Literal
|
class Literal(Terminal):
def __init__(self, value, t):
self.value = value
self.type = t
# to connect Inference
def type_algos(self):
return (
self.infer_literal, (self.value, self.type), self.feedback_leaf
)
def walk(self) -> Node:
"""
TD descent
"""
yield ('literal', self, T('int'))
|
class Literal(Terminal):
def __init__(self, value, t):
pass
def type_algos(self):
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 4 | 1 | 4 | 0 | 3 | 1 | 1 | 0.4 | 1 | 0 | 0 | 0 | 3 | 2 | 3 | 40 | 17 | 3 | 10 | 6 | 6 | 4 | 8 | 6 | 4 | 1 | 5 | 0 | 3 |
146,203 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Id
|
class Id(Terminal):
# to connect Inference
def type_algos(self):
return (self.infer_id, self.value, self.feedback_id)
|
class Id(Terminal):
def type_algos(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 38 | 5 | 1 | 3 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 5 | 0 | 1 |
146,204 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.ExprStmt
|
class ExprStmt(NodeInfo):
def __init__(self, e: Expr):
super().__init__()
self.expr = e
def to_tl4t(self):
return fmt.end(";\n", [self.expr.to_tl4t()])
# to connect Inference
def type_algos(self):
return (self.infer_subexpr, self.expr, self.feedback_subexpr)
def walk(self) -> Node:
"""
TD descent
"""
yield ('block', self.expr.walk())
|
class ExprStmt(NodeInfo):
def __init__(self, e: Expr):
pass
def to_tl4t(self):
pass
def type_algos(self):
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 5 | 1 | 3 | 0 | 2 | 1 | 1 | 0.4 | 1 | 3 | 2 | 0 | 4 | 1 | 4 | 38 | 17 | 3 | 10 | 6 | 5 | 4 | 10 | 6 | 5 | 1 | 4 | 0 | 4 |
146,205 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Expr
|
class Expr(NodeInfo):
def __init__(self, ce: 'expr', p: ['expr']):
super().__init__()
self.call_expr = ce
self.p = p
def to_tl4t(self):
params = []
for p in self.p:
params.append(p.to_tl4t())
parenth = fmt.block('(', ')', fmt.sep(', ', params))
lsblock = fmt.sep('', [
self.call_expr.to_tl4t(),
parenth
])
return lsblock
# to connect Inference
def type_algos(self):
return (self.infer_fun, (self.call_expr, self.p), self.feedback_fun)
def walk(self) -> Node:
"""
TD descent
"""
yield ('fun', (it1 for it1 in chain(self.call_expr.walk(), (it2.walk() for it2 in self.p))))
|
class Expr(NodeInfo):
def __init__(self, ce: 'expr', p: ['expr']):
pass
def to_tl4t(self):
pass
def type_algos(self):
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 5 | 1 | 5 | 0 | 5 | 1 | 1 | 0.21 | 1 | 4 | 2 | 3 | 4 | 2 | 4 | 38 | 26 | 3 | 19 | 11 | 14 | 4 | 16 | 11 | 11 | 2 | 4 | 1 | 5 |
146,206 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/type_system/val.py
|
pyrser.type_system.val.Val
|
class Val(Signature):
"""
Describe a value signature for the language
"""
nvalues = 0
valuniq = dict()
def __init__(self, value, tret: str):
if not isinstance(value, str):
value = str(value)
self.value = value
if not isinstance(tret, TypeName):
tret = TypeName(tret)
self.tret = tret
k = self.value + "$" + tret
idx = 0
if k not in Val.valuniq:
Val.nvalues += 1
Val.valuniq[k] = Val.nvalues
idx = Val.nvalues
else:
idx = Val.valuniq[k]
super().__init__('$' + str(idx))
def internal_name(self):
"""
Return the unique internal name
"""
unq = super().internal_name()
if self.tret is not None:
unq += "_" + self.tret
return unq
def __str__(self) -> str:
import pyrser.type_system.to_fmt
return str(self.to_fmt())
|
class Val(Signature):
'''
Describe a value signature for the language
'''
def __init__(self, value, tret: str):
pass
def internal_name(self):
'''
Return the unique internal name
'''
pass
def __str__(self) -> str:
pass
| 4 | 2 | 9 | 0 | 8 | 1 | 2 | 0.22 | 1 | 3 | 1 | 0 | 3 | 2 | 3 | 15 | 36 | 3 | 27 | 12 | 22 | 6 | 26 | 12 | 21 | 4 | 2 | 1 | 7 |
146,207 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.DeclVar
|
class DeclVar(NodeInfo):
def __init__(self, name: str, t: str, expr=None):
super().__init__()
self.name = name
self.t = None
self.expr = None
if t is not None:
self.t = t
if expr is not None:
self.expr = expr
def to_tl4t(self) -> fmt.indentable:
lsdecl = [
"var",
self.name,
]
if self.t is not None:
lsdecl.append(":")
lsdecl.append(self.t)
if self.expr is not None:
lsdecl.append("=")
lsdecl.append(self.expr.to_tl4t())
else:
lsdecl[-1] += ";\n"
return fmt.sep(" ", lsdecl)
def declare_var(self, args, diagnostic=None):
parent_scope = self.type_node.parent()
typ = self.t
if self.t is None:
typ = '?1'
var = Var(self.name, typ)
parent_scope.add(var)
# try to infer type or check type
if self.expr is not None:
tn = Scope(sig=[Fun('=', typ, [typ, typ])])
tn.set_parent(parent_scope)
# create a fake Expr Node to infer expression with var type
rhs = Expr(Id('='), [Id(self.name), self.expr])
rhs.type_node = Scope()
rhs.type_node.set_parent(tn)
rhs.infer_type(diagnostic)
# TODO: scope surrounded only one sig !!!!!
print("RHS: [%s]" % rhs.type_node)
print("TRET %s" % rhs.type_node.last())
self.t = rhs.type_node.last().compute_tret
self.type_node = rhs.type_node
#var.tret = rhs.type_node.last()
var.tret = TypeName(self.t)
# to connect Inference
def type_algos(self):
return (self.declare_var, None)
def walk(self) -> Node:
"""
TD descent
"""
yield ('block', self.expr.walk())
|
class DeclVar(NodeInfo):
def __init__(self, name: str, t: str, expr=None):
pass
def to_tl4t(self) -> fmt.indentable:
pass
def declare_var(self, args, diagnostic=None):
pass
def type_algos(self):
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 6 | 1 | 11 | 0 | 9 | 1 | 2 | 0.17 | 1 | 6 | 4 | 1 | 5 | 4 | 5 | 39 | 59 | 4 | 47 | 16 | 41 | 8 | 43 | 16 | 37 | 3 | 4 | 1 | 11 |
146,208 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.BlockStmt
|
class BlockStmt(NodeInfo):
def __init__(self, root=False):
super().__init__()
self.body = []
# if root node (no brace when pprint)
self.root = root
def to_tl4t(self) -> fmt.indentable:
lssub = []
for s in self.body:
lssub.append(s.to_tl4t())
lsblock = None
if self.root:
lsblock = fmt.sep('', lssub)
else:
lsblock = fmt.block('{\n', '}', [fmt.tab(lssub)])
return lsblock
# to connect Inference
def type_algos(self):
return (self.infer_block, self.body, self.feedback_block)
def walk(self) -> Node:
"""
TD descent
"""
yield ('block', (it.walk() for it in self.body))
|
class BlockStmt(NodeInfo):
def __init__(self, root=False):
pass
def to_tl4t(self) -> fmt.indentable:
pass
def type_algos(self):
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 5 | 1 | 6 | 0 | 5 | 1 | 2 | 0.26 | 1 | 5 | 4 | 0 | 4 | 2 | 4 | 38 | 27 | 3 | 19 | 10 | 14 | 5 | 18 | 10 | 13 | 3 | 4 | 1 | 6 |
146,209 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.Binary
|
class Binary(Expr):
def __init__(self, left: Expr, op: Operator, right: Expr):
super().__init__(op, [left, right])
def to_tl4t(self):
return fmt.sep(
" ",
[
self.p[0].to_tl4t(),
self.call_expr.to_tl4t(),
self.p[1].to_tl4t()
]
)
|
class Binary(Expr):
def __init__(self, left: Expr, op: Operator, right: Expr):
pass
def to_tl4t(self):
pass
| 3 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 3 | 2 | 0 | 2 | 0 | 2 | 40 | 13 | 1 | 12 | 3 | 9 | 0 | 5 | 3 | 2 | 1 | 5 | 0 | 2 |
146,210 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.UnknownName
|
class UnknownName:
"""
Implement unknown names: ?0, ?1, ?2
"""
count = 0
minarity = 1
def __init__(self):
self.anonname = '?%d' % UnknownName.count
self.type_def = None
UnknownName.count += 1
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
"""
When we unify an Unknown Name vs another type def we always match
"""
print("UNK TRY TO unify %s ?? %s" % (self, oth_type_def))
if self.type_def is not None:
if type(oth_type_def) is UnknownName:
if oth_type_def is None:
oth_type_def.type_def = self.type_def
return self.type_def
return self.type_def.unify(oth_type_def, blhs, brhs)
# TODO: the type must be fixed by the feedback pass
if self.type_def is None:
self.type_def = Overload()
if oth_type_def not in self.type_def:
self.type_def.append(oth_type_def)
return oth_type_def
def __str__(self) -> str:
return self.anonname
|
class UnknownName:
'''
Implement unknown names: ?0, ?1, ?2
'''
def __init__(self):
pass
def unify(self, oth_type_def: TypeExprComponent, blhs, brhs) -> TypeExprComponent:
'''
When we unify an Unknown Name vs another type def we always match
'''
pass
def __str__(self) -> str:
pass
| 4 | 2 | 8 | 0 | 6 | 1 | 3 | 0.32 | 0 | 4 | 2 | 0 | 3 | 2 | 3 | 3 | 31 | 2 | 22 | 8 | 18 | 7 | 22 | 8 | 18 | 6 | 0 | 3 | 8 |
146,211 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Union
|
class Union(TypeExprComponent):
contains = {'Union', 'T', 'N'}
minarity = 2
def __str__(self) -> str:
return " | ".join(self.reprlist)
|
class Union(TypeExprComponent):
def __str__(self) -> str:
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 37 | 6 | 1 | 5 | 4 | 3 | 0 | 5 | 4 | 3 | 1 | 3 | 0 | 1 |
146,212 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Unifying_Test
|
class Unifying_Test(unittest.TestCase):
def test_000(self):
"""Pretty Print Test"""
d = Overload(T("t1"), T("t2"))
self.assertEqual(str(d), "t1\n& t2")
d = Fun(T("t1"), T("t2"))
self.assertEqual(str(d), "t2 -> t1")
d = Fun(T("t1"), T("t2"))
self.assertEqual(str(d), "t2 -> t1")
d = Union(T("t1"), T("t2"))
self.assertEqual(str(d), "t1 | t2")
d = Tuple(T("t1"), T("t2"))
self.assertEqual(str(d), "t1, t2")
d = Fun(T('t3'), Tuple(T("t1"), T("t2")))
self.assertEqual(str(d), "(t1, t2) -> t3")
def1 = Define('v1', T('t1'))
self.assertEqual(str(def1), "v1: t1")
def1 = Define('v1', N(T('m1'), T('m2'), T('t1')))
self.assertEqual(str(def1), "v1: m1.m2.t1")
def1 = Define('v1', N(T('m1', T('T')), T('t1')))
self.assertEqual(str(def1), "v1: m1<T>.t1")
def test_001(self):
"""Composition Test"""
d = Overload(Fun(T("t1"), Tuple(T("t2"), T("t3"))),
Fun(T("t4"), Tuple(T("t2"), T("t4")))
)
self.assertEqual(str(d), "(t2, t3) -> t1\n& (t2, t4) -> t4")
with self.assertRaises(TypeError):
d = Overload(Overload(T("t2"), T("t3")))
with self.assertRaises(TypeError):
d = Overload(Union(T("t2"), T("t3")))
with self.assertRaises(TypeError):
d = Overload(Tuple(T("t2"), T("t3")))
with self.assertRaises(TypeError):
d = Fun(Overload(T("t2"), T("t3")))
with self.assertRaises(TypeError):
d = Union(Overload(T("t2"), T("t3")))
with self.assertRaises(TypeError):
d = Union(Fun(T("t2"), T("t3")))
with self.assertRaises(TypeError):
d = Tuple(Overload(T("t2"), T("t3")))
def test_002(self):
"""Constraints class tests"""
basic_a =Define("A", None)
cnt = Constraints([basic_a, Define("B", None), Define("C", None)])
self.assertEqual(cnt.get_def("A").name, "A", "Can't find a basic define")
self.assertEqual(cnt.get_def("B").name, "B", "Can't find a basic define")
cnt.push_context()
d = Define("A", Fun(T("t1"), T("t2")))
cnt.add_defines([d])
self.assertEqual(cnt.get_def("A"), d, "Can't find a basic define")
cnt.pop_context()
self.assertEqual(cnt.get_def("A"), basic_a, "Can't find a basic define")
cnt.push_context()
d1 = Define("A", Fun(T("t3"), T("t4")))
d2 = Define("B", Fun(T("t5"), T("t6")))
d3 = Define("Z", Fun(T("t7"), T("t8")))
d4 = Define("X", Fun(T("t9"), T("t10")))
cnt.add_defines([d1, d2, d3, d4])
self.assertEqual(cnt.get_def("X"), d4, "Can't find a basic define")
self.assertEqual(cnt.get_def("A"), d1, "Can't find a basic define")
self.assertEqual(cnt.get_def("Z"), d3, "Can't find a basic define")
self.assertEqual(cnt.get_def("B"), d2, "Can't find a basic define")
cnt.pop_context()
self.assertEqual(cnt.get_def("A"), basic_a, "Can't find a basic define")
def test_003(self):
"""Bind object and list
"""
cnt = Constraints()
b = Bind(None, cnt)
lstb = Bind.createList(cnt, b, 5)
self.assertEqual(b.td_depend, id(lstb[0]), "List return by createList seems buggy")
self.assertEqual(id(b), lstb[0].bu_depend, "List return by createList seems buggy")
self.assertEqual(lstb[0].td_depend, id(lstb[1]), "List return by createList seems buggy")
self.assertEqual(id(lstb[0]), lstb[1].bu_depend, "List return by createList seems buggy")
self.assertEqual(lstb[1].td_depend, id(lstb[2]), "List return by createList seems buggy")
self.assertEqual(id(lstb[1]), lstb[2].bu_depend, "List return by createList seems buggy")
self.assertEqual(lstb[2].td_depend, id(lstb[3]), "List return by createList seems buggy")
self.assertEqual(id(lstb[2]), lstb[3].bu_depend, "List return by createList seems buggy")
self.assertEqual(lstb[3].td_depend, id(lstb[4]), "List return by createList seems buggy")
self.assertEqual(id(lstb[3]), lstb[4].bu_depend, "List return by createList seems buggy")
self.assertEqual(lstb[4].td_depend, None, "List return by createList seems buggy")
to_png("ctx0.png", cnt)
lsbu = list(Bind.bu_walk(lstb[-1]))
lsburef = []
what = lstb[-1]
while what is not None:
(bid, nid) = (id(what), what.bu_depend)
lsburef.append((bid, nid))
if nid is not None:
what = what.cnt.get_bind_by_id(nid)
else:
what = None
self.assertEqual(lsbu, lsburef, "List walked by bu_walk seems buggy")
lstd = list(Bind.td_walk(b))
lstdref = []
what = b
while what is not None:
(bid, nid) = (id(what), what.td_depend)
lstdref.append((bid, nid))
if nid is not None:
what = what.cnt.get_bind_by_id(nid)
else:
what = None
self.assertEqual(lstd, lstdref, "List walked by bu_walk seems buggy")
# Test it with a little grammar
test = TL4T()
res = test.parse("""
v = f(a, b, c, d);
""")
# get AST nodes
blockstmt = res
exprstmt = blockstmt.body[0]
eqexpr = exprstmt.expr
eq = eqexpr.call_expr
self.assertEqual(eq.value, '=', "bad access to parameter")
v = eqexpr.p[0]
self.assertEqual(v.value, 'v', "bad access to parameter")
funexpr = eqexpr.p[1]
f = funexpr.call_expr
self.assertEqual(f.value, 'f', "bad access to parameter")
a = funexpr.p[0]
self.assertEqual(a.value, 'a', "bad access to parameter")
b = funexpr.p[1]
self.assertEqual(b.value, 'b', "bad access to parameter")
c = funexpr.p[2]
self.assertEqual(c.value, 'c', "bad access to parameter")
# unification and grammar
# f: t2 -> t1
def1 = Define("f", Fun(T("t1"), T("t2")))
# a: t2
def2 = Define("a", Overload(T("t2")))
# g: t1
def3 = Define("g", Overload(T("t1")))
# f: ?0 -> ?0 -> ?0
p1 = UnknownName()
def4 = Define("=", Fun(p1, p1, p1))
# Test it with a little grammar
test = TL4T()
res = test.parse("""
g = f(a);
""")
txt = res.to_tl4t()
print(txt)
cnt = Constraints([def1, def2, def3, def4])
b = Bind(None, cnt)
res.populate(cnt, b)
print("-" * 10)
to_png("ctx1.png", cnt)
cnt.resolve()
def test_04(self):
"""Basic unification.
We assume the Binding (link item to type definition is done.
"""
# just unification for a Fun
overloads = Overload(
Fun(T("t1"), T("t2"), T("t3")),
Fun(T("t4"), T("t2"), T("t5"))
)
def_f = Define("f", overloads)
print(def_f)
# v = f(a, b)
def_a = Define("a", Overload(T("t1"), T("t2")))
def_b = Define("b", Overload(T("t3"), T("t0")))
def_v = Define("v", Overload(T("t1"), T("t4")))
####
fun_args = [def_v, def_a, def_b]
# make the product of all possible signature
selected_sig = []
arg_pos = [range(len(arg)) for arg in fun_args]
for types_tuple in product(*arg_pos):
print(types_tuple)
# make a proposition
possible_sig = [arg[idx] for arg, idx in zip(fun_args, types_tuple)]
print(possible_sig)
# if is good, take it
if possible_sig in def_f:
selected_sig.append(possible_sig)
print("possible sig: %s" % selected_sig)
def test_05(self):
test = TL4T()
res = test.parse("""
fun toto(x, y, z)
{
x = sqrt(y) / z * 2;
}
""")
#print("-" * 20)
#print(res.to_tl4t())
#print("-" * 20)
#print(res.to_yml())
print("-" * 20)
def_x = Define("x", UnknownName())
def_y = Define("y", UnknownName())
def_z = Define("z", UnknownName())
def_sqrt = Define("sqrt", Fun(T("float"), T("float")))
p = UnknownName()
def_eq = Define("=", Fun(p, p, p))
def_mul = Define("*", Fun(p, p, p))
def_div = Define("/", Fun(p, p, p))
cnt = Constraints([def_x, def_y, def_z, def_sqrt, def_eq, def_mul, def_div])
b = Bind.createListNodeItem(cnt)
bind_ast(res.walk(), cnt, b)
|
class Unifying_Test(unittest.TestCase):
def test_000(self):
'''Pretty Print Test'''
pass
def test_001(self):
'''Composition Test'''
pass
def test_002(self):
'''Constraints class tests'''
pass
def test_003(self):
'''Bind object and list
'''
pass
def test_04(self):
'''Basic unification.
We assume the Binding (link item to type definition is done.
'''
pass
def test_05(self):
pass
| 7 | 5 | 34 | 0 | 29 | 4 | 2 | 0.15 | 1 | 16 | 10 | 0 | 6 | 0 | 6 | 78 | 210 | 7 | 177 | 62 | 170 | 26 | 161 | 62 | 154 | 5 | 2 | 2 | 12 |
146,213 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/unify.py
|
tests.unify.Unifier
|
class Unifier:
def __init__(self, bind_depends):
self.cnt_ref = bind_depends[0].cnt
self.def_f = bind_depends[0]
if len(bind_depends) > 1:
self.def_args = bind_depends[1:]
print("UNIFIER: %d ?? %d" % (len(bind_depends), len(self.def_args)))
### unify algo
# TODO: tres crade
def unify_as_fun(self):
"""
On a pas le type de retour, il viendra par l'unification des types de retour possible
de la fonction avec le receveur de la fonction. Et ca sera fit_here
"""
fun_args = self.def_args
# make the product of all possible signature
selected_sig = Overload()
arg_pos = [range(len(arg.final_type)) for arg in fun_args]
for types_tuple in product(*arg_pos):
print(types_tuple)
# make a proposition
possible_sig = Fun(*[arg.final_type[idx] for arg, idx in zip(fun_args, types_tuple)])
print(possible_sig)
# if is good, take it
# unify a tuple or Fun with fun definition
t = self.def_f.final_type.unify(possible_sig, self.def_f, self.def_args)
if t is not None:
selected_sig.append(t)
print("END UNIFY %s" % selected_sig)
if type(selected_sig[0]) is UnknownName:
print("INSTANCIATE ON %s" % selected_sig[0].defname.type_def)
return selected_sig
def unify_as_term(self):
print("AS TERM")
return self.def_f.final_type
|
class Unifier:
def __init__(self, bind_depends):
pass
def unify_as_fun(self):
'''
On a pas le type de retour, il viendra par l'unification des types de retour possible
de la fonction avec le receveur de la fonction. Et ca sera fit_here
'''
pass
def unify_as_term(self):
pass
| 4 | 1 | 11 | 0 | 8 | 3 | 2 | 0.4 | 0 | 7 | 3 | 0 | 3 | 3 | 3 | 3 | 37 | 2 | 25 | 12 | 21 | 10 | 25 | 12 | 21 | 4 | 0 | 2 | 7 |
146,214 |
LionelAuroux/pyrser
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/LionelAuroux_pyrser/tests/gen_dsl.py
|
tests.gen_dsl.GenDsl_Test.test_03_number.Number
|
class Number(grammar.Grammar):
entry = "test"
grammar = """test = [ ['0'..'9']* | "coucou" ]
"""
|
class Number(grammar.Grammar):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 3 | 2 | 2 | 0 | 1 | 0 | 0 |
146,215 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/grammar/tl4t.py
|
tl4t.DeclFun
|
class DeclFun(DeclVar):
def __init__(self, name: str, t: str, p: [], block=None, variadic=False):
super().__init__(name, t)
self.variadic = variadic
self.p = p
if block is not None:
self.block = block
print("SIZE OF BLOCK %d" % len(block))
def to_tl4t(self) -> fmt.indentable:
params = []
if self.p is not None:
for p in self.p:
params.append(p.to_tl4t())
parenth = fmt.block('(', ')', fmt.sep(", ", params))
lsdecl = fmt.sep(
' ',
[
"fun",
fmt.sep('', [self.name, parenth]),
":",
self.t
]
)
lsblock = None
if hasattr(self, 'block'):
lsblock = fmt.sep("\n", [lsdecl, self.block.to_tl4t()])
else:
lsblock = fmt.end(";\n", lsdecl)
return lsblock
def walk(self) -> Node:
"""
TD descent
"""
yield ('fun', (it.walk() for it in self.p))
yield ('block', self.block.walk())
|
class DeclFun(DeclVar):
def __init__(self, name: str, t: str, p: [], block=None, variadic=False):
pass
def to_tl4t(self) -> fmt.indentable:
pass
def walk(self) -> Node:
'''
TD descent
'''
pass
| 4 | 1 | 11 | 0 | 10 | 1 | 2 | 0.09 | 1 | 6 | 4 | 0 | 3 | 3 | 3 | 42 | 37 | 2 | 32 | 12 | 28 | 3 | 23 | 12 | 19 | 4 | 5 | 2 | 7 |
146,216 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/functors.py
|
pyrser.parsing.functors.Error
|
class Error(Functor):
""" Raise an error. """
def __init__(self, msg: str, **kwargs):
self.msg = msg
self.kw = kwargs
def do_call(self, parser: BasicParser) -> bool:
parser.diagnostic.notify(
error.Severity.ERROR,
self.msg,
error.LocationInfo.from_stream(parser._stream, is_error=True)
)
raise parser.diagnostic
|
class Error(Functor):
''' Raise an error. '''
def __init__(self, msg: str, **kwargs):
pass
def do_call(self, parser: BasicParser) -> bool:
pass
| 3 | 1 | 5 | 0 | 5 | 0 | 1 | 0.09 | 1 | 4 | 2 | 0 | 2 | 2 | 2 | 4 | 14 | 2 | 11 | 5 | 8 | 1 | 7 | 5 | 4 | 1 | 1 | 0 | 2 |
146,217 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/functors.py
|
pyrser.parsing.functors.SkipIgnore
|
class SkipIgnore(Functor):
""" Call Ignore Convention primitive functor. """
def __init__(self, convention: str=""):
"""TODO: Could be better to implement Directive thru functors???"""
self.convention = convention
def do_call(self, parser: BasicParser) -> bool:
#if len(parser._ignores) > 0:
# parser._ignores[-1](parser)
parser.skip_ignore()
return True
|
class SkipIgnore(Functor):
''' Call Ignore Convention primitive functor. '''
def __init__(self, convention: str=""):
'''TODO: Could be better to implement Directive thru functors???'''
pass
def do_call(self, parser: BasicParser) -> bool:
pass
| 3 | 2 | 4 | 0 | 3 | 2 | 1 | 0.67 | 1 | 3 | 1 | 0 | 2 | 1 | 2 | 4 | 12 | 2 | 6 | 4 | 3 | 4 | 6 | 4 | 3 | 1 | 1 | 0 | 2 |
146,218 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/functors.py
|
pyrser.parsing.functors.DirectiveWrapper
|
class DirectiveWrapper(Functor, metaclass=MetaDirectiveWrapper):
""" functor to wrap begin/end directive """
def __init__(self, ):
Functor.__init__(self)
def checkParam(self, params: list):
if (not hasattr(self.__class__, 'begin') or
not hasattr(self.__class__, 'end')):
return False
sbegin = inspect.signature(self.begin)
send = inspect.signature(self.end)
idx = 0
for param in list(sbegin.parameters.values())[1:]:
if idx >= len(params) and param.default is inspect.Parameter.empty:
raise RuntimeError("{}: No parameter given to begin"
" method for argument {}, expected {}".
format(
self.__class__.__name__,
idx, param.annotation))
elif (idx < len(params)
and not isinstance(params[idx], param.annotation)):
raise TypeError(
"{}: Wrong parameter in begin method parameter {} "
"expected {} got {}".format(
self.__class__.__name__,
idx, type(params[idx]),
param.annotation))
idx += 1
idx = 0
for param in list(send.parameters.values())[1:]:
if idx >= len(params) and param.default is inspect.Parameter.empty:
raise RuntimeError("{}: No parameter given to end"
" method for argument {}, expected {}".
format(
self.__class__.__name__,
idx, param.annotation))
elif (idx < len(params)
and not isinstance(params[idx], param.annotation)):
raise TypeError(
"{}: Wrong parameter in end method parameter {} "
"expected {} got {}".format(
self.__class__.__name__,
idx, type(params[idx]),
param.annotation))
idx += 1
return True
def begin(self):
pass
def end(self):
pass
|
class DirectiveWrapper(Functor, metaclass=MetaDirectiveWrapper):
''' functor to wrap begin/end directive '''
def __init__(self, ):
pass
def checkParam(self, params: list):
pass
def begin(self):
pass
def end(self):
pass
| 5 | 1 | 13 | 1 | 12 | 0 | 3 | 0.02 | 2 | 4 | 0 | 1 | 4 | 0 | 4 | 20 | 56 | 7 | 48 | 9 | 43 | 1 | 25 | 9 | 20 | 8 | 3 | 2 | 11 |
146,219 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/error.py
|
pyrser.error.LocationInfo
|
class LocationInfo:
def __init__(self, filepath: str, line: int, col: int, size: int=1):
self.filepath = filepath
self.line = line
self.col = col
self.size = size
@staticmethod
def from_stream(stream: 'Stream', is_error=False) -> 'LocationInfo':
if stream._name is None and is_error is True:
(fh, stream._name) = tempfile.mkstemp()
tmpf = os.fdopen(fh, 'w')
tmpf.write(stream._content)
tmpf.close()
atexit.register(os.remove, stream._name)
loc = LocationInfo(
stream._name,
stream._cursor.lineno,
stream._cursor.col_offset
)
return loc
@staticmethod
def from_maxstream(stream: 'Stream', is_error=False) -> 'LocationInfo':
if stream._name is None:
(fh, stream._name) = tempfile.mkstemp()
tmpf = os.fdopen(fh, 'w')
tmpf.write(stream._content)
tmpf.close()
atexit.register(os.remove, stream._name)
loc = LocationInfo(
stream._name,
stream._cursor._maxline,
stream._cursor._maxcol
)
return loc
@staticmethod
def from_here(pos=1):
f = inspect.currentframe()
fcaller = inspect.getouterframes(f)[pos]
rstr = r'(\s+).'
cl = re.compile(rstr)
call = fcaller[4][0]
m = cl.match(call)
current_file = os.path.abspath(fcaller[1])
li = LocationInfo(current_file, fcaller[2], len(m.group(1)) + 1)
return li
def get_content(self) -> str:
f = open(self.filepath, 'r')
lines = list(f)
f.close()
# by default the idiom list(f) don't count the last line if the last line is empty (nothing between last \n and EOF)
if self.line > 1:
lastindex = self.line - 1
else:
lastindex = 0
self.line = 1
if lastindex >= len(lines):
lastindex = len(lines) - 1
self.line = lastindex + 1
self.col = len(lines[lastindex])
txtline = lines[lastindex]
if txtline[-1] != '\n':
txtline += '\n'
indent = ' ' * (self.col - 1)
if self.size != 1:
indent += '~' * (self.size)
else:
indent += '^'
txt = "from {f} at line:{l} col:{c} :\n{content}{i}".format(
f=self.filepath,
content=txtline,
l=self.line,
c=self.col,
i=indent
)
return txt
|
class LocationInfo:
def __init__(self, filepath: str, line: int, col: int, size: int=1):
pass
@staticmethod
def from_stream(stream: 'Stream', is_error=False) -> 'LocationInfo':
pass
@staticmethod
def from_maxstream(stream: 'Stream', is_error=False) -> 'LocationInfo':
pass
@staticmethod
def from_here(pos=1):
pass
def get_content(self) -> str:
pass
| 9 | 0 | 14 | 0 | 14 | 0 | 2 | 0.01 | 0 | 3 | 0 | 0 | 2 | 4 | 5 | 5 | 79 | 4 | 74 | 33 | 65 | 1 | 55 | 30 | 49 | 5 | 0 | 1 | 11 |
146,220 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/error.py
|
pyrser.error.Notification
|
class Notification:
"""
Just One notification
"""
def __init__(self, severity: Severity, msg: str,
location: LocationInfo=None, details: str=None):
self.severity = severity
self.location = location
self.msg = msg
self.details = details
def get_content(self, with_locinfos=False, with_details=False) -> str:
sevtxt = ""
txt = "{s} : {msg}\n".format(
s=Severity.rmap[self.severity].lower(),
msg=self.msg
)
if with_locinfos and self.location is not None:
txt += self.location.get_content()
if with_details and self.details is not None:
txt += self.details
return txt
|
class Notification:
'''
Just One notification
'''
def __init__(self, severity: Severity, msg: str,
location: LocationInfo=None, details: str=None):
pass
def get_content(self, with_locinfos=False, with_details=False) -> str:
pass
| 3 | 1 | 9 | 0 | 9 | 0 | 2 | 0.17 | 0 | 2 | 1 | 0 | 2 | 4 | 2 | 2 | 22 | 1 | 18 | 10 | 14 | 3 | 14 | 9 | 11 | 3 | 0 | 1 | 4 |
146,221 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/fmt.py
|
pyrser.fmt.block
|
class block(indentable):
"""
for {}, (), []
"""
def __init__(self, beginby: str, endby: str, lsdata: list):
indentable.__init__(self)
self._beginby = beginby
self._endby = endby
if lsdata is None:
raise Exception("lsdata can't be None")
self._lsdata = lsdata
def to_str(self, res: str, parent_indent) -> str:
self.set_indent()
content = catend(res, self._beginby, parent_indent)
if isinstance(self._lsdata, indentable):
return catend(self._lsdata.to_str(content, self._indent),
self._endby, parent_indent)
if isinstance(self._lsdata, list):
content = list_to_str(self._lsdata, content, self._indent)
return catend(content, self._endby, parent_indent)
|
class block(indentable):
'''
for {}, (), []
'''
def __init__(self, beginby: str, endby: str, lsdata: list):
pass
def to_str(self, res: str, parent_indent) -> str:
pass
| 3 | 1 | 8 | 0 | 8 | 0 | 3 | 0.18 | 1 | 3 | 0 | 0 | 2 | 3 | 2 | 7 | 21 | 1 | 17 | 7 | 14 | 3 | 16 | 7 | 13 | 3 | 1 | 1 | 5 |
146,222 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/fmt.py
|
pyrser.fmt.end
|
class end(indentable):
"""
for all list that end by a char
"""
def __init__(self, ch: str, lsdata: list):
indentable.__init__(self)
self._ch = ch
if lsdata is None:
raise Exception("lsdata can't be None")
self._lsdata = lsdata
def to_str(self, res: str, parent_indent) -> str:
self.set_indent()
content = res
if isinstance(self._lsdata, indentable):
return catend(self._lsdata.to_str(res, self._indent), self._ch,
self._indent)
if isinstance(self._lsdata, list):
for i in self._lsdata:
if isinstance(i, indentable):
content = i.to_str(content, self._indent)
elif isinstance(i, list):
content = list_to_str(i, content, self._indent)
else:
content = catend(content, i, self._indent)
content = catend(content, self._ch, self._indent)
return content
|
class end(indentable):
'''
for all list that end by a char
'''
def __init__(self, ch: str, lsdata: list):
pass
def to_str(self, res: str, parent_indent) -> str:
pass
| 3 | 1 | 11 | 0 | 11 | 0 | 4 | 0.13 | 1 | 3 | 0 | 0 | 2 | 2 | 2 | 7 | 27 | 1 | 23 | 7 | 20 | 3 | 20 | 7 | 17 | 6 | 1 | 3 | 8 |
146,223 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/fmt.py
|
pyrser.fmt.indentable
|
class indentable:
"""
base of all fmt objects
"""
char_indent = " "
num_indent = 4
def __init__(self):
self._indent = 0
self._is_indented = False
self._lsdata = None
def to_str(self, res: str) -> str:
pass
def __str__(self):
self.set_indent()
strinit = (self.char_indent * self.num_indent) * (self._indent - 1)
return self.to_str(strinit, self._indent)
@property
def lsdata(self) -> list:
return self._lsdata
def set_indent(self, indent: int=1):
if self._is_indented:
return
self._indent = indent
if isinstance(self._lsdata, indentable):
self._lsdata.set_indent(self._indent)
if isinstance(self._lsdata, list):
list_set_indent(self._lsdata, self._indent)
self._is_indented = True
|
class indentable:
'''
base of all fmt objects
'''
def __init__(self):
pass
def to_str(self, res: str) -> str:
pass
def __str__(self):
pass
@property
def lsdata(self) -> list:
pass
def set_indent(self, indent: int=1):
pass
| 7 | 1 | 4 | 0 | 4 | 0 | 2 | 0.12 | 0 | 3 | 0 | 4 | 5 | 3 | 5 | 5 | 33 | 5 | 25 | 13 | 18 | 3 | 24 | 12 | 18 | 4 | 0 | 1 | 8 |
146,224 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/fmt.py
|
pyrser.fmt.sep
|
class sep(indentable):
"""
for all list seperated by a char
"""
def __init__(self, ch: str, lsdata: list):
indentable.__init__(self)
self._ch = ch
if lsdata is None:
raise Exception("lsdata can't be None")
self._lsdata = lsdata
def to_str(self, res: str, parent_indent) -> str:
self.set_indent()
content = res
if isinstance(self._lsdata, indentable):
return self._lsdata.to_str(res, self._indent)
if isinstance(self._lsdata, list):
sz = len(self._lsdata)
for i in range(sz):
if isinstance(self._lsdata[i], indentable):
content = self._lsdata[i].to_str(content, self._indent)
elif isinstance(self._lsdata[i], list):
content = list_to_str(self._lsdata[i], content,
self._indent)
else:
content = catend(content, self._lsdata[i], self._indent)
if i < sz - 1:
content = catend(content, self._ch, self._indent)
return content
|
class sep(indentable):
'''
for all list seperated by a char
'''
def __init__(self, ch: str, lsdata: list):
pass
def to_str(self, res: str, parent_indent) -> str:
pass
| 3 | 1 | 12 | 0 | 12 | 0 | 5 | 0.12 | 1 | 4 | 0 | 0 | 2 | 2 | 2 | 7 | 29 | 1 | 25 | 8 | 22 | 3 | 22 | 8 | 19 | 7 | 1 | 3 | 9 |
146,225 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/fmt.py
|
pyrser.fmt.tab
|
class tab(indentable):
"""
to handle indentation level
"""
def __init__(self, lsdata: indentable):
indentable.__init__(self)
if lsdata is None:
raise Exception("lsdata can't be None")
self._lsdata = lsdata
def set_indent(self, indent: int=1):
if self._is_indented:
return
self._indent = indent + 1
if isinstance(self._lsdata, indentable):
self._lsdata.set_indent(self._indent)
if isinstance(self._lsdata, list):
list_set_indent(self._lsdata, self._indent)
self._is_indented = True
def to_str(self, res: str, parent_indent) -> str:
self.set_indent()
if isinstance(self._lsdata, indentable):
return self._lsdata.to_str(res, self._indent)
if isinstance(self._lsdata, list):
content = res
for i in self._lsdata:
if isinstance(i, indentable):
content = i.to_str(content, self._indent)
elif isinstance(i, list):
content = list_to_str(i, content, self._indent)
else:
content = catend(content, i, self._indent)
return content
|
class tab(indentable):
'''
to handle indentation level
'''
def __init__(self, lsdata: indentable):
pass
def set_indent(self, indent: int=1):
pass
def to_str(self, res: str, parent_indent) -> str:
pass
| 4 | 1 | 9 | 0 | 9 | 0 | 4 | 0.1 | 1 | 4 | 0 | 0 | 3 | 3 | 3 | 8 | 34 | 2 | 29 | 9 | 25 | 3 | 27 | 9 | 23 | 6 | 1 | 3 | 12 |
146,226 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/grammar.py
|
pyrser.grammar.Grammar
|
class Grammar(parsing.Parser, metaclass=MetaGrammar):
"""
Base class for all grammars.
This class turn any class A that inherit it into a grammar.
Taking the description of the grammar in parameter it will add
all what is what is needed for A to parse it.
"""
# Text grammar to generate parsing rules for this class.
grammar = None
# Name of the default rule to parse the grammar.
entry = None
# DSL parsing class
dsl_parser = dsl.EBNF
def after_parse(self, node: parsing.Node) -> parsing.Node:
"""
If you want to do some stuff after parsing, overload this...
"""
return node
def _do_parse(self, entry: str) -> parsing.Node:
if self.nstream == 0:
raise ValueError("No opened stream for reading."
+ " Check if you provide something "
+ "to read for method parse()"
+ " or a correct file for method parse_file()"
)
res = None
self.diagnostic = error.Diagnostic()
try:
res = self.eval_rule(entry)
except error.Diagnostic as d:
# User put an error rule
d.notify(
error.Severity.ERROR,
"Exception during the evaluation of '%s'" % self._lastRule,
error.LocationInfo.from_stream(
self._stream,
is_error=self.from_string
)
)
self.diagnostic = d
if not res:
# we fail to parse, but error is not set on the last rule
self.diagnostic.notify(
error.Severity.ERROR,
"Parse error in '%s'" % self._lastRule,
error.LocationInfo.from_maxstream(
self._stream,
is_error=self.from_string
)
)
if self.raise_diagnostic:
raise self.diagnostic
else:
return self
# clear contexted variables
self.rule_nodes.clear()
# create a new Diagnostic object for the node result
res.diagnostic = error.Diagnostic()
# all is ok
return self.after_parse(res)
def parse(self, source: str=None, entry: str=None) -> parsing.Node:
"""Parse source using the grammar"""
self.from_string = True
if source is not None:
self.parsed_stream(source)
if entry is None:
entry = self.entry
if entry is None:
raise ValueError("No entry rule name defined for {}".format(
self.__class__.__name__))
return self._do_parse(entry)
def parse_file(self, filename: str, entry: str=None) -> parsing.Node:
"""Parse filename using the grammar"""
self.from_string = False
import os.path
with open(filename, 'r') as f:
self.parsed_stream(f.read(), os.path.abspath(filename))
if entry is None:
entry = self.entry
if entry is None:
raise ValueError("No entry rule name defined for {}".format(
self.__class__.__name__))
return self._do_parse(entry)
|
class Grammar(parsing.Parser, metaclass=MetaGrammar):
'''
Base class for all grammars.
This class turn any class A that inherit it into a grammar.
Taking the description of the grammar in parameter it will add
all what is what is needed for A to parse it.
'''
def after_parse(self, node: parsing.Node) -> parsing.Node:
'''
If you want to do some stuff after parsing, overload this...
'''
pass
def _do_parse(self, entry: str) -> parsing.Node:
pass
def parse(self, source: str=None, entry: str=None) -> parsing.Node:
'''Parse source using the grammar'''
pass
def parse_file(self, filename: str, entry: str=None) -> parsing.Node:
'''Parse filename using the grammar'''
pass
| 5 | 4 | 18 | 0 | 15 | 3 | 3 | 0.3 | 2 | 5 | 3 | 18 | 4 | 4 | 4 | 52 | 88 | 5 | 64 | 16 | 58 | 19 | 43 | 12 | 37 | 5 | 5 | 2 | 13 |
146,227 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/grammar.py
|
pyrser.grammar.MetaGrammar
|
class MetaGrammar(parsing.MetaBasicParser):
"""Metaclass for all grammars."""
def __new__(metacls, name, bases, namespace):
# for multi heritance we have a simple inheritance relation
# from the first class in declaration order.
metabp = parsing.MetaBasicParser
if len(bases) <= 1:
cls = metabp.__new__(metacls, name, bases, namespace)
else:
b = tuple([bases[0]])
cls = metabp.__new__(metacls, name, b, namespace)
# lookup for the metaclass of parsing.
# Grammar magically inherit rules&hooks from Parser
if 'Parser' in parsing.base._MetaBasicParser:
clsbase = parsing.base._MetaBasicParser['Parser']
# link rules&hooks
cls._rules = clsbase._rules.new_child()
cls._hooks = clsbase._hooks.new_child()
# add rules from DSL
if 'grammar' in namespace and namespace['grammar'] is not None:
sname = None
if 'source' in namespace and namespace['source'] is not None:
sname = namespace['source']
rules = cls.dsl_parser(namespace['grammar'], sname).get_rules()
if not rules:
return rules
# namespace rules with module/classe name
for rule_name, rule_pt in rules.items():
if '.' not in rule_name:
rule_name = cls.__module__ \
+ '.' + cls.__name__ \
+ '.' + rule_name
meta.set_one(cls._rules, rule_name, rule_pt)
# add localy define rules (and thus overloads)
if '_rules' in namespace and namespace['_rules'] is not None:
cls._rules.update(namespace['_rules'])
# add localy define hooks
if '_hooks' in namespace and namespace['_hooks'] is not None:
cls._hooks.update(namespace['_hooks'])
# Manage Aggregation
if len(bases) > 1:
aggreg_rules = ChainMap()
aggreg_hooks = ChainMap()
for subgrammar in bases:
if hasattr(subgrammar, '_rules'):
aggreg_rules = ChainMap(*(aggreg_rules.maps
+ subgrammar._rules.maps))
if hasattr(subgrammar, '_hooks'):
aggreg_hooks = ChainMap(*(aggreg_hooks.maps
+ subgrammar._hooks.maps))
# aggregate at toplevel the branch grammar
cls._rules = ChainMap(*(cls._rules.maps + aggreg_rules.maps))
cls._hooks = ChainMap(*(cls._hooks.maps + aggreg_hooks.maps))
# clean redondant in chain for rules
orderedunique_rules = []
tocpy_rules = set([id(_) for _ in cls._rules.maps])
for ch in cls._rules.maps:
idch = id(ch)
if idch in tocpy_rules:
orderedunique_rules.append(ch)
tocpy_rules.remove(idch)
cls._rules = ChainMap(*orderedunique_rules)
# clean redondant in chain for hooks
orderedunique_hooks = []
tocpy_hooks = set([id(_) for _ in cls._hooks.maps])
for ch in cls._hooks.maps:
idch = id(ch)
if idch in tocpy_hooks:
orderedunique_hooks.append(ch)
tocpy_hooks.remove(idch)
cls._hooks = ChainMap(*orderedunique_hooks)
return cls
|
class MetaGrammar(parsing.MetaBasicParser):
'''Metaclass for all grammars.'''
def __new__(metacls, name, bases, namespace):
pass
| 2 | 1 | 70 | 0 | 57 | 13 | 18 | 0.24 | 1 | 3 | 0 | 1 | 1 | 0 | 1 | 15 | 72 | 0 | 58 | 18 | 56 | 14 | 53 | 18 | 51 | 18 | 3 | 4 | 18 |
146,228 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/base.py
|
pyrser.parsing.base.BasicParser
|
class BasicParser(metaclass=MetaBasicParser):
"""Empty basic parser, contains no rule nor hook.
Unless you know what you are doing, use Parser instead of this class.
"""
_rules = collections.ChainMap()
_hooks = collections.ChainMap()
def __init__(
self,
content: str='',
stream_name: str=None,
raise_diagnostic=True
):
self._ignores = [BasicParser.ignore_blanks]
self._streams = [Stream(content, stream_name)]
self.rule_nodes = None
self.push_rule_nodes()
self._lastIgnoreIndex = 0
self._lastIgnore = False
self._lastRule = ""
self.raise_diagnostic = raise_diagnostic
self.diagnostic = error.Diagnostic()
### READ ONLY @property
def __bool__(self):
return self.diagnostic is False
@property
def _stream(self) -> Stream:
"""The current Stream."""
return self._streams[-1]
@property
def nstream(self) -> int:
"""Return the number of opened stream"""
return len(self._streams)
@property
def rules(self) -> dict:
"""
Return the grammar dict
"""
return self._rules
### Rule Nodes
def push_rule_nodes(self) -> bool:
"""Push context variable to store rule nodes."""
if self.rule_nodes is None:
self.rule_nodes = collections.ChainMap()
self.tag_cache = collections.ChainMap()
self.id_cache = collections.ChainMap()
else:
self.rule_nodes = self.rule_nodes.new_child()
self.tag_cache = self.tag_cache.new_child()
self.id_cache = self.id_cache.new_child()
return True
def pop_rule_nodes(self) -> bool:
"""Pop context variable that store rule nodes"""
self.rule_nodes = self.rule_nodes.parents
self.tag_cache = self.tag_cache.parents
self.id_cache = self.id_cache.parents
return True
def value(self, n: Node) -> str:
"""Return the text value of the node"""
id_n = id(n)
idcache = self.id_cache
if id_n not in idcache:
return ""
name = idcache[id_n]
tag_cache = self.tag_cache
if name not in tag_cache:
raise Exception("Incoherent tag cache")
tag = tag_cache[name]
k = "%d:%d" % (tag._begin, tag._end)
valcache = self._streams[-1].value_cache
if k not in valcache:
valcache[k] = str(tag)
return valcache[k]
### STREAM
    def parsed_stream(self, content: str, name: str=None):
        """Push a new Stream into the parser.
        All subsequent called functions will parse this new stream,
        until the 'pop_stream' function is called.
        """
        self._streams.append(Stream(content, name))
    def pop_stream(self):
        """Pop the last Stream pushed on to the parser stack.

        Also hands the stream to clean_tmp — presumably to release
        per-stream temporary data; TODO confirm against clean_tmp.
        """
        s = self._streams.pop()
        self.clean_tmp(s)
### VARIABLE PRIMITIVES
    def begin_tag(self, name: str) -> bool:
        """Open a Tag named *name* at the current stream index."""
        # Tag cache lives in the current rule-node scope (ChainMap child).
        self.tag_cache[name] = Tag(self._stream, self._stream.index)
        return True
    def end_tag(self, name: str) -> bool:
        """Close the Tag *name* at the current stream index."""
        self.tag_cache[name].set_end(self._stream.index)
        return True
    def get_tag(self, name: str) -> Tag:
        """Return the Tag previously recorded under *name*."""
        return self.tag_cache[name]
    def tag_node(self, name: str, node: Node) -> None:
        # Remember which tag carries the text captured for this node;
        # consumed later by self.value(node).
        self.id_cache[id(node)] = name
####
    @classmethod
    def set_rules(cls, rules: dict) -> bool:
        """
        Merge internal rules set with the given rules.
        Unqualified names (no '.') are namespaced as
        '<module>.<class>.<rule>' before insertion.
        """
        # New ChainMap child: subclass rules shadow inherited ones.
        cls._rules = cls._rules.new_child()
        for rule_name, rule_pt in rules.items():
            if '.' not in rule_name:
                rule_name = cls.__module__ \
                    + '.' + cls.__name__ \
                    + '.' + rule_name
            meta.set_one(cls._rules, rule_name, rule_pt)
        return True
    @classmethod
    def set_hooks(cls, hooks: dict) -> bool:
        """
        Merge internal hooks set with the given hooks.
        Unqualified names are namespaced like in set_rules.
        """
        cls._hooks = cls._hooks.new_child()
        for hook_name, hook_pt in hooks.items():
            if '.' not in hook_name:
                hook_name = cls.__module__ \
                    + '.' + cls.__name__ \
                    + '.' + hook_name
            meta.set_one(cls._hooks, hook_name, hook_pt)
        return True
    @classmethod
    def set_directives(cls, directives: dict) -> bool:
        """
        Merge internal directives set with the given directives.
        For working directives, attach it only in the dsl.Parser class.
        """
        # Directives are global (stored on meta), not per-parser-class.
        meta._directives = meta._directives.new_child()
        for dir_name, dir_pt in directives.items():
            meta.set_one(meta._directives, dir_name, dir_pt)
            dir_pt.ns_name = dir_name
        return True
    def eval_rule(self, name: str) -> Node:
        """Evaluate the rule *name*.

        Returns the rule's result node ('_' in the current scope) on
        success, a falsy value on failure; raises the diagnostic for
        unknown rule names.
        """
        # context created by caller (push_rule_nodes)
        n = Node()
        id_n = id(n)
        self.rule_nodes['_'] = n
        self.id_cache[id_n] = '_'
        # TODO: other behavior for empty rules?
        if name not in self.__class__._rules:
            self.diagnostic.notify(
                error.Severity.ERROR,
                "Unknown rule : %s" % name,
                error.LocationInfo.from_stream(self._stream, is_error=True)
            )
            raise self.diagnostic
        self._lastRule = name
        rule_to_eval = self.__class__._rules[name]
        # TODO: add packrat cache here, same rule - same pos == same res
        res = rule_to_eval(self)
        if res:
            # On success, expose the '_' node the rule (or its hooks) built.
            res = self.rule_nodes['_']
        return res
    def eval_hook(self, name: str, ctx: list) -> bool:
        """Evaluate the hook *name* with arguments *ctx*.

        Raises the diagnostic for unknown hooks, and TypeError when the
        hook implementation does not return a bool.
        """
        if name not in self.__class__._hooks:
            # TODO: don't always throw error, could have return True by default
            self.diagnostic.notify(
                error.Severity.ERROR,
                "Unknown hook : %s" % name,
                error.LocationInfo.from_stream(self._stream, is_error=True)
            )
            raise self.diagnostic
        # '#name' marks the last evaluated unit as a hook in error reports.
        self._lastRule = '#' + name
        res = self.__class__._hooks[name](self, *ctx)
        if type(res) is not bool:
            raise TypeError("Your hook %r didn't return a bool value" % name)
        return res
### PARSING PRIMITIVES
    def peek_char(self, c: str) -> bool:
        """True if the head byte equals *c*; never consumes input."""
        if self.read_eof():
            return False
        return self._stream.peek_char == c
    def peek_text(self, text: str) -> bool:
        """Same as read_text but doesn't consume the stream."""
        start = self._stream.index
        stop = start + len(text)
        if stop > self._stream.eos_index:
            return False
        return self._stream[self._stream.index:stop] == text
    def one_char(self) -> bool:
        """Consume exactly one byte of the stream (False at EOF)."""
        if self.read_eof():
            return False
        self._stream.incpos()
        return True
    def read_char(self, c: str) -> bool:
        """
        Consume the c head byte, increment current index and return True
        else return False. It use peekchar and it's the same as 'c' in BNF.
        """
        if self.read_eof():
            return False
        self._stream.save_context()
        if c == self._stream.peek_char:
            self._stream.incpos()
            return self._stream.validate_context()
        return self._stream.restore_context()
    def read_until(self, c: str, inhibitor='\\') -> bool:
        """
        Consume the stream up to and including the first unescaped *c*;
        *inhibitor* escapes the character that follows it.
        ex : if stream is " abcdef ", read_until("d"); consume "abcd".
        Returns False (and rewinds) when *c* is never found.
        """
        if self.read_eof():
            return False
        self._stream.save_context()
        while not self.read_eof():
            if self.peek_char(inhibitor):
                # Delete inhibitor and inhibited character
                self.one_char()
                self.one_char()
            if self.peek_char(c):
                self._stream.incpos()
                return self._stream.validate_context()
            self._stream.incpos()
        return self._stream.restore_context()
    def read_until_eof(self) -> bool:
        """Consume the remainder of the stream. Same as EOF in BNF."""
        if self.read_eof():
            return True
        # TODO: read ALL
        self._stream.save_context()
        while not self.read_eof():
            self._stream.incpos()
        return self._stream.validate_context()
    def read_text(self, text: str) -> bool:
        """
        Consume a strlen(text) text at current position in the stream
        else return False.
        Same as "text" in BNF
        ex : read_text("ls");.
        """
        if self.read_eof():
            return False
        self._stream.save_context()
        if self.peek_text(text):
            self._stream.incpos(len(text))
            return self._stream.validate_context()
        return self._stream.restore_context()
    def read_range(self, begin: str, end: str) -> bool:
        """
        Consume the head byte if begin <= byte <= end, else return False.
        Same as 'a'..'z' in BNF.
        (Annotation fixed: the method returns bool, not int.)
        """
        if self.read_eof():
            return False
        c = self._stream.peek_char
        if begin <= c <= end:
            self._stream.incpos()
            return True
        return False
### IGNORE CONVENTION
    def ignore_null(self) -> bool:
        """
        Empty ignore convention for notignore: skips nothing.
        """
        return True
    def ignore_blanks(self) -> bool:
        """Consume whitespace characters."""
        self._stream.save_context()
        # NOTE(review): the outer guard is redundant — the while loop
        # already handles the no-whitespace case and both code paths
        # validate the context; kept as-is to preserve bytes.
        if not self.read_eof() and self._stream.peek_char in " \t\v\f\r\n":
            while (not self.read_eof()
                    and self._stream.peek_char in " \t\v\f\r\n"):
                self._stream.incpos()
            return self._stream.validate_context()
        return self._stream.validate_context()
    def push_ignore(self, ignoreConvention) -> bool:
        """Push *ignoreConvention* as the active ignore convention."""
        self._ignores.append(ignoreConvention)
        return True
    def pop_ignore(self) -> bool:
        """Remove the last ignore convention"""
        self._ignores.pop()
        return True
    def skip_ignore(self) -> bool:
        """Apply the active ignore convention at the current position.

        Records whether anything was consumed (_lastIgnore) and where
        it stopped (_lastIgnoreIndex) so undo_last_ignore can rewind.
        """
        if len(self._ignores) > 0:
            self._ignores[-1](self)
        self._lastIgnore = (self._stream.index != self._lastIgnoreIndex)
        self._lastIgnoreIndex = self._stream.index
        return True
    def undo_last_ignore(self) -> bool:
        """Rewind the characters consumed by the last skip_ignore."""
        # TODO(iopi): wrong don't work in all case
        if (self._stream.index > self._lastIgnoreIndex):
            self._stream.decpos(self._stream.index - self._lastIgnoreIndex)
            self._lastIgnoreIndex = self._stream.index
        #self._lastIgnore = False
        return True
|
class BasicParser(metaclass=MetaBasicParser):
'''Empty basic parser, contains no rule nor hook.
Unless you know what you are doing, use Parser instead of this class.
'''
def __init__(
self,
content: str='',
stream_name: str=None,
raise_diagnostic=True
):
pass
def __bool__(self):
pass
@property
def _stream(self) -> Stream:
'''The current Stream.'''
pass
@property
def nstream(self) -> int:
'''Return the number of opened stream'''
pass
@property
def rules(self) -> dict:
'''
Return the grammar dict
'''
pass
def push_rule_nodes(self) -> bool:
'''Push context variable to store rule nodes.'''
pass
def pop_rule_nodes(self) -> bool:
'''Pop context variable that store rule nodes'''
pass
def value(self, n: Node) -> str:
'''Return the text value of the node'''
pass
def parsed_stream(self, content: str, name: str=None):
'''Push a new Stream into the parser.
All subsequent called functions will parse this new stream,
until the 'popStream' function is called.
'''
pass
def pop_stream(self):
'''Pop the last Stream pushed on to the parser stack.'''
pass
def begin_tag(self, name: str) -> Node:
'''Save the current index under the given name.'''
pass
def end_tag(self, name: str) -> Node:
'''Extract the string between saved and current index.'''
pass
def get_tag(self, name: str) -> Tag:
'''Extract the string previously saved.'''
pass
def tag_node(self, name: str, node: Node):
pass
@classmethod
def set_rules(cls, rules: dict) -> bool:
'''
Merge internal rules set with the given rules
'''
pass
@classmethod
def set_hooks(cls, hooks: dict) -> bool:
'''
Merge internal hooks set with the given hooks
'''
pass
@classmethod
def set_directives(cls, directives: dict) -> bool:
'''
Merge internal directives set with the given directives.
For working directives, attach it only in the dsl.Parser class
'''
pass
def eval_rule(self, name: str) -> Node:
'''Evaluate a rule by name.'''
pass
def eval_hook(self, name: str, ctx: list) -> Node:
'''Evaluate the hook by its name'''
pass
def peek_char(self, c: str) -> bool:
pass
def peek_text(self, text: str) -> bool:
'''Same as readText but doesn't consume the stream.'''
pass
def one_char(self) -> bool:
'''Read one byte in stream'''
pass
def read_char(self, c: str) -> bool:
'''
Consume the c head byte, increment current index and return True
else return False. It use peekchar and it's the same as '' in BNF.
'''
pass
def read_until(self, c: str, inhibitor='\\') -> bool:
'''
Consume the stream while the c byte is not read, else return false
ex : if stream is " abcdef ", read_until("d"); consume "abcd".
'''
pass
def read_until_eof(self) -> bool:
'''Consume all the stream. Same as EOF in BNF.'''
pass
def read_text(self, text: str) -> bool:
'''
Consume a strlen(text) text at current position in the stream
else return False.
Same as "" in BNF
ex : read_text("ls");.
'''
pass
def read_range(self, begin: str, end: str) -> int:
'''
Consume head byte if it is >= begin and <= end else return false
Same as 'a'..'z' in BNF
'''
pass
def ignore_null(self) -> bool:
'''
Empty ignore convention for notignore
'''
pass
def ignore_blanks(self) -> bool:
'''Consume whitespace characters.'''
pass
def push_ignore(self, ignoreConvention) -> bool:
'''Set the ignore convention'''
pass
def pop_ignore(self) -> bool:
'''Remove the last ignore convention'''
pass
def skip_ignore(self) -> bool:
pass
def undo_last_ignore(self) -> bool:
pass
| 40 | 28 | 8 | 0 | 6 | 2 | 2 | 0.34 | 1 | 13 | 5 | 1 | 30 | 10 | 33 | 47 | 334 | 42 | 218 | 76 | 173 | 75 | 193 | 65 | 159 | 5 | 3 | 2 | 65 |
146,229 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/error.py
|
pyrser.error.Diagnostic
|
class Diagnostic(Exception):
    """
    The diagnostic object is use to handle easily
    all errors/warnings/infos in a compiler that you could
    encounter. Error while parsing, Error while type checking etc...
    You could use different severity for your notification.

    It is an Exception so it can be raised directly once it carries
    errors; it is truthy when at least one ERROR was recorded.
    """
    def __init__(self):
        # Ordered list of Notification objects.
        self.logs = []
    def __bool__(self):
        return self.have_errors
    def __str__(self) -> str:
        return self.get_content(with_details=True)
    def notify(self, severity: Severity, msg: str,
               location: LocationInfo=None, details: str=None) -> int:
        """Record a new notification; return its index in the logs."""
        nfy = Notification(severity, msg, location, details)
        self.logs.append(nfy)
        return len(self.logs) - 1
    def add(self, n: Notification) -> int:
        """Append an existing Notification; return its index in the logs."""
        if not isinstance(n, Notification):
            raise TypeError("Must be a notification")
        self.logs.append(n)
        return len(self.logs) - 1
    def get_content(self, with_locinfos=True, with_details=False) -> str:
        """Render every notification as one '='/'-' decorated report."""
        # TODO: First an update Error Infos and then get_content only retrieve calculate data
        ls = [v.get_content(with_locinfos, with_details) for v in self.logs]
        txt = ('=' * 79) + '\n'
        txt += ('\n' + ('-' * 79) + '\n').join(ls)
        txt += '\n' + ('-' * 79)
        return txt
    def get_infos(self) -> dict:
        """Return a {Severity: count} histogram over the logs."""
        infos = {s: 0 for s in Severity.map.values()}
        for v in self.logs:
            infos[v.severity] += 1
        return infos
    @property
    def have_errors(self) -> bool:
        """True when at least one ERROR notification was recorded.

        Bug fix: the loop previously fell through without a return,
        yielding None — so bool(diagnostic) raised TypeError ('__bool__
        should return bool') on a diagnostic without errors.
        """
        for v in self.logs:
            if v.severity == Severity.ERROR:
                return True
        return False
|
class Diagnostic(Exception):
'''
The diagnostic object is use to handle easily
all errors/warnings/infos in a compiler that you could
encounter. Error while parsing, Error while type checking etc...
You could use different severity for your notification.
'''
def __init__(self):
pass
def __bool__(self):
pass
def __str__(self) -> str:
pass
def notify(self, severity: Severity, msg: str,
location: LocationInfo=None, details: str=None) -> int:
pass
def add(self, n: Notification) -> int:
pass
def get_content(self, with_locinfos=True, with_details=False) -> str:
pass
def get_infos(self) -> {Severity, int}:
pass
@property
def have_errors(self) -> bool:
pass
| 10 | 1 | 5 | 0 | 5 | 0 | 2 | 0.18 | 1 | 7 | 2 | 0 | 8 | 1 | 8 | 18 | 52 | 7 | 38 | 20 | 27 | 7 | 36 | 18 | 27 | 3 | 3 | 2 | 14 |
146,230 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/base.py
|
pyrser.parsing.base.MetaBasicParser
|
class MetaBasicParser(type):
    """Metaclass for all parsers.

    Registers every created parser class in the global _MetaBasicParser
    registry and lets a subclass inherit its single parent's '_rules'
    and '_hooks' as ChainMap children (locals shadow inherited entries).
    """
    def __new__(metacls, name, bases, namespace):
        global _MetaBasicParser
        # create the metaclass instance
        cls = type.__new__(metacls, name, bases, namespace)
        # search metaclass instance of all base
        if len(bases) > 1:
            raise TypeError("%s must inherit from an unique parent,"
                            " use Grammar for aggregation" % name)
        # Manage inheritance of Parser
        if len(bases) == 1:
            strbase = bases[0].__name__
            if strbase not in _MetaBasicParser:
                raise TypeError("metaclass of %s not found"
                                % bases[0].__name__)
            # we inherit from an already constructed parser, so get metaclass
            clsbase = _MetaBasicParser[strbase]
            # inherit rules from parser
            if hasattr(clsbase, '_rules'):
                cls._rules = clsbase._rules.new_child()
            # inherit hooks from parser
            if hasattr(clsbase, '_hooks'):
                cls._hooks = clsbase._hooks.new_child()
        # add localy defined rules
        if '_rules' in namespace:
            cls._rules.update(namespace['_rules'])
        # add localy defined hooks
        if '_hooks' in namespace:
            cls._hooks.update(namespace['_hooks'])
        # store in global registry
        _MetaBasicParser[name] = cls
        return cls
|
class MetaBasicParser(type):
'''Metaclass for all parser.'''
def __new__(metacls, name, bases, namespace):
pass
| 2 | 1 | 31 | 0 | 22 | 9 | 8 | 0.43 | 1 | 1 | 0 | 2 | 1 | 0 | 1 | 14 | 33 | 0 | 23 | 6 | 20 | 10 | 21 | 6 | 18 | 8 | 2 | 2 | 8 |
146,231 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/test_meta.py
|
tests.pyrser.test_meta.TestCheckTypes
|
class TestCheckTypes(unittest.TestCase):
    """Tests for the meta.checktypes decorator: annotation-driven
    runtime type checks of defaults, parameters, variadics, keyword
    variadics and return values."""
    def test_it_calls_function_without_annotation_normally(self):
        @meta.checktypes
        def f(a):
            return [1, 2, 3]
        self.assertEqual([1, 2, 3], f(0))
    def test_it_calls_function_with_non_type_annotation_normally(self):
        # Non-type annotations (here a predicate lambda) must not break calls.
        @meta.checktypes
        def f(a: (lambda x: 5 < x < 11)):
            return [1, 2, 3]
        self.assertEqual([1, 2, 3], f(0))
    def test_it_calls_function_with_type_annotation_normally(self):
        @meta.checktypes
        def f(a: int, *args: int, b: str, **kwargs: str) -> [int]:
            return [1, 2, 3]
        self.assertEqual([1, 2, 3], f(0, 1, b='', c=''))
    def test_it_raises_valueerror_for_incorrect_default_value_type(self):
        with self.assertRaises(ValueError):
            @meta.checktypes
            def f(a: int='9'):
                pass
    def test_it_raises_valueerror_for_incorrect_parameter_type(self):
        with self.assertRaises(ValueError):
            @meta.checktypes
            def f(a: int):
                pass
            f('')
    def test_it_raises_valueerror_for_incorrect_variadic_type(self):
        with self.assertRaises(ValueError):
            @meta.checktypes
            def f(*args: int):
                pass
            f(1, 2, '')
    def test_it_raises_valueerror_for_incorrect_variadic_keyword_type(self):
        with self.assertRaises(ValueError):
            @meta.checktypes
            def f(**kwargs: int):
                pass
            f(a=1, b=2, c='')
    def test_it_raises_valueerror_for_incorrect_return_type(self):
        with self.assertRaises(ValueError):
            @meta.checktypes
            def f() -> int:
                return ''
            f()
|
class TestCheckTypes(unittest.TestCase):
def test_it_calls_function_without_annotation_normally(self):
pass
@meta.checktypes
def f(a):
pass
def test_it_calls_function_with_non_type_annotation_normally(self):
pass
@meta.checktypes
def f(a):
pass
def test_it_calls_function_with_type_annotation_normally(self):
pass
@meta.checktypes
def f(a):
pass
def test_it_raises_valueerror_for_incorrect_default_value_type(self):
pass
@meta.checktypes
def f(a):
pass
def test_it_raises_valueerror_for_incorrect_parameter_type(self):
pass
@meta.checktypes
def f(a):
pass
def test_it_raises_valueerror_for_incorrect_variadic_type(self):
pass
@meta.checktypes
def f(a):
pass
def test_it_raises_valueerror_for_incorrect_variadic_keyword_type(self):
pass
@meta.checktypes
def f(a):
pass
def test_it_raises_valueerror_for_incorrect_return_type(self):
pass
@meta.checktypes
def f(a):
pass
| 25 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 3 | 0 | 0 | 8 | 0 | 8 | 80 | 52 | 7 | 45 | 25 | 20 | 0 | 37 | 17 | 20 | 1 | 2 | 1 | 16 |
146,232 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/ir.py
|
pyrser.parsing.ir.Return
|
class Return(IR):
    """IR marker: a return statement yielding the last boolean state."""
|
class Return(IR):
'''Abstraction of a return statement. return the last boolean state.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 2 | 1 | 1 | 1 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
146,233 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/ir.py
|
pyrser.parsing.ir.RestoreCtx
|
class RestoreCtx(IR):
    """IR marker: restores the previously saved parsing context."""
|
class RestoreCtx(IR):
'''Restore previous parsing context.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 2 | 1 | 1 | 1 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
146,234 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/test_meta.py
|
tests.pyrser.test_meta.TestAddMethod
|
class TestAddMethod(unittest.TestCase):
    """Tests for meta.add_method: attaching a free function to a class
    without overwriting an existing attribute."""
    def test_it_attach_method_to_class(self):
        class cls:
            pass
        method = mock.Mock(__name__='method', __doc__='doc string')
        meta.add_method(cls)(method)
        self.assertIs(method, cls.method)
    def test_it_does_not_attach_method_if_attribute_exists(self):
        class cls:
            def method(self):
                pass
        method = mock.Mock(__name__='method')
        with self.assertRaises(AttributeError):
            meta.add_method(cls)(method)
|
class TestAddMethod(unittest.TestCase):
def test_it_attach_method_to_class(self):
pass
class cls:
def test_it_does_not_attach_method_if_attribute_exists(self):
pass
class cls:
def method(self):
pass
| 6 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 4 | 2 | 0 | 2 | 0 | 2 | 74 | 15 | 1 | 14 | 8 | 8 | 0 | 14 | 8 | 8 | 1 | 2 | 1 | 3 |
146,235 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/test_grammar.py
|
tests.pyrser.test_grammar.TestGrammar
|
class TestGrammar(unittest.TestCase):
    """Tests for pyrser.Grammar: DSL parsing at class-creation time and
    rule evaluation through parse()."""
    def test_it_parses_a_grammar_and_attach_parsing_rules(self):
        bnf = mock.Mock()
        rule = mock.Mock()
        dsl = mock.Mock()
        dsl.return_value.get_rules.return_value = {'rulename': rule}
        class Grammar(pyrser.Grammar):
            grammar = bnf
            dsl_parser = dsl
        dsl.assert_called_once_with(bnf)
        dsl.return_value.get_rules.assert_called_once_with()
        self.assertIs(rule, Grammar._rules['rulename'])
    def test_it_parses_source_using_rules(self):
        bnf = mock.Mock()
        rule = mock.Mock()
        dsl = mock.Mock()
        dsl.return_value.get_rules.return_value = {'rulename': rule}
        source = mock.Mock()
        class StubGrammar(pyrser.Grammar):
            entry = 'rulename'
            dsl_parser = dsl
        grammar = StubGrammar()
        grammar.parsed_stream = mock.Mock()
        grammar.eval_rule = mock.Mock()
        grammar.parse(source)
        # Bug fix: 'assert_call_once_with' is not a Mock assertion — the
        # misspelled attribute made these checks no-ops (and raises
        # AttributeError on modern unittest.mock), so parse() was never
        # actually verified.
        grammar.parsed_stream.assert_called_once_with(source)
        grammar.eval_rule.assert_called_once_with('rulename')
|
class TestGrammar(unittest.TestCase):
def test_it_parses_a_grammar_and_attach_parsing_rules(self):
pass
class Grammar(pyrser.Grammar):
def test_it_parses_source_using_rules(self):
pass
class StubGrammar(pyrser.Grammar):
| 5 | 0 | 15 | 2 | 13 | 0 | 1 | 0 | 1 | 3 | 2 | 0 | 2 | 0 | 2 | 74 | 32 | 5 | 27 | 17 | 22 | 0 | 27 | 17 | 22 | 1 | 2 | 0 | 2 |
146,236 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/directives/ignore.py
|
pyrser.directives.ignore.Ignore
|
class Ignore(parsing.DirectiveWrapper):
    """@ignore directive: selects the ignore convention the parser uses
    for the duration of the directive's scope."""
    def begin(self, parser, convention: str):
        # Dispatch table instead of a cascade of if-statements; an
        # unknown convention pushes nothing, exactly as before.
        known = {
            "null": parsing.Parser.ignore_null,
            "C/C++": parsing.Parser.ignore_cxx,
            "blanks": parsing.Parser.ignore_blanks,
        }
        skipper = known.get(convention)
        if skipper is not None:
            parser.push_ignore(skipper)
        return True
    def end(self, parser, convention: str):
        # Restore whatever convention was active before the directive.
        parser.pop_ignore()
        return True
|
class Ignore(parsing.DirectiveWrapper):
def begin(self, parser, convention: str):
pass
def end(self, parser, convention: str):
pass
| 3 | 0 | 6 | 0 | 6 | 0 | 3 | 0 | 1 | 2 | 1 | 0 | 2 | 0 | 2 | 22 | 13 | 1 | 12 | 3 | 9 | 0 | 12 | 3 | 9 | 4 | 4 | 1 | 5 |
146,237 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/parsing/test_stream.py
|
tests.pyrser.parsing.test_stream.TestParserStream
|
class TestParserStream(unittest.TestCase):
    """Tests for parsing.Stream: string-like access, cursor movement at
    newlines/EOF/BOF, and the save/restore/validate context stack."""
    def test_its_len_is_its_content_len(self):
        content = "some content"
        stream = parsing.Stream(content)
        self.assertEqual(len(content), len(stream))
    def test_its_content_can_be_accessed_like_a_string(self):
        content = "some content"
        stream = parsing.Stream(content)
        self.assertEqual(content[5], stream[5])
        self.assertEqual(content[2:], stream[2:])
    def test_it_increments_position(self):
        stream = parsing.Stream(" ")
        prev_pos = stream.index
        stream.incpos()
        self.assertLess(prev_pos, stream.index)
    def test_it_increments_position_on_newline(self):
        stream = parsing.Stream("\n")
        prev_line = stream.lineno
        stream.incpos()
        self.assertEqual(1, stream.col_offset)
        self.assertLess(prev_line, stream.lineno)
    def test_it_does_not_increment_position_passed_eof(self):
        stream = parsing.Stream("")
        pos = stream.index
        stream.incpos()
        self.assertEqual(pos, stream.index)
    def test_it_decrements_position(self):
        stream = parsing.Stream("a")
        stream._cursor.step_next_char()
        stream.decpos()
        self.assertEqual(0, stream.index)
    def test_it_decrements_position_on_newline(self):
        stream = parsing.Stream("\n")
        stream._cursor.step_next_line()
        stream._cursor.step_next_char()
        stream.decpos()
        self.assertEqual(1, stream.lineno)
    def test_it_does_not_decrement_position_before_bof(self):
        stream = parsing.Stream("")
        stream.decpos()
        self.assertEqual(0, stream.index)
    def test_it_saves_context(self):
        stream = parsing.Stream()
        contexts = stream._contexts
        nb_ctx = len(contexts)
        stream.save_context()
        self.assertEqual(nb_ctx + 1, len(contexts))
    def test_it_restore_context(self):
        stream = parsing.Stream()
        pos = Position(42, 0, 0)
        stream._contexts.insert(0, pos)
        stream.restore_context()
        self.assertEqual(pos.index, stream.index)
    def test_it_validates_context(self):
        stream = parsing.Stream()
        stream._contexts.insert(0, Position(42, 0, 0))
        stream.validate_context()
        self.assertEqual(0, stream.index)
|
class TestParserStream(unittest.TestCase):
def test_its_len_is_its_content_len(self):
pass
def test_its_content_can_be_accessed_like_a_string(self):
pass
def test_it_increments_position(self):
pass
def test_it_increments_position_on_newline(self):
pass
def test_it_does_not_increment_position_passed_eof(self):
pass
def test_it_decrements_position(self):
pass
def test_it_decrements_position_on_newline(self):
pass
def test_it_does_not_decrement_position_before_bof(self):
pass
def test_it_saves_context(self):
pass
def test_it_restore_context(self):
pass
def test_it_validates_context(self):
pass
| 12 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 11 | 0 | 11 | 83 | 68 | 10 | 58 | 31 | 46 | 0 | 58 | 31 | 46 | 1 | 2 | 0 | 11 |
146,238 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/parsing/test_seq.py
|
tests.pyrser.parsing.test_seq.TestSeq
|
class TestSeq(unittest.TestCase):
    """Tests for parsing.Seq: runs its clauses in order, skipping the
    ignore convention before each one, and short-circuits on failure."""
    def test_it_calls_skipIgnore_before_each_clause(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clause = mock.Mock(return_value=True)
        parser.clause = clause
        parsing.Seq(clause, clause)(parser)
        self.assertEqual(
            [mock.call.skip_ignore(), mock.call.clause(parser)] * 2,
            parser.mock_calls)
    def test_it_calls_all_clauses_in_order_if_each_clause_is_true(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clauses = mock.Mock(**{'clause0.return_value': True,
                               'clause1.return_value': True})
        parsing.Seq(clauses.clause0, clauses.clause1)(parser)
        self.assertEqual(
            [mock.call.clause0(parser), mock.call.clause1(parser)],
            clauses.mock_calls)
    def test_it_stops_calling_clauses_if_a_clause_is_false(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clauses = mock.Mock(**{'clause0.return_value': False,
                               'clause1.return_value': True})
        parsing.Seq(clauses.clause0, clauses.clause1)(parser)
        self.assertEqual([mock.call.clause0(parser)], clauses.mock_calls)
    def test_it_is_true_if_all_clauses_are_true(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clause = mock.Mock(return_value=True)
        clauses = parsing.Seq(clause, clause)
        self.assertTrue(clauses(parser))
    def test_is_is_false_if_any_clause_is_false(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clause0 = mock.Mock(return_value=True)
        clause1 = mock.Mock(return_value=False)
        clauses = parsing.Seq(clause0, clause1)
        self.assertFalse(clauses(parser))
    def test_it_raises_typeerror_with_no_clause(self):
        with self.assertRaises(TypeError):
            parsing.Seq()
|
class TestSeq(unittest.TestCase):
def test_it_calls_skipIgnore_before_each_clause(self):
pass
def test_it_calls_all_clauses_in_order_if_each_clause_is_true(self):
pass
def test_it_stops_calling_clauses_if_a_clause_is_false(self):
pass
def test_it_is_true_if_all_clauses_are_true(self):
pass
def test_is_is_false_if_any_clause_is_false(self):
pass
def test_it_raises_typeerror_with_no_clause(self):
pass
| 7 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 4 | 2 | 0 | 6 | 0 | 6 | 78 | 42 | 5 | 37 | 20 | 30 | 0 | 31 | 20 | 24 | 1 | 2 | 1 | 6 |
146,239 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/parsing/test_scope.py
|
tests.pyrser.parsing.test_scope.TestScope
|
class TestScope(unittest.TestCase):
    """Tests for parsing.Scope: begin clause, inner clause, end clause;
    the inner result is forwarded only when all three succeed."""
    def test_it_returns_the_clause_result_when_all_clauses_are_true(self):
        parser = parsing.BasicParser()
        begin_end = mock.Mock(return_value=True)
        clause = mock.Mock(return_value=mock.sentinel.clause)
        scope = parsing.Scope(begin_end, begin_end, clause)
        self.assertEqual(scope(parser), mock.sentinel.clause)
    def test_it_is_false_when_begin_clause_is_false(self):
        parser = parsing.BasicParser()
        begin = mock.Mock(return_value=False)
        clause = mock.Mock(return_value=True)
        scope = parsing.Scope(begin, clause, clause)
        self.assertFalse(scope(parser))
    def test_it_is_false_when_clause_is_false(self):
        parser = parsing.BasicParser()
        begin_end = mock.Mock(return_value=True)
        clause = mock.Mock(return_value=False)
        scope = parsing.Scope(begin_end, begin_end, clause)
        self.assertFalse(scope(parser))
    def test_it_is_false_when_end_clause_is_false(self):
        parser = parsing.BasicParser()
        clause = mock.Mock(return_value=True)
        end = mock.Mock(return_value=False)
        scope = parsing.Scope(clause, end, clause)
        self.assertFalse(scope(parser))
|
class TestScope(unittest.TestCase):
def test_it_returns_the_clause_result_when_all_clauses_are_true(self):
pass
def test_it_is_false_when_begin_clause_is_false(self):
pass
def test_it_is_false_when_clause_is_false(self):
pass
def test_it_is_false_when_end_clause_is_false(self):
pass
| 5 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 3 | 2 | 0 | 4 | 0 | 4 | 76 | 28 | 3 | 25 | 21 | 20 | 0 | 25 | 21 | 20 | 1 | 2 | 0 | 4 |
146,240 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/parsing/test_rule.py
|
tests.pyrser.parsing.test_rule.TestRule
|
class TestRule(unittest.TestCase):
    """parsing.Rule delegates to parser.eval_rule and forwards its
    truth value."""
    def test_it_evaluate_the_rule(self):
        name = 'rule'
        fake_parser = mock.Mock(spec=parsing.BasicParser)
        parsing.Rule(name)(fake_parser)
        fake_parser.eval_rule.assert_called_once_with(name)
    def test_it_is_true_when_the_rule_is_true(self):
        fake_parser = mock.Mock(spec=parsing.BasicParser)
        fake_parser.eval_rule.return_value = True
        self.assertTrue(parsing.Rule('rule')(fake_parser))
    def test_it_is_false_when_the_rule_is_false(self):
        fake_parser = mock.Mock(spec=parsing.BasicParser)
        fake_parser.eval_rule.return_value = False
        self.assertFalse(parsing.Rule('rule')(fake_parser))
|
class TestRule(unittest.TestCase):
def test_it_evaluate_the_rule(self):
pass
def test_it_is_true_when_the_rule_is_true(self):
pass
def test_it_is_false_when_the_rule_is_false(self):
pass
| 4 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 3 | 2 | 0 | 3 | 0 | 3 | 75 | 19 | 2 | 17 | 11 | 13 | 0 | 17 | 11 | 13 | 1 | 2 | 0 | 3 |
146,241 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/test_meta.py
|
tests.pyrser.test_meta.TestHook
|
class TestHook(unittest.TestCase):
    """Tests for meta.hook: attaching a method as a grammar hook."""
    def test_it_attach_method_as_hook_to_class(self):
        hooks = {}
        cls = mock.Mock(__name__='cls', _hooks=hooks)
        del cls.fn
        fn = mock.Mock(__name__='fn')
        meta.hook(cls)(fn)
        self.assertIs(fn, cls.fn)
        # Bug fix: 'assert_call_once_with' is not a Mock assertion — the
        # misspelled attribute checked nothing (AttributeError on modern
        # unittest.mock), so the registration call was never verified.
        cls.set_one.assert_called_once_with(hooks, 'cls.fn', fn)
    def test_it_attach_method_as_hook_to_class_with_rulename(self):
        hooks = {}
        cls = mock.Mock(__name__='cls', _hooks=hooks)
        del cls.fn
        fn = mock.Mock(__name__='fn')
        meta.hook(cls, 'hookname')(fn)
        self.assertIs(fn, cls.fn)
        cls.set_one.assert_called_once_with(hooks, 'cls.hookname', fn)
    def test_it_does_not_attach_a_hook_if_method_already_exist(self):
        cls = mock.Mock(__name__='cls')
        method = mock.Mock(__name__='method')
        with self.assertRaises(AttributeError):
            meta.hook(cls, 'rulename')(method)
|
class TestHook(unittest.TestCase):
def test_it_attach_method_as_hook_to_class(self):
pass
def test_it_attach_method_as_hook_to_class_with_rulename(self):
pass
def test_it_does_not_attach_a_hook_if_method_already_exist(self):
pass
| 4 | 0 | 7 | 0 | 7 | 0 | 1 | 0 | 1 | 2 | 0 | 0 | 3 | 0 | 3 | 75 | 24 | 2 | 22 | 12 | 18 | 0 | 22 | 12 | 18 | 1 | 2 | 1 | 3 |
146,242 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/tests/pyrser/parsing/test_repoptional.py
|
tests.pyrser.parsing.test_repoptional.TestRepOptional
|
class TestRepOptional(unittest.TestCase):
    """Tests for parsing.RepOptional ('?' in BNF): always succeeds,
    whether or not the wrapped clause matches."""
    def test_it_calls_skipIgnore_before_clause(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clause = mock.Mock(return_value=True)
        parser.clause = clause
        parsing.RepOptional(clause)(parser)
        self.assertEqual(
            [mock.call.skip_ignore(), mock.call.clause(parser)],
            parser.method_calls)
    def test_it_is_true_when_clause_is_true(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clause = mock.Mock(return_value=True)
        rep = parsing.RepOptional(clause)
        self.assertTrue(rep(parser))
    def test_it_is_true_when_clause_is_false(self):
        parser = mock.Mock(spec=parsing.BasicParser)
        clause = mock.Mock(return_value=False)
        rep = parsing.RepOptional(clause)
        self.assertTrue(rep(parser))
|
class TestRepOptional(unittest.TestCase):
def test_it_calls_skipIgnore_before_clause(self):
pass
def test_it_is_true_when_clause_is_true(self):
pass
def test_it_is_true_when_clause_is_false(self):
pass
| 4 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 3 | 2 | 0 | 3 | 0 | 3 | 75 | 21 | 2 | 19 | 12 | 15 | 0 | 17 | 12 | 13 | 1 | 2 | 0 | 3 |
146,243 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/dsl.py
|
pyrser.dsl.EBNF
|
class EBNF(parsing.Parser):
"""
Basic class for BNF DSL PARSING.
A full parser for the BNF is provided by this class.
We construct a tree to represents, thru functors, BNF semantics.
"""
def get_rules(self) -> parsing.Node:
"""
Parse the DSL and provide a dictionnaries of all resulting rules.
Call by the MetaGrammar class.
TODO: could be done in the rules property of parsing.BasicParser???
"""
res = None
try:
res = self.eval_rule('bnf_dsl')
if not res:
# we fail to parse, but error is not set
self.diagnostic.notify(
error.Severity.ERROR,
"Parse error in '%s' in EBNF bnf" % self._lastRule,
error.LocationInfo.from_maxstream(self._stream)
)
raise self.diagnostic
except error.Diagnostic as d:
d.notify(
error.Severity.ERROR,
"Parse error in '%s' in EBNF bnf" % self._lastRule
)
raise d
return res
@property
def rules(self) -> dict:
print("USE rules PROPERTY")
return self._rules
def __init__(self, content='', sname=None):
"""
Define the DSL parser.
"""
super().__init__(content, sname)
self.set_rules({
#
# bnf_dsl = [ @ignore("C/C++") bnf_stmts ]
# //todo: bnf_dsl = [ @ignore("C/C++") [bnf_stmts] eof ]
#
'bnf_dsl': parsing.Seq(
# tree is not already construct but Directive need it
# forward it thru a lambda
parsing.Directive(ignore.Ignore(),
[("C/C++", str)],
lambda parser:
self.__class__._rules['bnf_stmts'](parser)),
),
#
# bnf_stmts = [ [rule : r #add_rules(_, r) ]+ Base.eof ]
# //todo: bnf_stmts = [ [rule : r #add_rules(_, r) ]+]
#
'bnf_stmts': parsing.Seq(
parsing.Rep1N(parsing.Seq(
parsing.Capture("r", parsing.Rule('rule')),
parsing.Hook('add_rules', [("_", parsing.Node),
("r", parsing.Node)])
)),
parsing.Rule('Base.eof')
),
# TODO: add directive hooks / change ns_name by def_rule
#
# rule = [ ns_name : rn '=' '[' alternatives : alts
# #add_rule(_, rn, alts) ']' ]
#
'rule': parsing.Seq(
parsing.Capture("rn", parsing.Rule('ns_name')),
parsing.Alt(
parsing.Char("="),
parsing.Error("Expected '='")),
parsing.Alt(
parsing.Char("["),
parsing.Error("Expected '['")),
parsing.Capture("alts", parsing.Rule('alternatives')),
parsing.Hook('add_rule', [("_", parsing.Node),
("rn", parsing.Node),
("alts", parsing.Node)]),
parsing.Alt(
#parsing.Call(parsing.Parser.read_char, ']'),
parsing.Char(']'),
parsing.Error("Expected ']'"))
),
#
# alternatives =
# [
# sequences : alt #add_alt(_, alt)
# ['|' sequences : alt #add_alt(_, alt) ]*
# ]
#
'alternatives': parsing.Seq(
parsing.Capture('alt', parsing.Rule('sequences')),
parsing.Hook('add_alt', [("_", parsing.Node),
("alt", parsing.Node)]),
parsing.Rep0N(
parsing.Seq(
parsing.Char('|'),
parsing.Capture('alt', parsing.Rule('sequences')),
parsing.Hook('add_alt',
[("_", parsing.Node),
("alt", parsing.Node)])
)
)
),
#
# sequences = [ [ sequence : cla #add_sequences(_, cla) ]+ ]
#
'sequences': parsing.Rep1N(
parsing.Seq(
parsing.Capture('cla', parsing.Rule('sequence')),
parsing.Hook('add_sequences',
[("_", parsing.Node),
("cla", parsing.Node)])
)
),
#
# sequence = [
# [
# [ '~' | "!!" | '!' | "->" ]?: mod
# [ ns_name : rid #add_ruleclause_name(_, rid)
# | Base.string : txt #add_read_dqstring(_, txt)
# | Base.char : begin ".." Base.char : end
# #add_range(_, begin, end)
# | Base.char : c #add_read_sqstring(_, c)
# | '[' alternatives : subsequence ']'
# #add_subsequence(_, subsequence)
# ] #add_mod(_, mod)
# [ repeat : rpt #add_rpt(_, mod, rpt) ]?
# | hook : h #add_hook(_, h)
# | directive2 : d sequences : s #add_directive2(_, d, s)
# | directive : d sequences : s #add_directive(_, d, s)
# ]
# [
# ":>" Base.id : bind #add_bind(_, bind)
# | ':' Base.id : cpt #add_capture(_, cpt)
# ]?
# ]
#
'sequence':
parsing.Seq(
parsing.Alt(
parsing.Seq(
parsing.Capture(
'mod',
parsing.RepOptional(
parsing.Alt(
parsing.Char('~'),
parsing.Text('!!'),
parsing.Char('!'),
parsing.Text('->')
)
)
),
parsing.Alt(
parsing.Seq(
parsing.Capture(
'rid',
parsing.Rule('ns_name')
),
parsing.Hook('add_ruleclause_name',
[("_", parsing.Node),
("rid", parsing.Node)])
),
parsing.Seq(
parsing.Capture('txt',
parsing.Rule('Base.string')),
parsing.Hook('add_read_dqstring',
[("_", parsing.Node),
("txt", parsing.Node)])
),
parsing.Seq(
parsing.Capture('begin',
parsing.Rule('Base.char')),
parsing.Text(".."),
parsing.Capture(
'end',
parsing.Rule('Base.char')
),
parsing.Hook('add_range',
[("_", parsing.Node),
("begin", parsing.Node),
("end", parsing.Node)])
),
parsing.Seq(
parsing.Capture(
'c',
parsing.Rule('Base.char')
),
parsing.Hook('add_read_sqstring',
[("_", parsing.Node),
("c", parsing.Node)])
),
parsing.Seq(
parsing.Char('['),
parsing.Capture(
'subsequence',
parsing.Alt(
parsing.Rule('alternatives'),
parsing.Error("Expected sequences"))),
parsing.Alt(
parsing.Char(']'),
parsing.Error("Expected ']'")),
parsing.Hook('add_subsequence',
[("_", parsing.Node),
("subsequence", parsing.Node)]),
)
),
parsing.Hook('add_mod', [("_", parsing.Node),
("mod", parsing.Node)]),
parsing.RepOptional(
parsing.Seq(
parsing.Capture(
'rpt',
parsing.Rule('repeat')
),
parsing.Hook('add_rpt',
[("_", parsing.Node),
("mod", parsing.Node),
("rpt", parsing.Node)])
)
),
),
parsing.Seq(
parsing.Capture('h', parsing.Rule('hook')),
parsing.Hook('add_hook', [('_', parsing.Node),
('h', parsing.Node)])
),
parsing.Seq(
parsing.Capture('d', parsing.Rule('directive2')),
parsing.Capture('s', parsing.Rule('sequences')),
parsing.Hook('add_directive2', [('_', parsing.Node),
('d', parsing.Node),
('s', parsing.Node)])
),
parsing.Seq(
parsing.Capture('d', parsing.Rule('directive')),
parsing.Capture('s', parsing.Rule('sequences')),
parsing.Hook('add_directive', [('_', parsing.Node),
('d', parsing.Node),
('s', parsing.Node)])
)
),
parsing.RepOptional(
parsing.Alt(
parsing.Seq(
parsing.Text(':>'),
parsing.Capture(
'bind',
parsing.Rule('Base.id')),
parsing.Hook('add_bind',
[('_', parsing.Node),
('bind', parsing.Node)])
),
parsing.Seq(
parsing.Char(':'),
parsing.Capture(
'cpt',
parsing.Rule('Base.id')),
parsing.Hook('add_capture',
[('_', parsing.Node),
('cpt', parsing.Node)])
)
)
)
),
# ns_name = [ [@ignore("null") [ Base.id ['.' Base.id]* ]]: rid ]
#
'ns_name': parsing.Capture(
'rid',
parsing.Scope(
parsing.Call(parsing.Parser.push_ignore,
parsing.Parser.ignore_null),
parsing.Call(parsing.Parser.pop_ignore),
parsing.Seq(
parsing.Rule('Base.id'),
parsing.Rep0N(
parsing.Seq(
parsing.Char('.'),
parsing.Alt(
parsing.Rule('Base.id'),
parsing.Error(
"Expected identifier after '.'"))
)
)
)
)
),
#
# repeat = [ '?' #add_optional(_)
# | '*' #add_0N(_)
# | '+' #add_1N(_)
# ]
#
'repeat': parsing.Alt(
parsing.Seq(
parsing.Char('?'),
parsing.Hook('add_optional', [("_", parsing.Node)])
),
parsing.Seq(
parsing.Char('*'),
parsing.Hook('add_0N', [("_", parsing.Node)])
),
parsing.Seq(
parsing.Char('+'),
parsing.Hook('add_1N', [("_", parsing.Node)])
),
),
#
# hook = [ '#' ns_name : n #hook_name(_, n)
# ['(' [ param : p #hook_param(_, p)
# [',' param : p #hook_param(_, p)]*
# ]? ')']?
# ]
#
'hook': parsing.Seq(
parsing.Char('#'),
parsing.Capture('n', parsing.Rule('ns_name')),
parsing.Hook('hook_name',
[('_', parsing.Node), ('n', parsing.Node)]),
parsing.RepOptional(
parsing.Seq(
parsing.Char('('),
parsing.RepOptional(
parsing.Seq(
parsing.Capture(
'p',
parsing.Rule('param'),
),
parsing.Hook('hook_param', [('_', parsing.Node),
('p', parsing.Node)]),
parsing.Rep0N(
parsing.Seq(
parsing.Char(','),
parsing.Capture(
'p',
parsing.Alt(
parsing.Rule('param'),
parsing.Error("Expected parameter"))),
parsing.Hook('hook_param',
[('_', parsing.Node),
('p', parsing.Node)]))
)
)
),
parsing.Alt(
parsing.Char(')'),
parsing.Error("Expected ')'"))
)
),
),
#
# directive2 = [ '$' ns_name : n #hook_name(_, n)
# ['(' param : p #hook_param(_, p)
# [',' param : p #hook_param(_, p)]*
# ')']?
# ]
'directive2': parsing.Seq(
parsing.Char('$'),
parsing.Capture('n', parsing.Rule('ns_name')),
parsing.Hook('hook_name', [('_', parsing.Node),
('n', parsing.Node)]),
parsing.RepOptional(
parsing.Seq(
parsing.Char('('),
parsing.Capture(
'p',
parsing.Alt(
parsing.Rule('param'),
parsing.Error("Expected parameter"))),
parsing.Hook('hook_param',
[('_', parsing.Node),
('p', parsing.Node)]),
parsing.Rep0N(
parsing.Seq(
parsing.Char(','),
parsing.Capture(
'p',
parsing.Alt(
parsing.Rule('param'),
parsing.Error("Expected parameter"))),
parsing.Hook('hook_param',
[('_', parsing.Node),
('p', parsing.Node)]),
)
),
parsing.Alt(
parsing.Char(')'),
parsing.Error("Expected ')'"))
)
),
),
#
# directive = [ '@' ns_name : n #hook_name(_, n)
# ['(' param : p #hook_param(_, p)
# [',' param : p #hook_param(_, p)]*
# ')']?
# ]
'directive': parsing.Seq(
parsing.Char('@'),
parsing.Capture('n', parsing.Rule('ns_name')),
parsing.Hook('hook_name', [('_', parsing.Node),
('n', parsing.Node)]),
parsing.RepOptional(
parsing.Seq(
parsing.Char('('),
parsing.Capture(
'p',
parsing.Alt(
parsing.Rule('param'),
parsing.Error("Expected parameter"))),
parsing.Hook('hook_param',
[('_', parsing.Node),
('p', parsing.Node)]),
parsing.Rep0N(
parsing.Seq(
parsing.Char(','),
parsing.Capture(
'p',
parsing.Alt(
parsing.Rule('param'),
parsing.Error("Expected parameter"))),
parsing.Hook('hook_param',
[('_', parsing.Node),
('p', parsing.Node)]),
)
),
parsing.Alt(
parsing.Char(')'),
parsing.Error("Expected ')'"))
)
),
),
#
# param = [ Base.num :n #param_num(_, n)
# | Base.string : s #param_str(_, s)
# | Base.char : c #param_char(_, c)
# | ns_name : i #param_id(_, i)
# ]
#
'param': parsing.Alt(
parsing.Seq(
parsing.Capture('n', parsing.Rule('Base.num')),
parsing.Hook('param_num', [('_', parsing.Node),
('n', parsing.Node)])
),
parsing.Seq(
parsing.Capture('s', parsing.Rule('Base.string')),
parsing.Hook('param_str', [('_', parsing.Node),
('s', parsing.Node)])
),
parsing.Seq(
parsing.Capture('c', parsing.Rule('Base.char')),
parsing.Hook('param_char', [('_', parsing.Node),
('c', parsing.Node)])
),
parsing.Seq(
parsing.Capture('i', parsing.Rule('ns_name')),
parsing.Hook('param_id', [('_', parsing.Node),
('i', parsing.Node)])
),
),
})
|
class EBNF(parsing.Parser):
'''
Basic class for BNF DSL PARSING.
A full parser for the BNF is provided by this class.
We construct a tree to represents, thru functors, BNF semantics.
'''
def get_rules(self) -> parsing.Node:
'''
Parse the DSL and provide a dictionnaries of all resulting rules.
Call by the MetaGrammar class.
TODO: could be done in the rules property of parsing.BasicParser???
'''
pass
@property
def rules(self) -> dict:
pass
def __init__(self, content='', sname=None):
'''
Define the DSL parser.
'''
pass
| 5 | 3 | 157 | 4 | 122 | 31 | 2 | 0.27 | 1 | 21 | 18 | 0 | 3 | 0 | 3 | 50 | 481 | 16 | 368 | 7 | 363 | 98 | 18 | 5 | 14 | 3 | 5 | 2 | 5 |
146,244 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/type_system/type_expr.py
|
pyrser.type_system.type_expr.AbstractName
|
class AbstractName(TypeName):
def to_fmt(self) -> fmt.indentable:
return fmt.sep("", ['?', TypeName.to_fmt(self)])
|
class AbstractName(TypeName):
def to_fmt(self) -> fmt.indentable:
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 1 | 0 | 1 | 5 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
146,245 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/ir.py
|
pyrser.parsing.ir.ReturnOnEof
|
class ReturnOnEof(IR):
"""If we reach the EOF, we abort parsing directly."""
pass
|
class ReturnOnEof(IR):
'''If we reach the EOF, we abort parsing directly.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 2 | 1 | 1 | 1 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
146,246 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/parsing/ir.py
|
pyrser.parsing.ir.Rule
|
class Rule(IR):
"""Abstraction of a target function. """
def __init__(self, name: str):
self.name = name
self.block = None
|
class Rule(IR):
'''Abstraction of a target function. '''
def __init__(self, name: str):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 1 | 0.25 | 1 | 1 | 0 | 0 | 1 | 2 | 1 | 1 | 6 | 1 | 4 | 4 | 2 | 1 | 4 | 4 | 2 | 1 | 1 | 0 | 1 |
146,247 |
LionelAuroux/pyrser
|
LionelAuroux_pyrser/pyrser/ast/match.py
|
match.MatchIndice
|
class MatchIndice(MatchExpr):
"""
Ast Node for matching one indice.
"""
def __init__(self, idx: int, v=None):
self.idx = idx
if v is None:
v = MatchValue()
self.v = v
def __eq__(self, other) -> bool:
return self.idx == other.idx
def is_in_state(self, s: state.State):
if self.idx is None and '*' in s.indices:
return s.indices['*']
elif self.idx in s.indices:
return s.indices[self.idx]
return None
def attach(
self,
s1: state.State,
s2: state.State,
sr: state.StateRegister
):
s1.matchIndice(self.idx, s2)
def build_state_tree(self, tree: list):
# go deeper
self.v.build_state_tree(tree)
# add ourself
tree.append(self)
def to_fmt(self) -> fmt.indentable:
index = '*'
if self.idx is not None:
index = str(self.idx)
res = fmt.block('[' + index + ': ', ']', [])
res.lsdata.append(self.v.to_fmt())
return res
def __repr__(self) -> str:
return str(self.to_fmt())
|
class MatchIndice(MatchExpr):
'''
Ast Node for matching one indice.
'''
def __init__(self, idx: int, v=None):
pass
def __eq__(self, other) -> bool:
pass
def is_in_state(self, s: state.State):
pass
def attach(
self,
s1: state.State,
s2: state.State,
sr: state.StateRegister
):
pass
def build_state_tree(self, tree: list):
pass
def to_fmt(self) -> fmt.indentable:
pass
def __repr__(self) -> str:
pass
| 8 | 1 | 5 | 0 | 5 | 0 | 2 | 0.15 | 1 | 9 | 5 | 0 | 7 | 2 | 7 | 7 | 44 | 6 | 33 | 17 | 20 | 5 | 27 | 12 | 19 | 3 | 1 | 1 | 11 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.