Column schema (name, dtype, value range; for string columns the range is the string length):
id (int64, 0-843k) | repository_name (string, len 7-55) | file_path (string, len 9-332) | class_name (string, len 3-290) | human_written_code (string, len 12-4.36M) | class_skeleton (string, len 19-2.2M) | total_program_units (int64, 1-9.57k) | total_doc_str (int64, 0-4.2k) | AvgCountLine (float64, 0-7.89k) | AvgCountLineBlank (float64, 0-300) | AvgCountLineCode (float64, 0-7.89k) | AvgCountLineComment (float64, 0-7.89k) | AvgCyclomatic (float64, 0-130) | CommentToCodeRatio (float64, 0-176) | CountClassBase (float64, 0-48) | CountClassCoupled (float64, 0-589) | CountClassCoupledModified (float64, 0-581) | CountClassDerived (float64, 0-5.37k) | CountDeclInstanceMethod (float64, 0-4.2k) | CountDeclInstanceVariable (float64, 0-299) | CountDeclMethod (float64, 0-4.2k) | CountDeclMethodAll (float64, 0-4.2k) | CountLine (float64, 1-115k) | CountLineBlank (float64, 0-9.01k) | CountLineCode (float64, 0-94.4k) | CountLineCodeDecl (float64, 0-46.1k) | CountLineCodeExe (float64, 0-91.3k) | CountLineComment (float64, 0-27k) | CountStmt (float64, 1-93.2k) | CountStmtDecl (float64, 0-46.1k) | CountStmtExe (float64, 0-90.2k) | MaxCyclomatic (float64, 0-759) | MaxInheritanceTree (float64, 0-16) | MaxNesting (float64, 0-34) | SumCyclomatic (float64, 0-6k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7,600 |
Atomistica/atomistica
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Atomistica_atomistica/versioneer.py
|
versioneer.get_cmdclass.cmd_py2exe
|
class cmd_py2exe(_py2exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_py2exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
|
class cmd_py2exe(_py2exe):
def run(self):
pass
| 2 | 0 | 19 | 1 | 18 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 20 | 1 | 19 | 8 | 17 | 0 | 13 | 7 | 11 | 1 | 1 | 1 | 1 |
7,601 |
Atomistica/atomistica
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Atomistica_atomistica/versioneer.py
|
versioneer.get_cmdclass.cmd_build_py
|
class cmd_build_py(_build_py):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
target_versionfile = os.path.join(self.build_lib,
cfg.versionfile_build)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
|
class cmd_build_py(_build_py):
def run(self):
pass
| 2 | 0 | 12 | 0 | 10 | 2 | 2 | 0.18 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 13 | 0 | 11 | 6 | 9 | 2 | 10 | 6 | 8 | 2 | 1 | 1 | 2 |
7,602 |
Atomistica/atomistica
|
Atomistica_atomistica/src/python/atomistica/logger.py
|
atomistica.logger.MDLogger
|
class MDLogger:
"""Class for logging molecular dynamics simulations with some
extended functionality.
Parameters:
dyn: The dynamics. Only a weak reference is kept.
atoms: The atoms.
    logfile: File name or open file, "-" meaning standard output.
stress=False: Include stress in log.
cell=True: Include cell in log.
volume=True: Include volume in log.
peratom=False: Write energies per atom.
mode="a": How the file is opened if logfile is a filename.
"""
def __init__(self, dyn, atoms, logfile, header=True, stress=False,
cell=False, volume=False, peratom=False, hiprec=False,
mode="a"):
import ase.parallel
if ase.parallel.rank > 0:
logfile="/dev/null" # Only log on master
if hasattr(dyn, "get_time"):
self.dyn = weakref.proxy(dyn)
else:
self.dyn = None
self.atoms = atoms
self.natoms = atoms.get_number_of_atoms()
if logfile == "-":
self.logfile = sys.stdout
self.ownlogfile = False
elif hasattr(logfile, "write"):
self.logfile = logfile
self.ownlogfile = False
else:
self.logfile = open(logfile, mode)
self.ownlogfile = True
self.stress = stress
self.cell = cell
self.volume = volume
self.peratom = peratom
if hiprec:
nf = '%20.12e'
else:
nf = '%12.4f '
i = 1
if self.dyn is not None:
self.hdr = "# {0}:Time[ps]".format(i)
self.fmt = nf
i += 1
else:
self.hdr = "# "
self.fmt = ""
if self.peratom:
self.hdr += "{0}:Etot/N[eV] {1}:Epot/N[eV]" \
"{2}:Ekin/N[eV] {3}:T[K]".format(i,i+1,i+2,i+3)
self.fmt += 4*nf
i += 4
else:
self.hdr += "{0}:Etot[eV] {1}:Epot[eV]" \
"{2}:Ekin[eV] {3}:T[K]".format(i,i+1,i+2,i+3)
self.fmt += 4*nf
i += 4
if self.stress:
self.hdr += "{0}:stress(xx) {1}:stress(yy) {2}:stress(zz)" \
"{3}:stress(xy) {4}:stress(yz) {5}:stress(zx)".format(i,i+1,i+2,i+3,i+4,i+5)
self.fmt += 6*nf
i += 6
if self.cell:
self.hdr += "{0}:cell(xx) {1}:cell(yy) {2}:cell(zz)" \
"{3}:cell(xy) {4}:cell(yz) {5}:cell(zx)".format(i,i+1,i+2,i+3,i+4,i+5)
self.fmt += 6*nf
i += 6
if self.volume:
self.hdr += "{0}:Vol [A^3]".format(i)
self.fmt += nf
i += 1
self.fmt += "\n"
if header:
self.logfile.write(self.hdr+"\n")
def __del__(self):
self.close()
def close(self):
if self.ownlogfile:
self.logfile.close()
def __call__(self):
epot = self.atoms.get_potential_energy()
ekin = self.atoms.get_kinetic_energy()
temp = ekin / (1.5 * units.kB * self.natoms)
if self.peratom:
epot /= self.natoms
ekin /= self.natoms
if self.dyn is not None:
t = self.dyn.get_time() / (1000*units.fs)
dat = (t,)
else:
dat = ()
dat += (epot+ekin, epot, ekin, temp)
if self.stress:
dat += tuple(self.atoms.get_stress() / units.GPa)
if self.cell:
cell = self.atoms.get_cell()
dat += ( cell[0,0], cell[1,1], cell[2,2],
(cell[1,2]+cell[2,1])/2,
(cell[2,0]+cell[0,2])/2,
(cell[0,1]+cell[1,0])/2 )
if self.volume:
dat += ( self.atoms.get_volume(), )
self.logfile.write(self.fmt % dat)
self.logfile.flush()
|
class MDLogger:
'''Class for logging molecular dynamics simulations with some
extended functionality.
Parameters:
dyn: The dynamics. Only a weak reference is kept.
atoms: The atoms.
    logfile: File name or open file, "-" meaning standard output.
stress=False: Include stress in log.
cell=True: Include cell in log.
volume=True: Include volume in log.
peratom=False: Write energies per atom.
mode="a": How the file is opened if logfile is a filename.
'''
def __init__(self, dyn, atoms, logfile, header=True, stress=False,
cell=False, volume=False, peratom=False, hiprec=False,
mode="a"):
pass
def __del__(self):
pass
def close(self):
pass
def __call__(self):
pass
| 5 | 1 | 24 | 0 | 24 | 1 | 5 | 0.16 | 0 | 1 | 0 | 0 | 4 | 11 | 4 | 4 | 118 | 11 | 95 | 27 | 87 | 15 | 79 | 25 | 73 | 12 | 0 | 1 | 21 |
7,603 |
Atomistica/atomistica
|
Atomistica_atomistica/tests/test_dimers.py
|
test_dimers.DimerTest
|
class DimerTest(unittest.TestCase):
def test_C2(self):
vac = 4
dist_min = 1.2
dist_max = 3.1
a = Atoms('CC', positions=[[0, 0, 0], [dist_min, 0, 0]])
a.center(vacuum=vac)
distances = np.linspace(dist_min, dist_max, 1000)
for potential in [Rebo2(), Rebo2Scr()]:
a.calc = potential
energies = []
forces = []
for dist in distances:
a[1].position[0] = dist + vac
forces += [a.get_forces()[0][0]]
energies += [a.get_potential_energy()]
forces = np.array(forces)
energies = np.array(energies)
en_differences = np.abs(energies[1:] - energies[:-1])
self.assertTrue(np.max(en_differences) < 0.05)
self.assertTrue(np.max(np.abs(forces[1:] - forces[:-1])) < 0.5)
def test_H2(self):
vac = 4
dist_min = 0.6
dist_max = 1.8
a = Atoms('HH', positions=[[0, 0, 0], [dist_min, 0, 0]])
a.center(vacuum=vac)
distances = np.linspace(dist_min, dist_max, 1000)
for potential in [Rebo2(), Rebo2Scr()]:
a.calc = potential
energies = []
forces = []
for dist in distances:
a[1].position[0] = dist + vac
forces += [a.get_forces()[0][0]]
energies += [a.get_potential_energy()]
forces = np.array(forces)
energies = np.array(energies)
en_differences = np.abs(energies[1:] - energies[:-1])
self.assertTrue(np.max(en_differences) < 0.02)
self.assertTrue(np.max(np.abs(forces[1:] - forces[:-1])) < 0.2)
def test_CH(self):
vac = 4
dist_min = 0.8
dist_max = 1.9
a = Atoms('CH', positions=[[0, 0, 0], [dist_min, 0, 0]])
a.center(vacuum=vac)
distances = np.linspace(dist_min, dist_max, 1000)
for potential in [Rebo2(), Rebo2Scr()]:
a.calc = potential
energies = []
forces = []
for dist in distances:
a[1].position[0] = dist + vac
forces += [a.get_forces()[0][0]]
energies += [a.get_potential_energy()]
forces = np.array(forces)
energies = np.array(energies)
en_differences = np.abs(energies[1:] - energies[:-1])
self.assertTrue(np.max(en_differences) < 0.03)
self.assertTrue(np.max(np.abs(forces[1:] - forces[:-1])) < 0.3)
def test_Si2(self):
vac = 4
dist_min = 1.8
dist_max = 6.2
a = Atoms('Si2', positions=[[0, 0, 0], [dist_min, 0, 0]])
a.center(vacuum=vac)
distances = np.linspace(dist_min, dist_max, 1000)
for potential in [Kumagai(), KumagaiScr()]:
a.calc = potential
energies = []
forces = []
for dist in distances:
a[1].position[0] = dist + vac
forces += [a.get_forces()[0][0]]
energies += [a.get_potential_energy()]
forces = np.array(forces)
energies = np.array(energies)
en_differences = np.abs(energies[1:] - energies[:-1])
self.assertTrue(np.max(en_differences) < 0.08)
self.assertTrue(np.max(np.abs(forces[1:] - forces[:-1])) < 0.4)
|
class DimerTest(unittest.TestCase):
def test_C2(self):
pass
def test_H2(self):
pass
def test_CH(self):
pass
def test_Si2(self):
pass
| 5 | 0 | 23 | 3 | 20 | 0 | 3 | 0 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 76 | 98 | 17 | 81 | 45 | 76 | 0 | 81 | 45 | 76 | 3 | 2 | 2 | 12 |
7,604 |
Atomistica/atomistica
|
Atomistica_atomistica/tests/test_pbc.py
|
test_pbc.PBCTest
|
class PBCTest(unittest.TestCase):
def test_pbc(self):
a = Diamond('Si', latticeconstant=5.432, size=[2,2,2])
sx, sy, sz = a.get_cell().diagonal()
a.calc = Tersoff()
e1 = a.get_potential_energy()
a.set_pbc([True,True,False])
e2 = a.get_potential_energy()
a.set_pbc(True)
a.set_cell([sx,sy,2*sz])
e3 = a.get_potential_energy()
self.assertEqual(e2, e3)
# This should give the unrelaxed surface energy
esurf = (e2-e1)/(2*sx*sy) * Jm2
self.assertTrue(abs(esurf-2.309) < 0.001)
|
class PBCTest(unittest.TestCase):
def test_pbc(self):
pass
| 2 | 0 | 18 | 4 | 13 | 1 | 1 | 0.07 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 73 | 20 | 5 | 14 | 8 | 12 | 1 | 14 | 8 | 12 | 1 | 2 | 0 | 1 |
7,605 |
Atomistica/atomistica
|
Atomistica_atomistica/tests/test_mask.py
|
test_mask.MaskTest
|
class MaskTest(unittest.TestCase):
def random_mask_test(self, a):
c = a.calc
e = a.get_potential_energy()
f = a.get_forces()
w = a.get_stress()
mask = np.random.randint(0, len(a), size=len(a)) < \
len(a)/2
imask = np.logical_not(mask)
c.set_mask(mask)
e1 = a.get_potential_energy()
f1 = a.get_forces()
w1 = a.get_stress()
c.set_mask(imask)
e2 = a.get_potential_energy()
f2 = a.get_forces()
w2 = a.get_stress()
c.set_mask(None)
e3 = a.get_potential_energy()
self.assertTrue(abs(e-e1-e2) < 1e-6)
self.assertTrue(abs(e-e3) < 1e-6)
self.assertTrue(np.max(np.abs(f-f1-f2)) < 1e-6)
self.assertTrue(np.max(np.abs(w-w1-w2)) < 1e-6)
def test_mask_decomposition_bop(self):
a = io.read('aC.cfg')
for pot in [Tersoff, TersoffScr]:
c = Tersoff()
a.calc = c
self.random_mask_test(a)
def test_mask_decomposition_lj_cut(self):
a = FaceCenteredCubic('Au', size=[2,2,2])
c = LJCut(el1='Au', el2='Au', epsilon=1.0, sigma=1.0, cutoff=6.0)
a.calc = c
self.random_mask_test(a)
def test_mask_decomposition_tabulated_alloy_eam(self):
a = FaceCenteredCubic('Au', size=[2,2,2])
c = TabulatedAlloyEAM(fn='Au-Grochola-JCP05.eam.alloy')
a.calc = c
self.random_mask_test(a)
|
class MaskTest(unittest.TestCase):
def random_mask_test(self, a):
pass
def test_mask_decomposition_bop(self):
pass
def test_mask_decomposition_lj_cut(self):
pass
def test_mask_decomposition_tabulated_alloy_eam(self):
pass
| 5 | 0 | 11 | 1 | 10 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 76 | 48 | 9 | 39 | 25 | 34 | 0 | 38 | 25 | 33 | 2 | 2 | 1 | 5 |
7,606 |
Atomistica/atomistica
|
Atomistica_atomistica/tests/test_mio.py
|
test_mio.TestMIO
|
class TestMIO(unittest.TestCase):
def test_mio(self):
if os.getenv('MIO') is None:
print('Skipping MIO test. Specify path to mio Slater-Koster ' \
'tables in MIO environment variable if you want to run it.')
else:
run_mio_test(self)
|
class TestMIO(unittest.TestCase):
def test_mio(self):
pass
| 2 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 73 | 8 | 1 | 7 | 2 | 5 | 0 | 5 | 2 | 3 | 2 | 2 | 1 | 2 |
7,607 |
Atomistica/atomistica
|
Atomistica_atomistica/tests/test_neighbor_list.py
|
test_neighbor_list.NeighborListTest
|
class NeighborListTest(unittest.TestCase):
def test_neighbor_list(self):
a = io.read('aC.cfg')
an = native.from_atoms(a)
nl = native.Neighbors(100)
nl.request_interaction_range(5.0)
i, j, abs_dr_no_vec = nl.get_neighbors(an)
i, j, dr, abs_dr = nl.get_neighbors(an, vec=True)
self.assertTrue(np.all(np.abs(abs_dr_no_vec-abs_dr) < 1e-12))
r = a.get_positions()
dr_direct = mic(r[i]-r[j], a.cell)
abs_dr_from_dr = np.sqrt(np.sum(dr*dr, axis=1))
abs_dr_direct = np.sqrt(np.sum(dr_direct*dr_direct, axis=1))
self.assertTrue(np.all(np.abs(abs_dr-abs_dr_from_dr) < 1e-12))
self.assertTrue(np.all(np.abs(abs_dr-abs_dr_direct) < 1e-12))
self.assertTrue(np.all(np.abs(dr-dr_direct) < 1e-12))
def test_pbc(self):
a = ase.Atoms('CC',
positions=[[0.1, 0.5, 0.5],
[0.9, 0.5, 0.5]],
cell=[1, 1, 1],
pbc=True)
an = native.from_atoms(a)
nl = native.Neighbors(100)
nl.request_interaction_range(0.3)
# with pbc
i, j, abs_dr = nl.get_neighbors(an)
self.assertEqual(len(i), 2)
a.set_pbc(False)
an = native.from_atoms(a)
nl = native.Neighbors(100)
nl.request_interaction_range(0.3)
# no pbc
i, j, abs_dr = nl.get_neighbors(an)
self.assertEqual(len(i), 0)
a.set_pbc([False,False,True])
an = native.from_atoms(a)
nl = native.Neighbors(100)
nl.request_interaction_range(0.3)
# partial pbc
i, j, abs_dr = nl.get_neighbors(an)
self.assertEqual(len(i), 0)
a.set_pbc([True,False,False])
an = native.from_atoms(a)
nl = native.Neighbors(100)
nl.request_interaction_range(0.3)
# partial pbc
i, j, abs_dr = nl.get_neighbors(an)
self.assertEqual(len(i), 2)
def test_pbc_shift_by_multiple_cells(self):
a = io.read('aC.cfg')
a.calc = Tersoff()
e1 = a.get_potential_energy()
i1, j1, r1 = a.calc.nl.get_neighbors(a.calc.particles)
a[100].position += 3*a.cell[0]
e2 = a.get_potential_energy()
i2, j2, r2 = a.calc.nl.get_neighbors(a.calc.particles)
for i in range(len(a)):
n1 = np.array(sorted(j1[i1==i]))
n2 = np.array(sorted(j2[i2==i]))
if np.any(n1 != n2):
print(i, n1, n2)
a[100].position += a.cell.T.dot([1,3,-4])
e3 = a.get_potential_energy()
self.assertAlmostEqual(e1, e2)
self.assertAlmostEqual(e1, e3)
def test_no_pbc_small_cell(self):
a = io.read('aC.cfg')
a.calc = Tersoff()
a.set_pbc(False)
e1 = a.get_potential_energy()
i1, j1, r1 = a.calc.nl.get_neighbors(a.calc.particles)
a.set_cell(a.cell*0.9, scale_atoms=False)
e2 = a.get_potential_energy()
self.assertAlmostEqual(e1, e2)
i2, j2, r2 = a.calc.nl.get_neighbors(a.calc.particles)
for k in range(len(a)):
neigh1 = np.array(sorted(j1[i1==k]))
neigh2 = np.array(sorted(j2[i2==k]))
self.assertTrue(np.all(neigh1 == neigh2))
def test_partial_pbc_small_cell(self):
a = io.read('aC.cfg')
a.set_cell(a.cell.diagonal(), scale_atoms=True)
a.calc = Tersoff()
a.set_pbc([True, False, False])
e1 = a.get_potential_energy()
i1, j1, r1 = a.calc.nl.get_neighbors(a.calc.particles)
a.set_cell(a.cell.diagonal()*np.array([1.0, 0.8, 0.9]), scale_atoms=False)
e2 = a.get_potential_energy()
self.assertAlmostEqual(e1, e2)
i2, j2, r2 = a.calc.nl.get_neighbors(a.calc.particles)
for k in range(len(a)):
neigh1 = np.array(sorted(j1[i1==k]))
neigh2 = np.array(sorted(j2[i2==k]))
self.assertTrue(np.all(neigh1 == neigh2))
def test_floating_point_issue(self):
calc = Tersoff()
a1 = ase.Atoms('Si4C4', positions=np.array([[-4.41173839e-52, 0.00000000e+00, 0.00000000e+00],
[-4.41173839e-52, 2.26371743e+00, 2.26371743e+00],
[ 2.26371743e+00, 0.00000000e+00, 2.26371743e+00],
[ 2.26371743e+00, 2.26371743e+00, 0.00000000e+00],
[ 1.13185872e+00, 1.13185872e+00, 1.13185872e+00],
[ 1.13185872e+00, 3.39557615e+00, 3.39557615e+00],
[ 3.39557615e+00, 1.13185872e+00, 3.39557615e+00],
[ 3.39557615e+00, 3.39557615e+00, 1.13185872e+00]]),
cell=[4.527434867899659, 4.527434867899659, 4.527434867899659], pbc=True)
a1.calc = calc
a1.get_potential_energy()
self.assertTrue((calc.nl.get_coordination_numbers(calc.particles, 3.0) == 4).all())
a2 = a1.copy()
a2.calc = calc
a2.set_scaled_positions(a2.get_scaled_positions())
a2.get_potential_energy()
self.assertTrue((calc.nl.get_coordination_numbers(calc.particles, 3.0) == 4).all())
|
class NeighborListTest(unittest.TestCase):
def test_neighbor_list(self):
pass
def test_pbc(self):
pass
def test_pbc_shift_by_multiple_cells(self):
pass
def test_no_pbc_small_cell(self):
pass
def test_partial_pbc_small_cell(self):
pass
def test_floating_point_issue(self):
pass
| 7 | 0 | 22 | 3 | 18 | 1 | 2 | 0.04 | 1 | 1 | 0 | 0 | 6 | 0 | 6 | 78 | 139 | 25 | 110 | 48 | 103 | 4 | 98 | 48 | 91 | 3 | 2 | 2 | 10 |
7,608 |
Atomistica/atomistica
|
Atomistica_atomistica/tests/test_eam_special_cases.py
|
test_eam_special_cases.TestEAMSpecialCases
|
class TestEAMSpecialCases(unittest.TestCase):
def test_crash1(self):
a = io.read('eam_crash1.poscar')
a.calc = TabulatedAlloyEAM(fn='Cu_mishin1.eam.alloy')
a.get_potential_energy()
def test_dense_forces(self):
orig_a = io.read('eam_crash2.poscar')
c = TabulatedAlloyEAM(fn='Cu_mishin1.eam.alloy')
for fac in [0.2, 0.3, 0.4, 0.5]:
a = orig_a.copy()
a.set_cell(fac*a.cell, scale_atoms=True)
a.calc = c
ffd, f0, maxdf = forces(a, dx=dx)
if maxdf > tol:
nfail += 1
print("forces .failed.")
print("max(df) = %f" % maxdf)
print("f - from potential")
for i, f in enumerate(f0):
print(i, f)
print("f - numerically")
for i, f in enumerate(ffd):
print(i, f)
print("difference between the above")
for i, f in enumerate(f0-ffd):
print(i, f)
self.assertTrue(maxdf < tol)
|
class TestEAMSpecialCases(unittest.TestCase):
def test_crash1(self):
pass
def test_dense_forces(self):
pass
| 3 | 0 | 13 | 0 | 13 | 0 | 4 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 74 | 29 | 2 | 27 | 10 | 24 | 0 | 27 | 10 | 24 | 6 | 2 | 3 | 7 |
7,609 |
Atrox/haikunatorpy
|
Atrox_haikunatorpy/haikunator/tests.py
|
haikunator.tests.HaikunatorTests
|
class HaikunatorTests(unittest.TestCase):
def setUp(self):
if sys.version_info > (3, 0):
self.assertRegexp = self.assertRegex
else:
self.assertRegexp = self.assertRegexpMatches
def test_general_functionality(self):
tests = [
[{}, '[a-z]+-[a-z]+-[0-9]{4}$'],
[{'token_hex': True}, '[a-z]+-[a-z]+-[0-f]{4}$'],
[{'token_length': 9}, '[a-z]+-[a-z]+-[0-9]{9}$'],
[{'token_length': 9, 'token_hex': True}, '[a-z]+-[a-z]+-[0-f]{9}$'],
[{'token_length': 0}, '[a-z]+-[a-z]+$'],
[{'delimiter': '.'}, '[a-z]+.[a-z]+.[0-9]{4}$'],
[{'token_length': 0, 'delimiter': ' '}, '[a-z]+ [a-z]+'],
[{'token_length': 0, 'delimiter': ''}, '[a-z]+$'],
[{'token_chars': 'xyz'}, '[a-z]+-[a-z]+-[x-z]{4}$'],
]
haikunator = Haikunator()
for test in tests:
self.assertRegexp(haikunator.haikunate(**test[0]), test[1])
def test_wont_return_same(self):
haikunator = Haikunator()
self.assertNotEqual(haikunator.haikunate(), haikunator.haikunate())
def test_return_same_with_seed(self):
seed = 'definitively random seed'
h1 = Haikunator(seed=seed)
h2 = Haikunator(seed=seed)
self.assertEqual(h1.haikunate(), h2.haikunate())
self.assertEqual(h1.haikunate(), h2.haikunate())
def test_custom_adjectives_nouns(self):
haikunator = Haikunator(
adjectives=['adjective'],
nouns=['noun']
)
self.assertRegexp(haikunator.haikunate(), 'adjective-noun-\d{4}$')
def test_empty_adjectives_nouns(self):
haikunator = Haikunator(
adjectives=[],
nouns=[]
)
self.assertEqual(haikunator.haikunate(token_chars=''), '')
|
class HaikunatorTests(unittest.TestCase):
def setUp(self):
pass
def test_general_functionality(self):
pass
def test_wont_return_same(self):
pass
def test_return_same_with_seed(self):
pass
def test_custom_adjectives_nouns(self):
pass
def test_empty_adjectives_nouns(self):
pass
| 7 | 0 | 8 | 1 | 7 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 6 | 1 | 6 | 78 | 53 | 11 | 42 | 17 | 35 | 0 | 25 | 17 | 18 | 2 | 2 | 1 | 8 |
7,610 |
Atrox/haikunatorpy
|
Atrox_haikunatorpy/haikunator/haikunator.py
|
haikunator.haikunator.Haikunator
|
class Haikunator:
_adjectives = [
'aged', 'ancient', 'autumn', 'billowing', 'bitter', 'black', 'blue', 'bold',
'broad', 'broken', 'calm', 'cold', 'cool', 'crimson', 'curly', 'damp',
'dark', 'dawn', 'delicate', 'divine', 'dry', 'empty', 'falling', 'fancy',
'flat', 'floral', 'fragrant', 'frosty', 'gentle', 'green', 'hidden', 'holy',
'icy', 'jolly', 'late', 'lingering', 'little', 'lively', 'long', 'lucky',
'misty', 'morning', 'muddy', 'mute', 'nameless', 'noisy', 'odd', 'old',
'orange', 'patient', 'plain', 'polished', 'proud', 'purple', 'quiet', 'rapid',
'raspy', 'red', 'restless', 'rough', 'round', 'royal', 'shiny', 'shrill',
'shy', 'silent', 'small', 'snowy', 'soft', 'solitary', 'sparkling', 'spring',
'square', 'steep', 'still', 'summer', 'super', 'sweet', 'throbbing', 'tight',
'tiny', 'twilight', 'wandering', 'weathered', 'white', 'wild', 'winter', 'wispy',
'withered', 'yellow', 'young'
]
_nouns = [
'art', 'band', 'bar', 'base', 'bird', 'block', 'boat', 'bonus',
'bread', 'breeze', 'brook', 'bush', 'butterfly', 'cake', 'cell', 'cherry',
'cloud', 'credit', 'darkness', 'dawn', 'dew', 'disk', 'dream', 'dust',
'feather', 'field', 'fire', 'firefly', 'flower', 'fog', 'forest', 'frog',
'frost', 'glade', 'glitter', 'grass', 'hall', 'hat', 'haze', 'heart',
'hill', 'king', 'lab', 'lake', 'leaf', 'limit', 'math', 'meadow',
'mode', 'moon', 'morning', 'mountain', 'mouse', 'mud', 'night', 'paper',
'pine', 'poetry', 'pond', 'queen', 'rain', 'recipe', 'resonance', 'rice',
'river', 'salad', 'scene', 'sea', 'shadow', 'shape', 'silence', 'sky',
'smoke', 'snow', 'snowflake', 'sound', 'star', 'sun', 'sun', 'sunset',
'surf', 'term', 'thunder', 'tooth', 'tree', 'truth', 'union', 'unit',
'violet', 'voice', 'water', 'waterfall', 'wave', 'wildflower', 'wind', 'wood'
]
def __init__(self, seed=None, adjectives=None, nouns=None):
"""
Initialize new haikunator
:param seed: Seed for Random
:param adjectives: Custom Adjectives
:param nouns: Custom Nouns
:type adjectives: list
:type nouns: list
"""
if adjectives is not None:
self._adjectives = adjectives
if nouns is not None:
self._nouns = nouns
self.random = Random(seed)
def haikunate(self, delimiter='-', token_length=4, token_hex=False, token_chars='0123456789'):
"""
Generate heroku-like random names to use in your python applications
:param delimiter: Delimiter
:param token_length: TokenLength
:param token_hex: TokenHex
:param token_chars: TokenChars
:type delimiter: str
:type token_length: int
:type token_hex: bool
:type token_chars: str
:return: heroku-like random string
:rtype: str
"""
if token_hex:
token_chars = '0123456789abcdef'
adjective = self._random_element(self._adjectives)
noun = self._random_element(self._nouns)
token = ''.join(self._random_element(token_chars) for _ in range(token_length))
sections = [adjective, noun, token]
return delimiter.join(filter(None, sections))
def _random_element(self, s):
"""
Get random element from string or list
:param s: Element
:type s: str or list
:return: str
:rtype: str
"""
if len(s) <= 0:
return ''
return self.random.choice(s)
|
class Haikunator:
def __init__(self, seed=None, adjectives=None, nouns=None):
'''
Initialize new haikunator
:param seed: Seed for Random
:param adjectives: Custom Adjectives
:param nouns: Custom Nouns
:type adjectives: list
:type nouns: list
'''
pass
def haikunate(self, delimiter='-', token_length=4, token_hex=False, token_chars='0123456789'):
'''
Generate heroku-like random names to use in your python applications
:param delimiter: Delimiter
:param token_length: TokenLength
:param token_hex: TokenHex
:param token_chars: TokenChars
:type delimiter: str
:type token_length: int
:type token_hex: bool
:type token_chars: str
:return: heroku-like random string
:rtype: str
'''
pass
def _random_element(self, s):
'''
Get random element from string or list
:param s: Element
:type s: str or list
:return: str
:rtype: str
'''
pass
| 4 | 3 | 18 | 3 | 6 | 9 | 2 | 0.6 | 0 | 3 | 0 | 0 | 3 | 1 | 3 | 3 | 87 | 12 | 47 | 11 | 43 | 28 | 21 | 11 | 17 | 3 | 0 | 1 | 7 |
7,611 |
AtsushiSakai/SimpleTkGUIKit
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtsushiSakai_SimpleTkGUIKit/tkinter_samples/tkinter_samples.py
|
tkinter_samples.sample3.Application
|
class Application(tkinter.Frame):
def __init__(self, master=None):
super().__init__(master)
self.pack()
self.create_widgets()
def create_widgets(self):
self.hi_there = tkinter.Button(self)
self.hi_there["text"] = "Hello World(click me)"
self.hi_there["command"] = self.say_hi
self.hi_there.pack(side="top")
self.quit = tkinter.Button(
self, text="QUIT", command=self.master.destroy)
self.quit.pack(side="bottom")
def say_hi(self):
print("hi there, everyone!")
|
class Application(tkinter.Frame):
def __init__(self, master=None):
pass
def create_widgets(self):
pass
def say_hi(self):
pass
| 4 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 2 | 0 | 0 | 3 | 2 | 3 | 167 | 19 | 4 | 15 | 6 | 11 | 0 | 14 | 6 | 10 | 1 | 4 | 0 | 3 |
7,612 |
AtsushiSakai/SimpleTkGUIKit
|
AtsushiSakai_SimpleTkGUIKit/tkinter_samples/tkinter_samples.py
|
tkinter_samples.MyHovertip
|
class MyHovertip(Hovertip):
def showcontents(self):
label = tkinter.Label(self.tipwindow, text=self.text, justify=tkinter.LEFT,
foreground="black", background="#ffffe0", relief=tkinter.SOLID, borderwidth=1)
label.pack()
|
class MyHovertip(Hovertip):
def showcontents(self):
pass
| 2 | 0 | 4 | 0 | 4 | 1 | 1 | 0.2 | 1 | 1 | 0 | 0 | 1 | 2 | 1 | 17 | 5 | 0 | 5 | 3 | 3 | 1 | 4 | 3 | 2 | 1 | 3 | 0 | 1 |
7,613 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/testing/zsl.py
|
zsl.testing.zsl.ZslTestCase
|
class ZslTestCase:
ZSL_TEST_CONFIGURATION = None # type: ZslTestConfiguration
@classmethod
def setUpClass(cls):
if cls.ZSL_TEST_CONFIGURATION is None:
raise InvalidConfigurationException("Please give a test container "
"specification via 'container' "
"class variable.")
config = cls.ZSL_TEST_CONFIGURATION # type: ZslTestConfiguration
if config.profile:
set_profile(config.profile)
app = Zsl(
config.app_name + "-test",
version=config.version,
modules=config.container.modules(),
config_object=config.config_object
)
app.testing = True
app.debug = True
logging.getLogger(config.app_name).debug(
"ZSL test app created {0}.".format(app))
super(ZslTestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
config = cls.ZSL_TEST_CONFIGURATION # type: ZslTestConfiguration
logging.getLogger(config.app_name).debug(
"ZSL test app tear down {0}.".format(config.app_name))
set_current_app(None)
|
class ZslTestCase:
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
| 5 | 0 | 14 | 2 | 12 | 1 | 2 | 0.11 | 0 | 2 | 1 | 17 | 0 | 0 | 2 | 2 | 33 | 5 | 28 | 9 | 23 | 3 | 17 | 7 | 14 | 3 | 0 | 1 | 4 |
7,614 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/tasks/zsl/schedule_gearman_task.py
|
zsl.tasks.zsl.schedule_gearman_task.ScheduleGearmanTask
|
class ScheduleGearmanTask:
def perform(self, data):
# type: (TaskData)->str
data = data.payload
# Create gearman.
schedule_gearman_task(data['path'], data['data'])
return 'job submitted'
|
class ScheduleGearmanTask:
def perform(self, data):
pass
| 2 | 0 | 8 | 2 | 4 | 2 | 1 | 0.4 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 10 | 3 | 5 | 2 | 3 | 2 | 5 | 2 | 3 | 1 | 0 | 0 | 1 |
7,615 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/cache_helper.py
|
zsl.utils.cache_helper.CacheModelDecorator
|
class CacheModelDecorator(CacheDecorator):
def get_wrapped_fn(self):
def wrapped_fn(*args):
if not self.is_caching(*args):
return self._fn(*args)
key = self.get_data_key(*args)
logging.debug("Initializing CacheModelDecorator - key %s.", key)
if self._id_helper.check_key(key):
model_key = self._id_helper.get_key(key)
logging.debug("Retrieved from cache %s.", model_key)
if self._id_helper.check_key(model_key):
return self.get_decoder()(model_key, self._id_helper.get_key(model_key))
model = self._fn(*args)
if model is None:
return model
encoded_model = json.dumps(model, cls=AppModelJSONEncoder)
model_key = model_key_generator(model)
self._id_helper.set_key(key, model_key, self._timeout)
self._id_helper.set_key(model_key, encoded_model, self._timeout)
logging.debug("Newly fetched into the cache.")
return model
return wrapped_fn
|
class CacheModelDecorator(CacheDecorator):
def get_wrapped_fn(self):
pass
def wrapped_fn(*args):
pass
| 3 | 0 | 25 | 5 | 20 | 0 | 3 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 9 | 28 | 6 | 22 | 7 | 19 | 0 | 22 | 7 | 19 | 5 | 1 | 2 | 6 |
7,616 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/cache_helper.py
|
zsl.utils.cache_helper.CacheOutputDecorator
|
class CacheOutputDecorator(CacheDecorator):
def clear_cache(self, task: type):
key = self.get_data_key_wildcard(task)
self._id_helper.invalidate_keys_by_prefix(key)
def get_wrapped_fn(self):
def wrapped_fn(*args):
if not self.is_caching(*args):
return self._fn(*args)
key = self.get_data_key(*args)
logging.debug("Initializing CacheOutputDecorator - key %s.", key)
if self._id_helper.check_key(key):
logging.debug("Retrieved from cache.")
return self._id_helper.get_key(key)
else:
ret_val = self._fn(*args)
if not isinstance(ret_val, (str, bytes)):
raise Exception("Can not cache non-string value. Is the serialization, json_output, already done?")
self._id_helper.set_key(key, ret_val, self._timeout)
logging.debug("Newly fetched into the cache.")
return ret_val
return wrapped_fn
|
class CacheOutputDecorator(CacheDecorator):
def clear_cache(self, task: type):
pass
def get_wrapped_fn(self):
pass
def wrapped_fn(*args):
pass
| 4 | 0 | 14 | 2 | 12 | 0 | 2 | 0 | 1 | 4 | 0 | 0 | 2 | 0 | 2 | 10 | 27 | 6 | 21 | 7 | 17 | 0 | 20 | 7 | 16 | 4 | 1 | 2 | 6 |
7,617 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/guarded_resource_test.py
|
tests.resource.guarded_resource_test.GuardedResourceTest.testDefaultDenyPolicy.Resource
|
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
return SAMPLE_MODEL
|
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 5 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
7,618 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/read_only_resource.py
|
tests.resource.read_only_resource.TestReadOnlyModelResource
|
class TestReadOnlyModelResource(TestCase, DbTestCase):
@classmethod
def setUpClass(cls):
create_resource_test_data()
@inject(app=Zsl)
def setUp(self, app):
self.resource = app._injector.create_object(
ReadOnlyModelResource, {'model_cls': UserModel})
def testRead(self):
m = self.resource.read([9])
self.assertEqual('nine', m.val, "Read one")
self.assertEqual(10, len(self.resource.read([])),
'Expecting 10 dummy models.')
def testCreate(self):
self.assertRaises(ReadOnlyResourceUpdateOperationException,
lambda: self.resource.create({}, {}, {}))
def testDelete(self):
self.assertRaises(ReadOnlyResourceUpdateOperationException,
lambda: self.resource.delete({}, {}, {}))
def testUpdate(self):
self.assertRaises(ReadOnlyResourceUpdateOperationException,
lambda: self.resource.update({}, {}, {}))
|
class TestReadOnlyModelResource(TestCase, DbTestCase):
@classmethod
def setUpClass(cls):
pass
@inject(app=Zsl)
def setUpClass(cls):
pass
def testRead(self):
pass
def testCreate(self):
pass
def testDelete(self):
pass
def testUpdate(self):
pass
| 9 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 2 | 3 | 3 | 0 | 5 | 1 | 6 | 81 | 23 | 6 | 17 | 11 | 8 | 0 | 15 | 9 | 8 | 1 | 2 | 0 | 6 |
7,619 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/cache_helper.py
|
zsl.utils.cache_helper.CachePageDecorator
|
class CachePageDecorator(CacheDecorator):
def get_wrapped_fn(self):
def wrapped_fn(*args):
if not self.is_caching(*args):
return self._fn(*args)
page_key = self.get_data_key(*args)
logging.debug("Initializing CachePageDecorator - key %s.", page_key)
if self._id_helper.check_page(page_key):
logging.debug("Retrieved from cache %s.", page_key)
return self._id_helper.gather_page(page_key, self.get_decoder())
page = self._fn(*args)
self._id_helper.fill_page(page_key, page, self._timeout, self.get_encoder())
logging.debug("Newly fetched into the cache.")
return page
return wrapped_fn
|
class CachePageDecorator(CacheDecorator):
def get_wrapped_fn(self):
pass
def wrapped_fn(*args):
pass
| 3 | 0 | 17 | 4 | 13 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 9 | 20 | 5 | 15 | 5 | 12 | 0 | 15 | 5 | 12 | 3 | 1 | 1 | 4 |
7,620 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/testing/test_utils.py
|
zsl.testing.test_utils.TestTaskData
|
class TestTaskData(TaskData):
"""Data suitable when directly calling a task."""
def __init__(self, payload):
# type: (Any)->None
super().__init__(json.dumps(payload))
|
class TestTaskData(TaskData):
'''Data suitable when directly calling a task.'''
def __init__(self, payload):
pass
| 2 | 1 | 3 | 0 | 2 | 1 | 1 | 0.67 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 5 | 6 | 1 | 3 | 2 | 1 | 2 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
7,621 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest
|
class TransactionalGuardTest(TestCase):
@inject(engine_pool=EnginePool)
def setUp(self, engine_pool=Injected):
zsl = Zsl(__name__, config_object=IN_MEMORY_DB_SETTINGS,
modules=WebContainer.modules())
zsl.testing = True
engine = engine_pool.get_engine(EnginePool._DEFAULT_ENGINE_NAME)
TestSessionFactory.reset_db_schema_initialization(engine)
super().setUp()
@inject(engine_pool=EnginePool)
def tearDown(self, engine_pool=Injected):
super(TransactionalGuardTest, self).tearDown()
engine = engine_pool.get_engine(EnginePool._DEFAULT_ENGINE_NAME)
TestSessionFactory.reset_db_schema_initialization(engine)
def testIsInTransaction(self):
test_case = self
class AllowPolicy(ResourcePolicy):
default = Access.ALLOW
@transactional_guard([AllowPolicy()])
class GuardedUserModel(UserResource, GuardedMixin):
def secure_read(self, *args, **kwargs):
test_case.assertIsNotNone(self._orm)
test_case.assertTrue(self._in_transaction)
return super().read(*args, **kwargs)
create_resource_test_data()
resource = GuardedUserModel()
user = resource.read('1', {}, {})
self.assertDictEqual(users[0]._asdict(), user.get_attributes(),
"should return first user")
@staticmethod
def testRollbackBefore():
class DenyPolicy(ResourcePolicy):
default = Access.DENY
@transactional_guard([DenyPolicy()])
class GuardedUserModel(UserResource, GuardedMixin):
pass
class TestTHolder(TransactionHolder):
rollback = mock.MagicMock()
_orm = mock.MagicMock()
create_resource_test_data()
with mock.patch(
'zsl.application.modules.alchemy_module.TransactionHolder',
side_effect=TestTHolder
):
resource = GuardedUserModel()
resource.read('', {}, {})
TestTHolder.rollback.assert_called_with()
TestTHolder._orm.assert_not_called()
@staticmethod
def testRollbackAfter():
class DenyAfterPolicy(ResourcePolicy):
default = Access.ALLOW
def can_read__after(self, *args, **kwargs):
return Access.DENY
mock_sess = mock_db_session()
@transactional_guard([DenyAfterPolicy()])
class GuardedUserModel(UserResource, GuardedMixin):
pass
class MyTestCase(DbTestCase, TestCase):
def runTest(self):
pass
def testIt(self):
create_resource_test_data()
resource = GuardedUserModel()
resource.read('', {}, {})
if hasattr(mock_sess.query, 'assert_called'):
mock_sess.query.assert_called()
mock_sess.rollback.assert_called_with()
test_case = MyTestCase()
test_case.setUp()
test_case.testIt()
test_case.tearDown()
|
class TransactionalGuardTest(TestCase):
@inject(engine_pool=EnginePool)
def setUp(self, engine_pool=Injected):
pass
@inject(engine_pool=EnginePool)
def tearDown(self, engine_pool=Injected):
pass
def testIsInTransaction(self):
pass
class AllowPolicy(ResourcePolicy):
@transactional_guard([AllowPolicy()])
class GuardedUserModel(UserResource, GuardedMixin):
def secure_read(self, *args, **kwargs):
pass
@staticmethod
def testRollbackBefore():
pass
class DenyPolicy(ResourcePolicy):
@transactional_guard([DenyPolicy()])
class GuardedUserModel(UserResource, GuardedMixin):
class TestTHolder(TransactionHolder):
@staticmethod
def testRollbackAfter():
pass
class DenyAfterPolicy(ResourcePolicy):
def can_read__after(self, *args, **kwargs):
pass
@transactional_guard([DenyAfterPolicy()])
class GuardedUserModel(UserResource, GuardedMixin):
class MyTestCase(DbTestCase, TestCase):
def runTest(self):
pass
def testIt(self):
pass
| 25 | 0 | 11 | 2 | 9 | 0 | 1 | 0 | 1 | 12 | 11 | 0 | 3 | 0 | 5 | 77 | 94 | 22 | 72 | 40 | 47 | 0 | 60 | 33 | 42 | 2 | 2 | 1 | 10 |
7,622 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.setUp.ToBeSecuredResource
|
class ToBeSecuredResource(PlainResource, GuardedMixin):
pass
|
class ToBeSecuredResource(PlainResource, GuardedMixin):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
7,623 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.testCallAfterAndBeforeCallbacks.TestResource
|
class TestResource(self.ToBeSecuredResource):
def create(self):
if has_assert_called:
policy.can_create__before.assert_called()
policy.can_create__after.assert_not_called()
def read(self):
if has_assert_called:
policy.can_read__before.assert_called()
policy.can_read__after.assert_not_called()
def update(self):
if has_assert_called:
policy.can_update__before.assert_called()
policy.can_update__after.assert_not_called()
def delete(self):
if has_assert_called:
policy.can_delete__before.assert_called()
policy.can_delete__after.assert_not_called()
|
class TestResource(self.ToBeSecuredResource):
def create(self):
pass
def read(self):
pass
def update(self):
pass
def delete(self):
pass
| 5 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 1 | 1 | 0 | 4 | 0 | 4 | 4 | 20 | 3 | 17 | 5 | 12 | 0 | 17 | 5 | 12 | 2 | 1 | 1 | 8 |
7,624 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.testDenyWithException.ForbiddenPolicy
|
class ForbiddenPolicy(ResourcePolicy):
@property
def default(self):
raise AccessError
|
class ForbiddenPolicy(ResourcePolicy):
@property
def default(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 10 | 4 | 0 | 4 | 3 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
7,625 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.testErrorHandler.ForbiddenPolicy
|
class ForbiddenPolicy(ResourcePolicy):
default = Access.DENY
|
class ForbiddenPolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,626 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.testPolicyChaining.AllowReadPolicy
|
class AllowReadPolicy(ResourcePolicy):
can_read = Access.ALLOW
|
class AllowReadPolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,627 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.testPolicyChaining.AllowUpdatePolicy
|
class AllowUpdatePolicy(ResourcePolicy):
can_update = Access.ALLOW
|
class AllowUpdatePolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,628 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.testPolicyChaining.DenyDeletePolicy
|
class DenyDeletePolicy(ResourcePolicy):
can_delete = Access.DENY
|
class DenyDeletePolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,629 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.testWrappers.PermissivePolicy
|
class PermissivePolicy(ResourcePolicy):
default = Access.ALLOW
|
class PermissivePolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,630 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_policy_test.py
|
tests.resource.resource_policy_test.TestResourcePolicy.testDefault.TestPolicy
|
class TestPolicy(ResourcePolicy):
pass
|
class TestPolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
7,631 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_policy_test.py
|
tests.resource.resource_policy_test.TestResourcePolicy.testDefaultSetter.TestPolicy
|
class TestPolicy(ResourcePolicy):
default = Access.ALLOW
|
class TestPolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,632 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/resource_guard_test.py
|
tests.resource.resource_guard_test.TestResourceGuard.setUp.PlainResource
|
class PlainResource:
def create(self):
return TEST_VALUE_CREATED
def read(self):
return TEST_VALUE_READ
def update(self):
return TEST_VALUE_UPDATED
def delete(self):
return TEST_VALUE_DELETED
|
class PlainResource:
def create(self):
pass
def read(self):
pass
def update(self):
pass
def delete(self):
pass
| 5 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 4 | 0 | 4 | 4 | 12 | 3 | 9 | 5 | 4 | 0 | 9 | 5 | 4 | 1 | 0 | 0 | 4 |
7,633 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/guarded_resource_test.py
|
tests.resource.guarded_resource_test.GuardedResourceTest.testCustomDenyException.PaymentPolicy
|
class PaymentPolicy(ResourcePolicy):
@property
def default(self):
raise PaymentError()
|
class PaymentPolicy(ResourcePolicy):
@property
def default(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 10 | 4 | 0 | 4 | 3 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
7,634 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/guarded_resource_test.py
|
tests.resource.guarded_resource_test.GuardedResourceTest.testCustomDenyException.PaymentError
|
class PaymentError(PolicyViolationError):
def __init__(self):
super().__init__('Payment required', code=402)
|
class PaymentError(PolicyViolationError):
def __init__(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 12 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 4 | 0 | 1 |
7,635 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/guarded_resource_test.py
|
tests.resource.guarded_resource_test.GuardedResourceTest
|
class GuardedResourceTest(TestCase, HTTPTestCase):
PATH = '/resource/guarded_resource_test'
RESOURCE_CLASS = 'resource.guarded_resource_test.GuardedResourceTestResource'
def setUp(self):
config_object = IN_MEMORY_DB_SETTINGS.copy()
# add this package as resource package for zsl to find the
# `JsonServerModelResourceResource`
config_object['RESOURCE_PACKAGES'] = ('resource',)
zsl = Zsl(__name__, config_object=config_object,
modules=WebContainer.modules())
zsl.testing = True
# mock http requests
self.app = zsl.test_client()
def testAllowPolicy(self):
class AllowPolicy(ResourcePolicy):
default = Access.ALLOW
@guard([AllowPolicy()])
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
return SAMPLE_MODEL
with mock.patch(self.RESOURCE_CLASS, Resource):
rv = self.app.get(self.PATH + '/1')
self.assertHTTPStatus(
http.client.OK,
rv.status_code,
"should return 200 status, returned data {0}".format(rv.data)
)
data = json_loads(rv.data)
self.assertDictEqual(SAMPLE_MODEL, data,
"should return sample model")
def testDefaultDenyPolicy(self):
class DenyPolicy(ResourcePolicy):
default = Access.DENY
@guard([DenyPolicy()])
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
return SAMPLE_MODEL
with mock.patch(self.RESOURCE_CLASS, Resource):
rv = self.app.get(self.PATH + '/1')
data = json_loads(rv.data)
self.assertDictEqual({}, data, "should return an empty model")
self.assertHTTPStatus(http.client.FORBIDDEN, rv.status_code,
"should return default 403 status")
def testCustomDenyException(self):
class PaymentError(PolicyViolationError):
def __init__(self):
super().__init__('Payment required', code=402)
class PaymentPolicy(ResourcePolicy):
@property
def default(self):
raise PaymentError()
@guard([PaymentPolicy()])
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
return SAMPLE_MODEL
with mock.patch(self.RESOURCE_CLASS, Resource):
rv = self.app.get(self.PATH + '/1')
data = json_loads(rv.data)
self.assertDictEqual({}, data, "should return an empty model")
self.assertHTTPStatus(http.client.PAYMENT_REQUIRED, rv.status_code,
"should return custom status 402")
def testCustomErrorHandling(self):
def error_handler(*_):
return ResourceResult(body={}, status=301)
class DenyPolicy(ResourcePolicy):
default = Access.DENY
@guard(
policies=[DenyPolicy()],
exception_handlers=[error_handler]
)
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
return SAMPLE_MODEL
with mock.patch(self.RESOURCE_CLASS, Resource):
rv = self.app.get(self.PATH + '/1')
data = json_loads(rv.data)
self.assertDictEqual({}, data, "should return an empty model")
self.assertHTTPStatus(http.client.MOVED_PERMANENTLY, rv.status_code,
"should return custom status 301")
|
class GuardedResourceTest(TestCase, HTTPTestCase):
def setUp(self):
pass
def testAllowPolicy(self):
pass
class AllowPolicy(ResourcePolicy):
@guard([AllowPolicy()])
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
pass
def testDefaultDenyPolicy(self):
pass
class DenyPolicy(ResourcePolicy):
@guard([DenyPolicy()])
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
pass
def testCustomDenyException(self):
pass
class PaymentError(PolicyViolationError):
def __init__(self):
pass
class PaymentPolicy(ResourcePolicy):
@property
def default(self):
pass
@guard([PaymentPolicy()])
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
pass
def testCustomErrorHandling(self):
pass
def error_handler(*_):
pass
class DenyPolicy(ResourcePolicy):
@guard(
policies=[DenyPolicy()],
exception_handlers=[error_handler]
)
class Resource(GuardedMixin):
def read(self, *args, **kwargs):
pass
| 27 | 0 | 9 | 1 | 7 | 0 | 1 | 0.04 | 2 | 10 | 10 | 0 | 5 | 1 | 5 | 83 | 99 | 20 | 76 | 46 | 46 | 3 | 59 | 38 | 37 | 1 | 2 | 1 | 12 |
7,636 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/testing/db.py
|
zsl.testing.db.TestTransactionHolderFactory
|
class TestTransactionHolderFactory(TransactionHolderFactory):
def create_transaction_holder(self):
return TestTransactionHolder()
|
class TestTransactionHolderFactory(TransactionHolderFactory):
def create_transaction_holder(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 2 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
7,637 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/testing/db.py
|
zsl.testing.db.TestTransactionHolder
|
class TestTransactionHolder(TransactionHolder):
def __init__(self):
super().__init__()
self._nested_tx = None
def begin(self):
self._nested_tx = self.session.begin_nested()
def commit(self):
self._nested_tx.commit()
def rollback(self):
self._nested_tx.rollback()
def close(self):
logger.debug("Close.")
self._orm = None
self._in_transaction = False
|
class TestTransactionHolder(TransactionHolder):
def __init__(self):
pass
def begin(self):
pass
def commit(self):
pass
def rollback(self):
pass
def close(self):
pass
| 6 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 5 | 3 | 5 | 16 | 18 | 4 | 14 | 9 | 8 | 0 | 14 | 9 | 8 | 1 | 1 | 0 | 5 |
7,638 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/xml_to_json.py
|
zsl.utils.xml_to_json.XmlToJsonException
|
class XmlToJsonException(Exception):
"""Exception raised during converting xml to json."""
# TODO make use of build in exception, see bug #13299
pass
|
class XmlToJsonException(Exception):
'''Exception raised during converting xml to json.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 2 | 1 | 1 | 2 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
7,639 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/xml_to_json.py
|
zsl.utils.xml_to_json.NotCompleteXmlException
|
class NotCompleteXmlException(Exception):
"""Exception raised during parsing an invalid XML."""
# TODO make use of build in exception, see bug #13299
pass
|
class NotCompleteXmlException(Exception):
'''Exception raised during parsing an invalid XML.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 2 | 1 | 1 | 2 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
7,640 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/xml_helper.py
|
zsl.utils.xml_helper.NotValidXmlException
|
class NotValidXmlException(Exception):
"""Exception raised on invalid xml"""
# TODO try to use some build in exception #13299
pass
|
class NotValidXmlException(Exception):
'''Exception raised on invalid xml'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 2 | 1 | 1 | 2 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
7,641 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/testing/db.py
|
zsl.testing.db.DatabaseSchemaInitializationException
|
class DatabaseSchemaInitializationException(Exception):
pass
|
class DatabaseSchemaInitializationException(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
7,642 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/tasks/zsl/test_task.py
|
zsl.tasks.zsl.test_task.TestTask
|
class TestTask:
def perform(self, _data):
logging.getLogger(__name__).debug("Running zsl.tasks.zsl.TestTask")
return "ok"
|
class TestTask:
def perform(self, _data):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 4 | 0 | 4 | 2 | 2 | 0 | 4 | 2 | 2 | 1 | 0 | 0 | 1 |
7,643 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/resource_helper.py
|
zsl.utils.resource_helper.MethodNotImplementedException
|
class MethodNotImplementedException(Exception):
"""Exception raised on missing method"""
pass
|
class MethodNotImplementedException(Exception):
'''Exception raised on missing method'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 3 | 0 | 2 | 1 | 1 | 1 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
7,644 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/params_helper.py
|
zsl.utils.params_helper.RequestException
|
class RequestException(Exception):
"""Exception raised on bad request"""
pass
|
class RequestException(Exception):
'''Exception raised on bad request'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 3 | 0 | 2 | 1 | 1 | 1 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
7,645 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/application/version_test.py
|
tests.application.version_test.AppVersionTestCase
|
class AppVersionTestCase(ZslTestCase, TestCase):
CONFIG = IN_MEMORY_DB_SETTINGS.copy()
CONFIG.update(
TASKS=TaskConfiguration()
)
ZSL_TEST_CONFIGURATION = ZslTestConfiguration(
app_name='ErrorHandlingTestCase', container=CoreContainer,
version="1234.1111.4321",
config_object=CONFIG)
@inject(app=Zsl)
def testErrorTaskExecution(self, app: Zsl) -> None:
self.assertEqual(Zsl.VERSION, app.zsl_version)
self.assertEqual("1234.1111.4321", app.app_version)
self.assertEqual(Zsl.VERSION + ":1234.1111.4321", app.get_version())
self.assertEqual(Zsl.VERSION + ":1234.1111.4321", app.version)
|
class AppVersionTestCase(ZslTestCase, TestCase):
@inject(app=Zsl)
def testErrorTaskExecution(self, app: Zsl) -> None:
pass
| 3 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 75 | 16 | 1 | 15 | 5 | 12 | 0 | 9 | 4 | 7 | 1 | 2 | 0 | 1 |
7,646 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/tasks/zsl/schedule_kill_worker_task.py
|
zsl.tasks.zsl.schedule_kill_worker_task.ScheduleKillWorkerTask
|
class ScheduleKillWorkerTask:
def perform(self, _):
# Create gearman.
schedule_gearman_task('zsl/kill_worker_task', {})
return 'job submitted'
|
class ScheduleKillWorkerTask:
def perform(self, _):
pass
| 2 | 0 | 4 | 0 | 3 | 1 | 1 | 0.25 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 5 | 0 | 4 | 2 | 2 | 1 | 4 | 2 | 2 | 1 | 0 | 0 | 1 |
7,647 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/application/version_test.py
|
tests.application.version_test.NoAppVersionTestCase
|
class NoAppVersionTestCase(ZslTestCase, TestCase):
CONFIG = IN_MEMORY_DB_SETTINGS.copy()
CONFIG.update(
TASKS=TaskConfiguration()
)
ZSL_TEST_CONFIGURATION = ZslTestConfiguration(
app_name='ErrorHandlingTestCase', container=CoreContainer,
config_object=CONFIG)
@inject(app=Zsl)
def testErrorTaskExecution(self, app: Zsl) -> None:
self.assertEqual(Zsl.VERSION, app.zsl_version)
self.assertIsNone(app.app_version)
self.assertEqual(Zsl.VERSION, app.get_version())
self.assertEqual(Zsl.VERSION, app.version)
|
class NoAppVersionTestCase(ZslTestCase, TestCase):
@inject(app=Zsl)
def testErrorTaskExecution(self, app: Zsl) -> None:
pass
| 3 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 75 | 15 | 1 | 14 | 5 | 11 | 0 | 9 | 4 | 7 | 1 | 2 | 0 | 1 |
7,648 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/interface/web/cors_test.py
|
tests.interface.web.cors_test.CorsTestCase
|
class CorsTestCase(ZslTestCase, TestCase):
CONFIG = IN_MEMORY_DB_SETTINGS.copy()
CONFIG.update(
CORS=CORSConfiguration(
'default-origin', ['default-allow-headers'], ['default-expose-headers'], 42),
TASKS=TaskConfiguration()
.create_namespace('n')
.add_routes({'r': TestTask})
.get_configuration().
create_namespace('z')
.add_packages(['zsl.tasks.zsl'])
.get_configuration()
)
ZSL_TEST_CONFIGURATION = ZslTestConfiguration(
app_name='CorsTestCase', container=WebContainer,
config_object=CONFIG)
@inject(app=Zsl)
def testNotFound(self, app):
with app.test_request_context('/'):
response = perform_web_task('nn', 'r')
self.assertEqual(http.client.NOT_FOUND,
response.status_code,
"Status code must be NOT_FOUND.")
self.assertEqual('default-allow-headers',
response.headers['Access-Control-Allow-Headers'])
self.assertEqual('default-expose-headers',
response.headers['Access-Control-Expose-Headers'])
self.assertEqual('default-origin',
response.headers['Access-Control-Allow-Origin'])
self.assertEqual('42', response.headers['Access-Control-Max-Age'])
@inject(app=Zsl)
def testCustom(self, app):
with app.test_request_context('/'):
response = perform_web_task('n', 'r')
self.assertEqual(http.client.OK,
response.status_code,
"Status code must be OK.")
self.assertEqual(
'allow-h', response.headers['Access-Control-Allow-Headers'])
self.assertEqual(
'expose-h', response.headers['Access-Control-Expose-Headers'])
self.assertEqual(
'custom-origin', response.headers['Access-Control-Allow-Origin'])
self.assertEqual('21', response.headers['Access-Control-Max-Age'])
|
class CorsTestCase(ZslTestCase, TestCase):
@inject(app=Zsl)
def testNotFound(self, app):
pass
@inject(app=Zsl)
def testCustom(self, app):
pass
| 5 | 0 | 12 | 2 | 10 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 2 | 0 | 2 | 76 | 44 | 7 | 37 | 9 | 32 | 0 | 20 | 7 | 17 | 1 | 2 | 1 | 2 |
7,649 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/interface/web/cors_test.py
|
tests.interface.web.cors_test.TestTask
|
class TestTask:
@crossdomain('custom-origin', 'm1', 'allow-h', 'expose-h', 21)
def perform(self, _data):
return "ok"
|
class TestTask:
@crossdomain('custom-origin', 'm1', 'allow-h', 'expose-h', 21)
def perform(self, _data):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 4 | 0 | 4 | 3 | 1 | 0 | 3 | 2 | 1 | 1 | 0 | 0 | 1 |
7,650 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/nginx_push_helper.py
|
zsl.utils.nginx_push_helper.NginxPusher
|
class NginxPusher:
def __init__(self, server_path, channel_prefix=None):
self._server_path = server_path
self._channel_prefix = (channel_prefix + '.') if channel_prefix is not None else ''
def channel_path(self, channel_id):
return '{0}?id={1}{2}'.format(self._server_path, self._channel_prefix, channel_id)
def push_msg(self, channel_id, msg):
"""Push ``msg`` for given ``channel_id``. If ``msg`` is not string, it
will be urlencoded
"""
if type(msg) is not str:
msg = urlencode(msg)
return self.push(channel_id, msg)
def push_object(self, channel_id, obj):
"""Push ``obj`` for ``channel_id``. ``obj`` will be encoded as JSON in
the request.
"""
return self.push(channel_id, json.dumps(obj).replace('"', '\\"'))
def push(self, channel_id, data):
"""Push message with POST ``data`` for ``channel_id``
"""
channel_path = self.channel_path(channel_id)
response = requests.post(channel_path, data)
return response.json()
def delete_channel(self, channel_id):
"""Deletes channel
"""
req = requests.delete(self.channel_path(channel_id))
return req
|
class NginxPusher:
def __init__(self, server_path, channel_prefix=None):
pass
def channel_path(self, channel_id):
pass
def push_msg(self, channel_id, msg):
'''Push ``msg`` for given ``channel_id``. If ``msg`` is not string, it
will be urlencoded
'''
pass
def push_object(self, channel_id, obj):
'''Push ``obj`` for ``channel_id``. ``obj`` will be encoded as JSON in
the request.
'''
pass
def push(self, channel_id, data):
'''Push message with POST ``data`` for ``channel_id``
'''
pass
def delete_channel(self, channel_id):
'''Deletes channel
'''
pass
| 7 | 4 | 6 | 1 | 3 | 2 | 1 | 0.53 | 0 | 2 | 0 | 0 | 6 | 2 | 6 | 6 | 39 | 10 | 19 | 12 | 12 | 10 | 19 | 12 | 12 | 2 | 0 | 1 | 8 |
7,651 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/deploy/js_model_generator.py
|
zsl.utils.deploy.js_model_generator.ModelGenerator
|
class ModelGenerator:
def __init__(self, module, model_prefix="", collection_prefix="", model_fn="Atteq.bb.Model",
collection_fn="Atteq.bb.Collection"):
self.model_prefix = model_prefix
self.collection_prefix = collection_prefix
self.model_fn = model_fn
self.collection_fn = collection_fn
self.table_to_class = {}
self.models = importlib.import_module(module)
def _get_list_options(self, column):
fk = list(column.foreign_keys)[0]
table_name = underscore_to_camelcase(fk.column.table.name)
return list_opts_tpl.format(collection_prefix=self.collection_prefix, model_name=table_name)
def _map_table_name(self, model_names):
"""
For foreign_keys we need to determine the class from the table name,
so let's map it here
"""
for model in model_names:
if isinstance(model, tuple):
model = model[0]
try:
model_cls = getattr(self.models, model)
self.table_to_class[class_mapper(model_cls).tables[0].name] = model
except AttributeError:
pass
def generate_model(self, model_name, model_plural=None):
if model_name not in dir(self.models):
raise ImportError(
"Model [{name}] couldn't be found in {module}\n".format(name=model_name, module=self.models.__name__))
if model_plural is None:
model_plural = model_name + 's'
model = getattr(self.models, model_name)
schema = {}
mapper = class_mapper(model)
callbacks = []
for column in mapper.columns:
col_type = column.type.__class__.__name__
attrs = {}
if column.primary_key:
continue
if column.foreign_keys:
try:
attrs['type'] = 'AtteqSelect'
attrs['options'] = '__CALLBACK__%d' % len(callbacks)
callbacks.append(self._get_list_options(column))
# TODO uf uf uuuuf
fk_table = list(column.foreign_keys)[0].target_fullname.split('.')[0]
if fk_table in self.table_to_class:
attrs['foreign_model'] = '%s%s' % (self.model_prefix, self.table_to_class[fk_table])
except sqlalchemy.exc.NoReferencedTableError:
attrs['type'] = 'Text'
elif col_type == 'TEXT':
attrs['type'] = "TextArea"
elif col_type == 'Enum':
attrs['type'] = 'AtteqSelect' if column.nullable else 'Select'
attrs['options'] = column.type.enums
elif col_type == 'INTEGER':
attrs['type'] = 'Number'
else:
attrs['type'] = "Text"
if column.nullable:
attrs['nullable'] = True
schema[column.name] = attrs
schema = "\n ".join(json.dumps(schema, indent=4).split("\n"))
for i in range(len(callbacks)):
schema = schema.replace('"__CALLBACK__%d"' % i, callbacks[i])
return model_tpl.format(
model_name=model_name,
model_prefix=self.model_prefix,
collection_prefix=self.collection_prefix,
resource_name=camelcase_to_underscore(model_plural),
model_fn=self.model_fn,
collection_fn=self.collection_fn,
schema=schema
)
def generate_models(self, models):
js_models = []
self._map_table_name(models)
for model in models:
if isinstance(model, tuple):
model_name = model[0]
model_plural = model[1]
else:
model_name = model
model_plural = None
js_model = self.generate_model(model_name, model_plural)
js_models.append(js_model)
return js_models
|
class ModelGenerator:
def __init__(self, module, model_prefix="", collection_prefix="", model_fn="Atteq.bb.Model",
collection_fn="Atteq.bb.Collection"):
pass
def _get_list_options(self, column):
pass
def _map_table_name(self, model_names):
'''
For foreign_keys we need to determine the class from the table name,
so let's map it here
'''
pass
def generate_model(self, model_name, model_plural=None):
pass
def generate_models(self, models):
pass
| 6 | 1 | 23 | 5 | 17 | 1 | 5 | 0.06 | 0 | 5 | 0 | 0 | 5 | 6 | 5 | 5 | 120 | 31 | 84 | 31 | 77 | 5 | 69 | 30 | 63 | 14 | 0 | 4 | 23 |
7,652 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/deploy/apiari_doc_generator.py
|
zsl.utils.deploy.apiari_doc_generator.ApiaryDoc
|
class ApiaryDoc(pydoc.Doc):
_API_DOC_STR = "API Documentation:"
def __init__(self):
self._docs = []
self._done = set()
@staticmethod
def _get_obj_id(obj):
if hasattr(obj, '__path__'):
return obj.__path__[0]
if hasattr(obj, '__file__'):
return obj.__file__
if hasattr(obj, '__name__'):
return obj.__name__
else:
return obj
def _add_doc(self, obj):
obj_id = self._get_obj_id(obj)
if obj_id in self._done:
return
logging.debug('Adding {0}.'.format(obj_id))
self._done.add(obj_id)
apistr = ""
try:
if obj.__doc__ is None:
return
start = obj.__doc__.find(self._API_DOC_STR)
if start == -1:
return
apistr = obj.__doc__[start + len(self._API_DOC_STR):]
except: # NOQA
return
apistr = apistr.splitlines()
while apistr[0].isspace() or apistr[0] == "":
apistr = apistr[1:]
def white_space_at_beginning(l):
c = 0
i = 0
length = len(l)
while i < length:
if l[i].isspace():
c += 1
else:
return c
i += 1
return c
m = white_space_at_beginning(apistr[0])
for char in apistr:
if char.isspace() or char == "":
continue
wl = white_space_at_beginning(char)
if m > wl:
m = wl
apistr = [x[m:] for x in apistr]
self._docs.append("\n".join(apistr))
def docmodule(self, obj, name=None, *args):
if self._get_obj_id(obj) in self._done:
return
self._add_doc(obj)
for _key, value in inspect.getmembers(obj, inspect.isclass):
self._add_doc(value)
for _key, value in inspect.getmembers(obj, lambda x: hasattr(x, '__call__')):
self._add_doc(value)
for _key, value in inspect.getmembers(obj, inspect.ismodule):
self.docmodule(value)
if not hasattr(obj, '__path__'):
return
if obj.__path__[0].startswith(os.path.dirname(sys.executable)):
return
for loader, module_name, _ispkg in pkgutil.iter_modules(obj.__path__):
logging.debug("Loading module {0} in {1}.".format(module_name, obj.__path__))
try:
module = loader.find_module(module_name).load_module(module_name)
self.docmodule(module)
except: # NOQA
pass
def get_doc(self):
return "FORMAT: 1A\n\n" + "\n\n".join(self._docs)
|
class ApiaryDoc(pydoc.Doc):
def __init__(self):
pass
@staticmethod
def _get_obj_id(obj):
pass
def _add_doc(self, obj):
pass
def white_space_at_beginning(l):
pass
def docmodule(self, obj, name=None, *args):
pass
def get_doc(self):
pass
| 8 | 0 | 16 | 2 | 14 | 0 | 5 | 0.03 | 1 | 1 | 0 | 0 | 4 | 2 | 5 | 8 | 92 | 15 | 77 | 23 | 69 | 2 | 74 | 22 | 67 | 9 | 1 | 2 | 27 |
7,653 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/utils/command_dispatcher.py
|
zsl.utils.command_dispatcher.CommandDispatcher
|
class CommandDispatcher:
"""
A simple class for command dictionary. A command is a function
which can take named parameters.
"""
def __init__(self):
"""
Create command dictionary
"""
self.commands = {}
def command(self, fn):
"""
Add method or function to dispatcher. Can be used as a nice
decorator.
:param fn: function or method
:type fn: function
:return: the same function
:rtype: function
"""
self.commands[fn.__name__] = fn
return fn
"""alias for ``CommandDispatcher.command``"""
add_function = command
def execute_command(self, command, args=None):
"""
Execute a command
:param command: name of the command
:type command: str
:param args: optional named arguments for command
:type args: dict
:return: the result of command
:raises KeyError: if command is not found
"""
if args is None:
args = {}
command_fn = self.commands[command]
return command_fn(**args)
def bound(self, instance):
"""
Return a new dispatcher, which will switch all command functions
with bounded methods of given instance matched by name. It will
match only regular methods.
:param instance: object instance
:type instance: object
:return: new Dispatcher
:rtype: CommandDispatcher
"""
bounded_dispatcher = CommandDispatcher()
bounded_dispatcher.commands = self.commands.copy()
for name in self.commands:
method = getattr(instance, name, None)
if method and inspect.ismethod(method) and method.__self__ == instance:
bounded_dispatcher.commands[name] = method
return bounded_dispatcher
|
class CommandDispatcher:
'''
A simple class for command dictionary. A command is a function
which can take named parameters.
'''
def __init__(self):
'''
Create command dictionary
'''
pass
def command(self, fn):
'''
Add method or function to dispatcher. Can be used as a nice
decorator.
:param fn: function or method
:type fn: function
:return: the same function
:rtype: function
'''
pass
def execute_command(self, command, args=None):
'''
Execute a command
:param command: name of the command
:type command: str
:param args: optional named arguments for command
:type args: dict
:return: the result of command
:raises KeyError: if command is not found
'''
pass
def bound(self, instance):
'''
Return a new dispatcher, which will switch all command functions
with bounded methods of given instance matched by name. It will
match only regular methods.
:param instance: object instance
:type instance: object
:return: new Dispatcher
:rtype: CommandDispatcher
'''
pass
| 5 | 5 | 15 | 3 | 5 | 7 | 2 | 1.7 | 0 | 0 | 0 | 0 | 4 | 1 | 4 | 4 | 71 | 17 | 20 | 11 | 15 | 34 | 20 | 11 | 15 | 3 | 0 | 2 | 7 |
7,654 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/interface/web/task_router_test.py
|
tests.interface.web.task_router_test.TaskRouterTestCase
|
class TaskRouterTestCase(ZslTestCase, TestCase):
CONFIG = IN_MEMORY_DB_SETTINGS.copy()
CONFIG.update(
CORS=CORSConfiguration('origin'),
TASKS=TaskConfiguration().create_namespace('n').add_routes(
{'r': TestTask}).get_configuration().create_namespace('z').add_packages(
['zsl.tasks.zsl']).get_configuration()
)
ZSL_TEST_CONFIGURATION = ZslTestConfiguration(
app_name='ErrorHandlingTestCase', container=WebContainer,
config_object=CONFIG)
@inject(app=Zsl)
def testRoutingRoutes(self, app):
with app.test_request_context('/'):
response = perform_web_task('n', 'r')
self.assertEqual(http.client.OK,
response.status_code,
"Status code must be ok.")
self.assertEqual('ok', response.data.decode('utf-8'))
@inject(app=Zsl)
def testNotFound(self, app):
with app.test_request_context('/'):
response = perform_web_task('nn', 'r')
self.assertEqual(http.client.NOT_FOUND,
response.status_code,
"Status code must be NOT_FOUND.")
@inject(app=Zsl)
def testRoutingPackages(self, app):
with app.test_request_context('/'):
response = perform_web_task('z', 'test_task')
print(response.status_code)
self.assertEqual(http.client.OK,
response.status_code,
"Status code must be ok.")
self.assertEqual('ok', response.data.decode('utf-8'))
|
class TaskRouterTestCase(ZslTestCase, TestCase):
@inject(app=Zsl)
def testRoutingRoutes(self, app):
pass
@inject(app=Zsl)
def testNotFound(self, app):
pass
@inject(app=Zsl)
def testRoutingPackages(self, app):
pass
| 7 | 0 | 8 | 1 | 7 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 3 | 0 | 3 | 77 | 40 | 5 | 35 | 12 | 28 | 0 | 19 | 9 | 15 | 1 | 2 | 1 | 3 |
7,655 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/interface/worker/run_dummy_worker_with_params_test.py
|
tests.interface.worker.run_dummy_worker_with_params_test.RunDummyWorkerWithParamsTestCase
|
class RunDummyWorkerWithParamsTestCase(ZslTestCase, TestCase):
CONFIG = IN_MEMORY_DB_SETTINGS.copy()
ZSL_TEST_CONFIGURATION = ZslTestConfiguration(
app_name='RunDummyWorkerWithParamsTestCase', container=DummyWorkerContainer,
config_object=CONFIG)
_PARAM_1_TEST_VALUE = 1
_PARAM_2_TEST_VALUE = 'Dont forget to read some good books!'
@inject(worker=TaskQueueWorker)
def testWorkerRunCountAndRunParams(self, worker):
self.assertEqual(worker.run_count, 0,
"worker.run() shouldn't be called yet.")
run_worker(self._PARAM_1_TEST_VALUE, param_2=self._PARAM_2_TEST_VALUE)
self.assertEqual(worker.run_count, 1,
"worker.run() should be called exactly once.")
self.assertEqual(worker.last_param_1, self._PARAM_1_TEST_VALUE,
"worker.run() has obtained incorrect parameter 'param_1'.")
self.assertEqual(worker.last_param_2, self._PARAM_2_TEST_VALUE,
"worker.run() has obtained incorrect parameter 'param_2'.")
|
class RunDummyWorkerWithParamsTestCase(ZslTestCase, TestCase):
@inject(worker=TaskQueueWorker)
def testWorkerRunCountAndRunParams(self, worker):
pass
| 3 | 0 | 8 | 2 | 6 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 75 | 18 | 4 | 14 | 7 | 11 | 0 | 11 | 6 | 9 | 1 | 2 | 0 | 1 |
7,656 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/interface/worker/run_dummy_worker_without_params_test.py
|
tests.interface.worker.run_dummy_worker_without_params_test.RunDummyWorkerWithoutParamsTestCase
|
class RunDummyWorkerWithoutParamsTestCase(ZslTestCase, TestCase):
CONFIG = IN_MEMORY_DB_SETTINGS.copy()
ZSL_TEST_CONFIGURATION = ZslTestConfiguration(
app_name='RunDummyWorkerWithoutParamsTestCase', container=DummyWorkerContainer,
config_object=CONFIG)
@inject(worker=TaskQueueWorker)
def testWorkerRunCount(self, worker):
self.assertEqual(worker.run_count, 0,
"worker.run() shouldn't be called yet.")
run_worker()
self.assertEqual(worker.run_count, 1,
"worker.run() should be called exactly once.")
|
class RunDummyWorkerWithoutParamsTestCase(ZslTestCase, TestCase):
@inject(worker=TaskQueueWorker)
def testWorkerRunCount(self, worker):
pass
| 3 | 0 | 6 | 2 | 4 | 0 | 1 | 0 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 75 | 13 | 3 | 10 | 5 | 7 | 0 | 7 | 4 | 5 | 1 | 2 | 0 | 1 |
7,657 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/mocks.py
|
tests.mocks.mock_db_session.TestSessionFactory
|
class TestSessionFactory(DbTestTestSessionFactory):
def __init__(self):
super().__init__()
self._session_holder = session_holder
|
class TestSessionFactory(DbTestTestSessionFactory):
def __init__(self):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 1 | 9 | 4 | 0 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 2 | 0 | 1 |
7,658 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/interface/cli/exec_task_from_cli_test.py
|
tests.interface.cli.exec_task_from_cli_test.ExecTaskFromCliTestCase
|
class ExecTaskFromCliTestCase(ZslTestCase, TestCase):
ZSL_TEST_CONFIGURATION = ZslTestConfiguration(
app_name="ExecTaskFromCliTestCase",
config_object=CONFIG,
container=TestCliContainer,
)
@inject(zsl_cli=ZslCli)
def testRunningTestTask(self, zsl_cli):
# type:(ZslCli)->None
runner = CliRunner()
result = runner.invoke(zsl_cli.cli, ["task", "task/zsl/test_task"])
self.assertEqual(0, result.exit_code, "No error is expected.")
self.assertEqual("ok", result.output.strip(),
"Valid task output must be shown")
@inject(zsl_cli=ZslCli)
def testRunningTaskWithListInput(self, zsl_cli):
# type:(ZslCli)->None
runner = CliRunner()
result = runner.invoke(
zsl_cli.cli,
["task", "task/zsl/with_request_task",
'{"list_of_numbers": [1,2,3] }'],
)
self.assertEqual(0, result.exit_code, "No error is expected.")
self.assertEqual(
json.loads("[1, 2, 3]"),
json.loads(result.output.strip()),
"Valid task output must be shown",
)
|
class ExecTaskFromCliTestCase(ZslTestCase, TestCase):
@inject(zsl_cli=ZslCli)
def testRunningTestTask(self, zsl_cli):
pass
@inject(zsl_cli=ZslCli)
def testRunningTaskWithListInput(self, zsl_cli):
pass
| 5 | 0 | 10 | 0 | 9 | 1 | 1 | 0.08 | 2 | 0 | 0 | 0 | 2 | 0 | 2 | 76 | 29 | 2 | 25 | 10 | 20 | 2 | 12 | 8 | 9 | 1 | 2 | 0 | 2 |
7,659 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest.testIsInTransaction.AllowPolicy
|
class AllowPolicy(ResourcePolicy):
default = Access.ALLOW
|
class AllowPolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,660 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/db/helpers/query_filter.py
|
zsl.db.helpers.query_filter.OperatorLike
|
class OperatorLike:
@staticmethod
def apply(q, attr, v):
return q.filter(attr.like('%{0}%'.format(v)))
|
class OperatorLike:
@staticmethod
def apply(q, attr, v):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 4 | 0 | 4 | 3 | 1 | 0 | 3 | 2 | 1 | 1 | 0 | 0 | 1 |
7,661 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest.testRollbackAfter.DenyAfterPolicy
|
class DenyAfterPolicy(ResourcePolicy):
default = Access.ALLOW
def can_read__after(self, *args, **kwargs):
return Access.DENY
|
class DenyAfterPolicy(ResourcePolicy):
def can_read__after(self, *args, **kwargs):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 10 | 5 | 1 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 1 | 0 | 1 |
7,662 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/testing/http.py
|
zsl.testing.http.HTTPTestCase
|
class HTTPTestCase:
"""Extends TestCase with methods for easier testing of HTTP requests."""
_DEFAULT_REQUEST_TASK_HEADERS = {
HttpHeaders.CONTENT_TYPE.value: MimeType.APPLICATION_JSON.value
}
def requestTask(self, client, task, data, headers=None):
# type: (FlaskClient, str, dict, dict)->Response
"""
Request a task using POST and convert the given data to JSON.
:param client: The client to ZSL which will be used for the request.
:param task: Url which will be requested using POST method.
:param data: Data which will be posted and first converted to JSON.
:param headers: Dictionary of headers that'll be appended to the
Content-Type: application/json header.
:return: Flask response.
"""
if headers is None:
headers = {}
headers.update(HTTPTestCase._DEFAULT_REQUEST_TASK_HEADERS)
return client.post(task, data=json.dumps(data), headers=headers)
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert HTTP status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
status = get_http_status_code_value(status)
self.assertEqual(status, test_value, msg)
def assertJSONData(self, rv, data, msg):
# type: (Response, Any, AnyStr) -> None
data1 = self.extractResponseJSON(rv)
self.assertEqual(data1, data, msg)
def extractResponseJSON(self, rv):
# type: (Response) -> Dict
return json.loads(rv.data.decode())
@inject(app=Zsl)
def getHTTPClient(self, app):
# type: (Zsl) -> FlaskClient
return app.test_client()
@inject(app=Zsl)
def getRequestContext(self, app):
return app.test_request_context()
|
class HTTPTestCase:
'''Extends TestCase with methods for easier testing of HTTP requests.'''
def requestTask(self, client, task, data, headers=None):
'''
Request a task using POST and convert the given data to JSON.
:param client: The client to ZSL which will be used for the request.
:param task: Url which will be requested using POST method.
:param data: Data which will be posted and first converted to JSON.
:param headers: Dictionary of headers that'll be appended to the
Content-Type: application/json header.
:return: Flask response.
'''
pass
def assertHTTPStatus(self, status, test_value, msg):
'''Assert HTTP status
:param status: http status
:param test_value: flask respond status
:param msg: test message
'''
pass
def assertJSONData(self, rv, data, msg):
pass
def extractResponseJSON(self, rv):
pass
@inject(app=Zsl)
def getHTTPClient(self, app):
pass
@inject(app=Zsl)
def getRequestContext(self, app):
pass
| 9 | 3 | 7 | 1 | 3 | 3 | 1 | 0.87 | 0 | 0 | 0 | 4 | 6 | 0 | 6 | 6 | 54 | 11 | 23 | 11 | 14 | 20 | 19 | 9 | 12 | 2 | 0 | 1 | 7 |
7,663 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/testing/db.py
|
zsl.testing.db.TestSessionFactory
|
class TestSessionFactory(SessionFactory):
"""Factory always returning the single test transaction."""
_test_session = None
_db_schema_initialized = False
@classmethod
def reset_db_schema_initialization(cls, engine: Engine) -> None:
metadata.drop_all(engine)
cls._db_schema_initialized = False
@inject(engine_pool=EnginePool)
def initialize_db_schema(self, engine_pool: EnginePool = Injected) -> None:
engine = engine_pool.get_engine(EnginePool._DEFAULT_ENGINE_NAME)
if not TestSessionFactory._db_schema_initialized:
logger.info("Initialize db schema")
metadata.bind = engine
logger.debug(
"Initialize db schema - Check if db contains any table.")
self._raise_if_database_is_not_empty(engine)
logger.debug("Initialize db schema - Create all tables.")
metadata.create_all(engine)
logger.debug("Initialize db schema - Create all tables - Done.")
TestSessionFactory._db_schema_initialized = True
logger.info("Initialize db schema - Done.")
def create_test_session(self) -> Session:
assert TestSessionFactory._test_session is None
logger.debug("Create test session - begin test session/setUp")
TestSessionFactory._test_session = self._session_holder()
TestSessionFactory._test_session.autoflush = True
TestSessionFactory._test_session.begin_nested()
assert TestSessionFactory._test_session is not None
return TestSessionFactory._test_session
def create_session(self):
logger.debug("Create test session")
assert TestSessionFactory._test_session is not None
return TestSessionFactory._test_session
def close_test_session(self):
TestSessionFactory._test_session.rollback()
TestSessionFactory._test_session.close()
TestSessionFactory._test_session = None
logger.debug("Close test session - close test test session/tearDown")
def _raise_if_database_is_not_empty(self, engine):
inspector = inspect(engine)
existing_tables = inspector.get_table_names()
if len(existing_tables) > 0:
raise DatabaseSchemaInitializationException(
f"The database contains already some tables. This is forbidden to prevent accidentally running tests "
f"on a production database. Database url: {engine.url}. Found tables: {existing_tables}"
)
|
class TestSessionFactory(SessionFactory):
'''Factory always returning the single test transaction.'''
@classmethod
def reset_db_schema_initialization(cls, engine: Engine) -> None:
pass
@inject(engine_pool=EnginePool)
def initialize_db_schema(self, engine_pool: EnginePool = Injected) -> None:
pass
def create_test_session(self) -> Session:
pass
def create_session(self):
pass
def close_test_session(self):
pass
def _raise_if_database_is_not_empty(self, engine):
pass
| 9 | 1 | 7 | 1 | 7 | 0 | 1 | 0.02 | 1 | 2 | 2 | 1 | 5 | 0 | 6 | 8 | 56 | 10 | 45 | 14 | 36 | 1 | 40 | 12 | 33 | 2 | 1 | 1 | 8 |
7,664 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/testing/db.py
|
zsl.testing.db.DbTestModule
|
class DbTestModule(Module):
"""Module fixing the :class:`zsl.service.service.SessionFactory`
to our :class:`.TestSessionFactory`."""
@provides(SessionHolder, scope=singleton)
@inject(engine_pool=EnginePool)
def provide_session_holder(
self, engine_pool: EnginePool
) -> SessionHolder:
return TestSessionHolder(engine_pool)
@provides(SessionFactory, scope=singleton)
def get_session_factory(self) -> SessionFactory:
return TestSessionFactory()
@provides(TestSessionFactory, scope=singleton)
@inject(session_factory=SessionFactory)
def get_test_session_factory(
self, session_factory: SessionFactory = Injected
) -> SessionFactory:
return session_factory
@provides(TransactionHolderFactory, scope=singleton)
def provide_transaction_holder_factory(self) -> TransactionHolderFactory:
return TestTransactionHolderFactory()
|
class DbTestModule(Module):
'''Module fixing the :class:`zsl.service.service.SessionFactory`
to our :class:`.TestSessionFactory`.'''
@provides(SessionHolder, scope=singleton)
@inject(engine_pool=EnginePool)
def provide_session_holder(
self, engine_pool: EnginePool
) -> SessionHolder:
pass
@provides(SessionFactory, scope=singleton)
def get_session_factory(self) -> SessionFactory:
pass
@provides(TestSessionFactory, scope=singleton)
@inject(session_factory=SessionFactory)
def get_test_session_factory(
self, session_factory: SessionFactory = Injected
) -> SessionFactory:
pass
@provides(TransactionHolderFactory, scope=singleton)
def provide_transaction_holder_factory(self) -> TransactionHolderFactory:
pass
| 11 | 1 | 3 | 0 | 3 | 0 | 1 | 0.11 | 1 | 7 | 7 | 0 | 4 | 0 | 4 | 4 | 25 | 4 | 19 | 13 | 4 | 2 | 9 | 5 | 4 | 1 | 1 | 0 | 4 |
7,665 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/testing/db.py
|
zsl.testing.db.DbTestCase
|
class DbTestCase:
""":class:`.DbTestCase` is a mixin to be used when testing with
a database."""
_session = None
@classmethod
@inject(session_factory=TestSessionFactory)
def setUpClass(cls, session_factory: TestSessionFactory = Injected) -> None:
super().setUpClass()
session_factory.initialize_db_schema()
@inject(session_factory=TestSessionFactory)
def setUp(self, session_factory: TestSessionFactory = Injected) -> None:
super().setUp()
logger.debug("DbTestCase.setUp")
session_factory.create_test_session()
@inject(session_factory=TestSessionFactory)
def tearDown(self, session_factory: TestSessionFactory = Injected) -> None:
# This will return the same transaction/session
# as the one used in setUp.
logger.debug("DbTestCase.tearDown")
session_factory.close_test_session()
super().tearDown()
|
class DbTestCase:
''':class:`.DbTestCase` is a mixin to be used when testing with
a database.'''
@classmethod
@inject(session_factory=TestSessionFactory)
def setUpClass(cls, session_factory: TestSessionFactory = Injected) -> None:
pass
@inject(session_factory=TestSessionFactory)
def setUp(self, session_factory: TestSessionFactory = Injected) -> None:
pass
@inject(session_factory=TestSessionFactory)
def tearDown(self, session_factory: TestSessionFactory = Injected) -> None:
pass
| 8 | 1 | 4 | 0 | 4 | 1 | 1 | 0.24 | 0 | 2 | 1 | 4 | 2 | 0 | 3 | 3 | 25 | 4 | 17 | 8 | 9 | 4 | 13 | 5 | 9 | 1 | 0 | 0 | 3 |
7,666 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/tasks/zsl/with_request_task.py
|
zsl.tasks.zsl.with_request_task.WithRequestTask.Request
|
class Request:
def __init__(self):
self.list_of_numbers = []
|
class Request:
def __init__(self):
pass
| 2 | 0 | 2 | 0 | 2 | 1 | 1 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 3 | 0 | 3 | 3 | 1 | 1 | 3 | 3 | 1 | 1 | 0 | 0 | 1 |
7,667 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/tasks/zsl/with_request_task.py
|
zsl.tasks.zsl.with_request_task.WithRequestTask
|
class WithRequestTask:
class Request:
def __init__(self):
self.list_of_numbers = [] # type : List[int]
"""
Returns received data if any or 'empty'
"""
@inject(app=Zsl)
def __init__(self, app):
self._app = app
@json_input
@json_output
@payload_into_model(Request)
def perform(self, request):
return request.list_of_numbers if request.list_of_numbers else "empty"
|
class WithRequestTask:
class Request:
def __init__(self):
pass
@inject(app=Zsl)
def __init__(self, app):
pass
@json_input
@json_output
@payload_into_model(Request)
def perform(self, request):
pass
| 9 | 0 | 2 | 0 | 2 | 0 | 1 | 0.33 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 19 | 4 | 12 | 9 | 3 | 4 | 8 | 7 | 3 | 2 | 0 | 0 | 4 |
7,668 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/tasks/zsl/version_task.py
|
zsl.tasks.zsl.version_task.VersionTask
|
class VersionTask:
"""
Shows the versions of ASL and the various used libraries.
"""
@inject(app=Zsl)
def __init__(self, app):
self._app = app
@json_output
def perform(self, data):
return {
"ASL": self._app.VERSION,
"SqlAlchemy": sqlalchemy.__version__
}
|
class VersionTask:
'''
Shows the versions of ASL and the various used libraries.
'''
@inject(app=Zsl)
def __init__(self, app):
pass
@json_output
def perform(self, data):
pass
| 5 | 1 | 4 | 0 | 4 | 0 | 1 | 0.3 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 15 | 2 | 10 | 6 | 5 | 3 | 5 | 4 | 2 | 1 | 0 | 0 | 2 |
7,669 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/tasks/zsl/sum_task.py
|
zsl.tasks.zsl.sum_task.SumTask
|
class SumTask:
@inject(app=Zsl)
def __init__(self, app):
self._app = app
@json_input
@json_output
def perform(self, data):
# type: (TaskData)->str
payload = data.payload
self._app.logger.debug("Sum task with data '{0}'.".format(payload))
return {"input": payload, "result": sum(payload)}
|
class SumTask:
@inject(app=Zsl)
def __init__(self, app):
pass
@json_input
@json_output
def perform(self, data):
pass
| 6 | 0 | 4 | 0 | 3 | 1 | 1 | 0.1 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 12 | 1 | 10 | 7 | 4 | 1 | 7 | 5 | 4 | 1 | 0 | 0 | 2 |
7,670 |
AtteqCom/zsl
|
AtteqCom_zsl/src/zsl/tasks/zsl/schedule_celery_task.py
|
zsl.tasks.zsl.schedule_celery_task.ScheduleCeleryTask
|
class ScheduleCeleryTask:
@json_input
def perform(self, data):
# type: (TaskData)->str
data = data.payload
schedule_celery_task(data["path"], data["data"])
return "job submitted"
|
class ScheduleCeleryTask:
@json_input
def perform(self, data):
pass
| 3 | 0 | 5 | 0 | 4 | 1 | 1 | 0.17 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 7 | 0 | 6 | 3 | 3 | 1 | 5 | 2 | 3 | 1 | 0 | 0 | 1 |
7,671 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/tasks/zsl/kill_worker_task.py
|
zsl.tasks.zsl.kill_worker_task.KillWorkerTask
|
class KillWorkerTask:
@inject(app=Zsl)
def __init__(self, app):
self._app = app
@staticmethod
def perform(data):
if isinstance(JobContext.get_current_context(), WebJobContext):
raise Exception("Can not kill worker from web!")
raise KillWorkerException()
|
class KillWorkerTask:
@inject(app=Zsl)
def __init__(self, app):
pass
@staticmethod
def perform(data):
pass
| 5 | 0 | 4 | 1 | 3 | 0 | 2 | 0 | 0 | 4 | 3 | 0 | 1 | 1 | 2 | 2 | 11 | 2 | 9 | 6 | 4 | 0 | 7 | 4 | 4 | 2 | 0 | 1 | 3 |
7,672 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/tasks/zsl/db_test_task.py
|
zsl.tasks.zsl.db_test_task.DbTestTask
|
class DbTestTask:
"""
Connects to a database and executes a simple query. The result of the query should be 6.
<emph>Input</emph>
No input.
<emph>Output</emph>
Returns just a number 6.
@author: Martin Babka
"""
@inject(db=sqlalchemy.engine.Engine, app=Zsl)
def __init__(self, db, app):
self._db = db
self._app = app
self._app.logger.debug(
"Call from DbTestTesk.__init__, db {0}".format(db))
def perform(self, data):
return Response("{0}".format(self._db.execute("select 1 * 2 * 3").scalar()), mimetype='text/plain')
|
class DbTestTask:
'''
Connects to a database and executes a simple query. The result of the query should be 6.
<emph>Input</emph>
No input.
<emph>Output</emph>
Returns just a number 6.
@author: Martin Babka
'''
@inject(db=sqlalchemy.engine.Engine, app=Zsl)
def __init__(self, db, app):
pass
def perform(self, data):
pass
| 4 | 1 | 3 | 0 | 3 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 | 2 | 2 | 2 | 21 | 5 | 8 | 6 | 4 | 8 | 7 | 5 | 4 | 1 | 0 | 0 | 2 |
7,673 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/tasks/zsl/cors_test_task.py
|
zsl.tasks.zsl.cors_test_task.CorsTestTask
|
class CorsTestTask:
@inject(app=Zsl)
def __init__(self, app):
self._app = app
@json_input
@json_output
@crossdomain(methods=["GET", "OPTIONS"])
def perform(self, data):
return "ok"
|
class CorsTestTask:
@inject(app=Zsl)
def __init__(self, app):
pass
@json_input
@json_output
@crossdomain(methods=["GET", "OPTIONS"])
def perform(self, data):
pass
| 7 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 11 | 2 | 9 | 6 | 2 | 0 | 5 | 4 | 2 | 1 | 0 | 0 | 2 |
7,674 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/task/task_decorator.py
|
zsl.task.task_decorator.CrossdomainWebTaskResponder
|
class CrossdomainWebTaskResponder(Responder):
"""
source: https://github.com/fengsp/flask-snippets/blob/master/decorators/http_access_control.py
"""
@inject(app=Zsl, cors_config=CORSConfiguration)
def __init__(self, origin=None, methods=None, allow_headers=None,
expose_headers=None, max_age=None, app=Injected,
cors_config=Injected):
# type: (str, List[str], str, str, Union[int, timedelta], Zsl, CORSConfiguration)->None
self._app = app
methods = join_list(methods, transform=lambda x: x.upper())
self.methods = methods
if allow_headers is None:
allow_headers = cors_config.allow_headers
allow_headers = join_list(allow_headers)
self.allow_headers = allow_headers
if expose_headers is None:
expose_headers = cors_config.expose_headers
expose_headers = join_list(expose_headers)
self.expose_headers = expose_headers
if origin is None:
origin = cors_config.origin
self.origin = join_list(origin)
if max_age is None:
max_age = cors_config.max_age
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
self.max_age = max_age
def get_methods(self):
if self.methods is not None:
return self.methods
options_resp = self._app.make_default_options_response()
return options_resp.headers['allow']
def respond(self, response):
headers = response.headers
headers['Access-Control-Allow-Origin'] = self.origin
headers['Access-Control-Allow-Methods'] = self.get_methods()
headers['Access-Control-Max-Age'] = str(self.max_age)
headers['Access-Control-Allow-Headers'] = self.allow_headers
headers['Access-Control-Expose-Headers'] = self.expose_headers
return response
|
class CrossdomainWebTaskResponder(Responder):
'''
source: https://github.com/fengsp/flask-snippets/blob/master/decorators/http_access_control.py
'''
@inject(app=Zsl, cors_config=CORSConfiguration)
def __init__(self, origin=None, methods=None, allow_headers=None,
expose_headers=None, max_age=None, app=Injected,
cors_config=Injected):
pass
def get_methods(self):
pass
def respond(self, response):
pass
| 5 | 1 | 14 | 2 | 12 | 0 | 3 | 0.11 | 1 | 2 | 0 | 0 | 3 | 6 | 3 | 4 | 50 | 9 | 37 | 15 | 30 | 4 | 34 | 12 | 30 | 6 | 1 | 1 | 9 |
7,675 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/utils/redis_helper.py
|
zsl.utils.redis_helper.Keymaker
|
class Keymaker:
"""Keymaker is a class to generate an object to generate Redis keys.
:Example:
>>> article_keys = Keymaker(prefix='articles', keys={'full_article': 'full', 'short_article': 'short'})
>>> article_keys.full_article('today', 'ID214')
'$PROJECT_PREFIX:articles:full:today:ID214'
"""
# TODO I think this should be done in proper OOP
@inject(config=Config)
def __init__(self, keys, prefix=None, config=None):
project_specific_prefix = config.get('REDIS', {}).get('prefix')
for method, key in keys.items():
setattr(self, method, partial(
redis_key, project_specific_prefix, prefix, key))
|
class Keymaker:
'''Keymaker is a class to generate an object to generate Redis keys.
:Example:
>>> article_keys = Keymaker(prefix='articles', keys={'full_article': 'full', 'short_article': 'short'})
>>> article_keys.full_article('today', 'ID214')
'$PROJECT_PREFIX:articles:full:today:ID214'
'''
@inject(config=Config)
def __init__(self, keys, prefix=None, config=None):
pass
| 3 | 1 | 5 | 1 | 4 | 0 | 2 | 1.17 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 17 | 4 | 6 | 5 | 3 | 7 | 5 | 4 | 3 | 2 | 0 | 1 | 2 |
7,676 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/task/task_data.py
|
zsl.task.task_data.TaskData
|
class TaskData:
@inject(app=Zsl)
def __init__(self, payload, app=Injected, payload_type=str):
self._app = app
self._payload = payload
self._payload_type = payload_type
@deprecated
def get_data(self):
return self._payload
@property
def payload(self):
return self._payload
def transform_payload(self, f):
self._payload = f(self._payload) if self._payload is not None else {}
|
class TaskData:
@inject(app=Zsl)
def __init__(self, payload, app=Injected, payload_type=str):
pass
@deprecated
def get_data(self):
pass
@property
def payload(self):
pass
def transform_payload(self, f):
pass
| 8 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 4 | 3 | 4 | 4 | 17 | 3 | 14 | 11 | 6 | 0 | 11 | 8 | 6 | 2 | 0 | 0 | 5 |
7,677 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/router/task.py
|
zsl.router.task.TaskRouter
|
class TaskRouter:
@inject(config=Config, task_configuration=TaskConfiguration)
def __init__(self, config, task_configuration):
# type: (Config, TaskConfiguration) -> None
self._config = config
self._task_configuration = task_configuration # type: TaskConfiguration
self._strategies = [
PathTaskRouterStrategy(self._task_configuration),
PackageTaskRouterStrategy(
self._task_configuration, self._config.get('DEBUG', False))
]
def route(self, path):
# type: (str)->Tuple[Any, Callable]
"""
Returns the task handling the given request path.
"""
logging.getLogger(__name__).debug("Routing path '%s'.", path)
cls = None
for strategy in self._strategies:
if strategy.can_route(path):
cls = strategy.route(path)
break
if cls is None:
raise RoutingError(path)
return self._create_result(cls)
def _create_result(self, cls):
# type:(Callable)->Tuple[Any, Callable]
"""
Create the task using the injector initialization.
:param cls:
:return:
"""
task = instantiate(cls)
logging.getLogger(__name__).debug(
"Task object {0} created [{1}].".format(cls.__name__, task))
return task, get_callable(task)
|
class TaskRouter:
@inject(config=Config, task_configuration=TaskConfiguration)
def __init__(self, config, task_configuration):
pass
def route(self, path):
'''
Returns the task handling the given request path.
'''
pass
def _create_result(self, cls):
'''
Create the task using the injector initialization.
:param cls:
:return:
'''
pass
| 5 | 2 | 12 | 1 | 7 | 4 | 2 | 0.52 | 0 | 3 | 3 | 0 | 3 | 3 | 3 | 3 | 39 | 5 | 23 | 11 | 18 | 12 | 19 | 10 | 15 | 4 | 0 | 2 | 6 |
7,678 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/resource/model_resource.py
|
zsl.resource.model_resource.CachedModelResource
|
class CachedModelResource(ModelResource):
"""
The cached resource - uses redis to cache the resource for the given amount of seconds.
"""
@inject(cache_module=CacheModule, id_helper=IdHelper, logger=logging.Logger)
def __init__(self, model_cls, cache_module, id_helper, logger, timeout='short'):
super().__init__(model_cls)
self._cache_module = cache_module
self._id_helper = id_helper
self._logger = logger
self._timeout = timeout
def _create_key(self, arghash):
key_prefix = create_key_class_prefix(self.model_cls)
return "cached-resource:{0}:{1}".format(key_prefix, arghash)
def _create_key_from_context(self, ctx):
arghash = sha256(json.dumps(
{'params': ctx.params, 'args': ctx.args, 'data': ctx.data})).hexdigest()
return self._create_key(arghash)
def _get_one(self, row_id, ctx):
# Make a hash of params, args and data and cache using the hash in the key.
key = self._create_key_from_context(ctx)
self._logger.debug(
"CachedModelResource - get one, key: {0}.".format(key))
if self._id_helper.check_key(key):
result = json.loads(self._id_helper.get_key(key))
else:
self._logger.debug(
"CachedModelResource - get one not cached, transferring to resource...")
result = super()._get_one(row_id, ctx)
# serialize as model
self._id_helper.set_key(
key, app_model_encoder_fn(result), self._timeout)
self.invalidate()
return result
def _get_collection_count(self, ctx):
# Make a hash of params, args and data and cache using the hash in the key.
key = self._create_key_from_context(ctx)
self._logger.debug(
"CachedModelResource - get one, key: {0}.".format(key))
if self._id_helper.check_key(key):
result = int(self._id_helper.get_key(key))
else:
self._logger.debug(
"CachedModelResource - get one not cached, transferring to resource...")
result = super()._get_collection_count(ctx)
self._id_helper.set_key(
key, app_model_encoder_fn(result), self._timeout)
return result
def _get_collection(self, ctx):
# Make a hash of params, args and data and cache using the hash in the key.
key = self._create_key_from_context(ctx)
self._logger.debug(
"CachedModelResource - collection, key: {0}.".format(key))
if self._id_helper.check_key(key):
result = self._id_helper.gather_page(key, app_model_decoder_fn)
else:
self._logger.debug(
"CachedModelResource - collection not cached, transferring to resource...")
result = super()._get_collection(ctx)
self._id_helper.fill_page(
key, result, self._timeout, app_model_encoder_fn)
return result
def invalidate(self):
"""
Invalidates all the data associated with this model
"""
key = self._create_key("")
self._id_helper.invalidate_keys_by_prefix(key)
def create(self, params, args, data):
rv = ModelResource.create(self, params, args, data)
self.invalidate()
return rv
def update(self, params, args, data):
rv = ModelResource.update(self, params, args, data)
self.invalidate()
return rv
def delete(self, params, args, data):
rv = ModelResource.delete(self, params, args, data)
self.invalidate()
return rv
|
class CachedModelResource(ModelResource):
'''
The cached resource - uses redis to cache the resource for the given amount of seconds.
'''
@inject(cache_module=CacheModule, id_helper=IdHelper, logger=logging.Logger)
def __init__(self, model_cls, cache_module, id_helper, logger, timeout='short'):
pass
def _create_key(self, arghash):
pass
def _create_key_from_context(self, ctx):
pass
def _get_one(self, row_id, ctx):
pass
def _get_collection_count(self, ctx):
pass
def _get_collection(self, ctx):
pass
def invalidate(self):
'''
Invalidates all the data associated with this model
'''
pass
def create(self, params, args, data):
pass
def update(self, params, args, data):
pass
def delete(self, params, args, data):
pass
| 12 | 2 | 7 | 1 | 6 | 1 | 1 | 0.17 | 1 | 2 | 0 | 0 | 10 | 4 | 10 | 37 | 87 | 17 | 60 | 28 | 48 | 10 | 56 | 27 | 45 | 2 | 3 | 1 | 13 |
7,679 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/resource/guard.py
|
zsl.resource.guard.guard
|
class guard:
"""Guard decorator.
This decorator wraps the CRUD methods with security checks before and
after CRUD method execution, so that the response can be stopped or
manipulated. The original CRUD methods are renamed to *guarded_method*,
where *method* can be [*create*, *read*, *update*, *delete*], so by using a
`GuardedResource` as a base, you can still redeclare the *guarded_methods*
and won't lose the security checks.
It takes a list of policies, which will be always checked before and
after executing the CRUD method.
Policy is met, when it returns ``Access.ALLOW``, on ``Access.CONTINUE`` it
will continue to check others and on ``Access.DENY`` or raising a
``PolicyViolationError`` access will be restricted. If there is no policy
which grants the access a ``PolicyViolationError`` is raised and access
will be restricted.
Guard can have custom exception handlers or method wrappers to _wrap the
CRUD method around.
.. code-block:: python
class Policy(ResourcePolicy):
default = Access.DENY
can_read = Access.ALLOW # allow only read
@guard([Policy()])
class GuardedResource(GuardedMixin):
def read(self, param, args, data):
return resources[param]
class SpecificResource(GuardedResource):
# override GuardedResource.read, but with its security checks
def guarded_read(self, param, args, data):
return specific_resources[param]
"""
method_wrappers = []
exception_handlers = [default_error_handler]
resource_methods = ['create', 'read', 'update', 'delete']
def __init__(self, policies=None, method_wrappers=None,
exception_handlers=None):
# type: (Optional[List[policies]]) -> None
self.policies = list(policies) if policies else []
if method_wrappers:
self._method_wrappers = self.method_wrappers + method_wrappers
else:
self._method_wrappers = list(self.method_wrappers)
if exception_handlers:
self._exception_handlers = \
self.exception_handlers + exception_handlers
else:
self._exception_handlers = list(self.exception_handlers)
@staticmethod
def _check_before_policies(res, name, *args, **kwargs):
for policy in res._guard_policies:
access = _call_before(policy, name)(*args, **kwargs)
if access == Access.ALLOW:
return
elif access == Access.DENY:
raise PolicyViolationError('Access denied for {} {}'.format(
name, 'before'), code=_HTTP_STATUS_FORBIDDEN)
elif access == Access.CONTINUE:
continue
else:
raise TypeError('Access has no value {}'.format(access))
raise PolicyViolationError(
"Access haven't been granted for {} {}".format(
name, 'before'), code=_HTTP_STATUS_FORBIDDEN)
@staticmethod
def _check_after_policies(res, name, result):
for policy in res._guard_policies:
access = _call_after(policy, name)(result)
if access == Access.ALLOW:
return
elif access == Access.DENY:
raise PolicyViolationError('Policy violation for {} {}'.format(
name, 'before'), code=_HTTP_STATUS_FORBIDDEN)
elif access == Access.CONTINUE:
continue
else:
raise TypeError('Access have no value {}'.format(access))
raise PolicyViolationError(
"Access haven't been granted for {} {}".format(
name, 'after'), code=_HTTP_STATUS_FORBIDDEN)
def _wrap(self, method):
# type: (Callable) -> Callable
name = method.__name__
@wraps(method)
def wrapped(*args, **kwargs):
res = args[0]
args = args[1:]
try:
self._check_before_policies(res, name, *args, **kwargs)
rv = _guarded_method(res, name)(*args, **kwargs)
self._check_after_policies(res, name, rv)
except PolicyViolationError as e:
rv = self._handle_exception(e, res)
return rv
for mw in reversed(self._method_wrappers):
wrapped = mw(wrapped)
return wrapped
def _handle_exception(self, error, resource):
rv = None
for handler in self._exception_handlers:
rv = handler(error, rv, resource)
return rv
def __call__(self, cls):
if hasattr(cls, '_guard_policies'):
self.policies += getattr(cls, '_guard_policies')
setattr(cls, '_guard_policies', list(self.policies))
return cls
setattr(cls, '_guard_policies', list(self.policies))
for method_name in self.resource_methods:
guarded_name = _guarded_name(method_name)
if hasattr(cls, method_name):
method = getattr(cls, method_name)
setattr(cls, method_name, self._wrap(method))
setattr(cls, guarded_name, method)
if issubclass(cls, GuardedMixin):
return cls
else:
return type(cls.__name__, (cls, GuardedMixin), {})
|
class guard:
'''Guard decorator.
This decorator wraps the CRUD methods with security checks before and
after CRUD method execution, so that the response can be stopped or
manipulated. The original CRUD methods are renamed to *guarded_method*,
where *method* can be [*create*, *read*, *update*, *delete*], so by using a
`GuardedResource` as a base, you can still redeclare the *guarded_methods*
and won't lose the security checks.
It takes a list of policies, which will be always checked before and
after executing the CRUD method.
Policy is met, when it returns ``Access.ALLOW``, on ``Access.CONTINUE`` it
will continue to check others and on ``Access.DENY`` or raising a
``PolicyViolationError`` access will be restricted. If there is no policy
which grants the access a ``PolicyViolationError`` is raised and access
will be restricted.
Guard can have custom exception handlers or method wrappers to _wrap the
CRUD method around.
.. code-block:: python
class Policy(ResourcePolicy):
default = Access.DENY
can_read = Access.ALLOW # allow only read
@guard([Policy()])
class GuardedResource(GuardedMixin):
def read(self, param, args, data):
return resources[param]
class SpecificResource(GuardedResource):
# override GuardedResource.read, but with its security checks
def guarded_read(self, param, args, data):
return specific_resources[param]
'''
def __init__(self, policies=None, method_wrappers=None,
exception_handlers=None):
pass
@staticmethod
def _check_before_policies(res, name, *args, **kwargs):
pass
@staticmethod
def _check_after_policies(res, name, result):
pass
def _wrap(self, method):
pass
@wraps(method)
def wrapped(*args, **kwargs):
pass
def _handle_exception(self, error, resource):
pass
def __call__(self, cls):
pass
| 11 | 1 | 17 | 4 | 13 | 0 | 4 | 0.36 | 0 | 7 | 3 | 1 | 4 | 3 | 6 | 6 | 161 | 45 | 85 | 32 | 73 | 31 | 65 | 27 | 57 | 5 | 0 | 2 | 25 |
7,680 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/db/model/app_model_json_decoder.py
|
zsl.db.model.app_model_json_decoder.get_json_decoder.AppModelJSONDecoder
|
class AppModelJSONDecoder(JSONDecoder):
def decode(self, s, _w=WHITESPACE.match):
values = JSONDecoder.decode(self, s, _w=_w)
model = fetch_class(full_class_name)(values, 'id', hints)
return model
|
class AppModelJSONDecoder(JSONDecoder):
def decode(self, s, _w=WHITESPACE.match):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 4 | 5 | 0 | 5 | 4 | 3 | 0 | 5 | 4 | 3 | 1 | 2 | 0 | 1 |
7,681 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/contrib/sentry/sentry_module.py
|
zsl.contrib.sentry.sentry_module.SentryModule
|
class SentryModule(Module):
"""Adds Sentry support."""
SENTRY_CONFIG_NAME = 'SENTRY'
@provides(SentryConfiguration)
@inject(config=Config)
def provide_sentry_configuration(self, config):
# type: (Config) -> SentryConfiguration
return config.get(SentryModule.SENTRY_CONFIG_NAME)
def configure(self, binder):
# type: (Binder) -> None
simple_bind(binder, SentryCli, singleton)
register(SentryErrorProcessor())
|
class SentryModule(Module):
'''Adds Sentry support.'''
@provides(SentryConfiguration)
@inject(config=Config)
def provide_sentry_configuration(self, config):
pass
def configure(self, binder):
pass
| 5 | 1 | 4 | 0 | 3 | 1 | 1 | 0.33 | 1 | 2 | 2 | 0 | 2 | 0 | 2 | 2 | 15 | 3 | 9 | 5 | 4 | 3 | 7 | 4 | 4 | 1 | 1 | 0 | 2 |
7,682 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/contrib/sentry/sentry_module.py
|
zsl.contrib.sentry.sentry_module.SentryErrorProcessor
|
class SentryErrorProcessor(ErrorProcessor):
@inject(config=SentryConfiguration, zsl=Zsl)
def __init__(self, config, zsl):
# type: (SentryConfiguration, Zsl)->None
self._init_sdk(config, zsl)
@staticmethod
def _init_sdk(config, zsl):
# type: (SentryConfiguration, Zsl)->None
logging_integration = SentryErrorProcessor._register_logging_handler(
config)
sentry_sdk.init(
dsn=config.dsn,
transport=HttpTransport,
environment=config.environment,
release=zsl.get_version(),
integrations=[
logging_integration] if logging_integration else None,
)
for key, value in config.tags.items():
sentry_sdk.set_tag(key, value)
@staticmethod
def _register_logging_handler(config):
# type: (SentryConfiguration)->LoggingIntegration
return LoggingIntegration(
level=None,
event_level=config.sentry_logging_handler_level if config.register_logging_handler else None,
)
def handle(self, e):
logging.getLogger(__name__).info(
'Sending error message for {0}.'.format(e))
sentry_sdk.capture_exception(e)
|
class SentryErrorProcessor(ErrorProcessor):
@inject(config=SentryConfiguration, zsl=Zsl)
def __init__(self, config, zsl):
pass
@staticmethod
def _init_sdk(config, zsl):
pass
@staticmethod
def _register_logging_handler(config):
pass
def handle(self, e):
pass
| 8 | 0 | 7 | 1 | 5 | 1 | 2 | 0.12 | 1 | 0 | 0 | 0 | 2 | 0 | 4 | 5 | 34 | 6 | 25 | 10 | 17 | 3 | 13 | 7 | 8 | 3 | 1 | 1 | 7 |
7,683 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/contrib/sentry/sentry_module.py
|
zsl.contrib.sentry.sentry_module.SentryCli
|
class SentryCli:
"""Sentry CLI interface support."""
@inject(zsl_cli=ZslCli)
def __init__(self, zsl_cli):
# type: (ZslCli) -> None
logging.getLogger(__name__).debug("Creating Sentry CLI.")
@zsl_cli.cli.group()
def sentry():
pass
@sentry.command(help='Send a test error to the Sentry backend.')
def test():
print('Sending test Sentry message.')
raise ZslError("Sentry test error from Zsl.")
self._sentry = sentry
@property
def sentry(self):
return self._sentry
|
class SentryCli:
@inject(zsl_cli=ZslCli)
def __init__(self, zsl_cli):
pass
@zsl_cli.cli.group()
def sentry():
pass
@sentry.command(help='Send a test error to the Sentry backend.')
def test():
pass
@property
def sentry(self):
pass
| 9 | 0 | 5 | 1 | 4 | 0 | 1 | 0.13 | 0 | 1 | 1 | 0 | 2 | 1 | 2 | 2 | 24 | 6 | 16 | 10 | 7 | 2 | 12 | 6 | 7 | 1 | 0 | 0 | 4 |
7,684 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/contrib/alembic/alembic_module.py
|
zsl.contrib.alembic.alembic_module.AlembicModule
|
class AlembicModule(Module):
"""Adds Alembic support for migrations."""
ALEMBIC_CONFIG_NAME = 'ALEMBIC'
@provides(AlembicConfiguration)
@inject(config=Config)
def provide_alembic_configuration(self, config):
# type: (Config) -> AlembicConfiguration
return config.get(AlembicModule.ALEMBIC_CONFIG_NAME)
def configure(self, binder):
# type: (Binder) -> None
simple_bind(binder, AlembicCli, singleton)
|
class AlembicModule(Module):
'''Adds Alembic support for migrations.'''
@provides(AlembicConfiguration)
@inject(config=Config)
def provide_alembic_configuration(self, config):
pass
def configure(self, binder):
pass
| 5 | 1 | 3 | 0 | 2 | 1 | 1 | 0.38 | 1 | 1 | 1 | 0 | 2 | 0 | 2 | 2 | 14 | 3 | 8 | 5 | 3 | 3 | 6 | 4 | 3 | 1 | 1 | 0 | 2 |
7,685 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/contrib/alembic/alembic_module.py
|
zsl.contrib.alembic.alembic_module.AlembicCli
|
class AlembicCli:
"""Alembic Cli interface support."""
@inject(zsl_cli=ZslCli)
def __init__(self, zsl_cli):
# type: (ZslCli) -> AlembicCli
logging.getLogger(__name__).debug("Creating Alembic CLI.")
@zsl_cli.cli.command(help='Run alembic maintenance tasks.',
context_settings=dict(
ignore_unknown_options=True,
allow_extra_args=True
))
@click.pass_context
def alembic(ctx):
# type: (Context) -> None
self.call_alembic(ctx.args)
self._alembic = alembic
@property
def alembic(self):
return self._alembic
@inject(alembic_cfg=AlembicConfiguration)
def call_alembic(self, args, alembic_cfg):
# type: (List[str], AlembicConfiguration)->None
is_initializing = len(args) and args[0] == 'init'
alembic_directory = alembic_cfg.alembic_directory
if is_initializing:
cwd = None
args.append(alembic_directory)
else:
cwd = os.getcwd()
os.chdir(alembic_directory)
CommandLine().main(args)
if is_initializing:
default_ini_path = 'alembic.ini'
target_ini_path = os.path.join(alembic_directory, 'alembic.ini')
os.rename(default_ini_path, target_ini_path)
else:
os.chdir(cwd)
def __call__(self, *args, **kwargs):
self._alembic()
|
class AlembicCli:
'''Alembic Cli interface support.'''
@inject(zsl_cli=ZslCli)
def __init__(self, zsl_cli):
pass
@zsl_cli.cli.command(help='Run alembic maintenance tasks.',
context_settings=dict(
ignore_unknown_options=True,
allow_extra_args=True
))
@click.pass_context
def alembic(ctx):
pass
@property
def alembic(ctx):
pass
@inject(alembic_cfg=AlembicConfiguration)
def call_alembic(self, args, alembic_cfg):
pass
def __call__(self, *args, **kwargs):
pass
| 11 | 1 | 8 | 0 | 7 | 1 | 1 | 0.11 | 0 | 1 | 0 | 0 | 4 | 1 | 4 | 4 | 45 | 6 | 35 | 20 | 20 | 4 | 24 | 12 | 18 | 3 | 0 | 1 | 7 |
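
AlembicCli.call_alembic drives Alembic programmatically by changing into the configured script directory and handing the raw arguments to Alembic's own command line. A hedged sketch of that idea with a placeholder directory; it omits the alembic.ini relocation the record performs:

# Sketch of invoking Alembic programmatically, as AlembicCli does above.
# The migrations directory path is a placeholder.
import os

from alembic.config import CommandLine


def call_alembic(args, alembic_directory="./migrations"):
    if args and args[0] == 'init':
        # 'alembic init <dir>' creates the directory itself, so stay put.
        CommandLine().main(args + [alembic_directory])
        return
    cwd = os.getcwd()
    os.chdir(alembic_directory)
    try:
        CommandLine().main(args)   # e.g. ['upgrade', 'head'] or ['current']
    finally:
        os.chdir(cwd)


# call_alembic(['current'])
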
7,686 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/cache/redis_id_helper.py
|
zsl.cache.redis_id_helper.RedisIdHelper
|
class RedisIdHelper(IdHelper):
CACHE_DEFAULT_TIMEOUT = 300
@inject(config=Config, redis_cache_module=RedisCacheModule)
def __init__(self, config, redis_cache_module):
"""
Creates the id helper for caching support of AppModels.
"""
self._config = config
self._redis_cache_module = redis_cache_module
if 'CACHE_TIMEOUTS' in self._config:
self._cache_timeouts = self._config['CACHE_TIMEOUTS']
else:
self._cache_timeouts = None
self._cache_default_timeout = self._config.get('CACHE_DEFAULT_TIMEOUT',
RedisIdHelper.CACHE_DEFAULT_TIMEOUT)
def get_timeout(self, key, value, timeout):
if self._cache_timeouts is None:
return self._cache_default_timeout
else:
return self._cache_timeouts[timeout]
def gather_page(self, page_key, decoder=decoder_identity):
page_keys = self._redis_cache_module.get_list(page_key)
logging.debug("Fetching page {0} from redis using keys {1}.".format(
page_key, page_keys))
p = []
for k in page_keys:
p.append(decoder(k, self.get_key(k)))
return p
def fill_page(self, page_key, data, timeout, encoder=encoder_identity, model_key_generator=model_key_generator):
self._redis_cache_module.invalidate_key(page_key)
first = True
for d in data:
key = model_key_generator(d)
self._redis_cache_module.append_to_list(page_key, key)
if first:
self._redis_cache_module.set_key_expiration(
page_key, self.get_timeout(page_key, data, timeout))
first = False
self._redis_cache_module.set_key(
key, encoder(d), self.get_timeout(key, d, timeout))
def check_page(self, page_key):
if not self._redis_cache_module.contains_list(page_key):
return False
page_keys = self._redis_cache_module.get_list(page_key)
for k in page_keys:
if not self.check_key(k):
return False
return True
def check_key(self, key):
return self._redis_cache_module.contains_key(key)
def get_key(self, key):
return self._redis_cache_module.get_key(key)
def invalidate_key(self, key):
return self._redis_cache_module.invalidate_key(key)
def invalidate_keys_by_prefix(self, key_prefix):
return self._redis_cache_module.invalidate_by_glob(key_prefix + "*")
def set_key(self, key, value, timeout):
self._redis_cache_module.set_key(
key, value, self.get_timeout(key, value, timeout))
|
class RedisIdHelper(IdHelper):
@inject(config=Config, redis_cache_module=RedisCacheModule)
def __init__(self, config, redis_cache_module):
'''
Creates the id helper for caching support of AppModels.
'''
pass
def get_timeout(self, key, value, timeout):
pass
def gather_page(self, page_key, decoder=decoder_identity):
pass
def fill_page(self, page_key, data, timeout, encoder=encoder_identity, model_key_generator=model_key_generator):
pass
def check_page(self, page_key):
pass
def check_key(self, key):
pass
def get_key(self, key):
pass
def invalidate_key(self, key):
pass
def invalidate_keys_by_prefix(self, key_prefix):
pass
def set_key(self, key, value, timeout):
pass
| 12 | 1 | 6 | 1 | 5 | 0 | 2 | 0.06 | 1 | 0 | 0 | 0 | 10 | 4 | 10 | 38 | 71 | 16 | 52 | 25 | 40 | 3 | 48 | 24 | 37 | 4 | 4 | 2 | 18 |
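
RedisIdHelper caches a "page" as a Redis list of member keys plus one value key per member, each with its own expiration. A minimal sketch of that layout using plain redis-py; key names, host and timeout are illustrative:

# Sketch of the page-cache layout used by RedisIdHelper above, without zsl's
# RedisCacheModule abstraction.
import json

import redis

r = redis.StrictRedis(host="localhost", port=6379, db=0)


def fill_page(page_key, items, timeout=300):
    r.delete(page_key)
    for item in items:
        item_key = "user:{0}".format(item["id"])
        r.rpush(page_key, item_key)            # remember the member key
        r.set(item_key, json.dumps(item), ex=timeout)
    r.expire(page_key, timeout)


def gather_page(page_key):
    keys = r.lrange(page_key, 0, r.llen(page_key) - 1)
    # Note: expired member keys would return None here; a real helper should
    # re-check the page as RedisIdHelper.check_page does.
    return [json.loads(r.get(k)) for k in keys]


# fill_page("users:page:1", [{"id": 1, "name": "a"}, {"id": 2, "name": "b"}])
# gather_page("users:page:1")
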
7,687 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/cache/redis_cache_module.py
|
zsl.cache.redis_cache_module.RedisCacheModule
|
class RedisCacheModule(CacheModule):
"""Abstraction layer for caching."""
@inject(app=Zsl, config=Config)
def __init__(self, app, config):
"""Abstraction layer for caching."""
self._app = app
self._config = config
redis_conf = self._config.get('REDIS', {})
self._client = redis.StrictRedis(
host=redis_conf.get('host'),
port=redis_conf.get('port'),
db=redis_conf.get('db', 0),
password=redis_conf.get('password')
)
self.logger = self._app.logger.getChild('cache')
self.logger.debug("Redis client created.")
self._cache_prefix = self._config[
'CACHE_PREFIX'] + ':' if 'CACHE_PREFIX' in self._config else ''
def _prefix_key(self, key):
return self._cache_prefix + key
def set_key(self, key, value, timeout):
pkey = self._prefix_key(key)
self._client.set(pkey, value)
self.set_key_expiration(key, timeout)
def invalidate_key(self, key):
pkey = self._prefix_key(key)
self.logger.debug("Key invalidation '{0}'.".format(key))
self._client.delete(pkey)
def set_key_expiration(self, key, timeout):
pkey = self._prefix_key(key)
self.logger.debug("Key expiration '{0}' = {1}.".format(key, timeout))
self._client.expire(pkey, timeout)
def contains_key(self, key):
pkey = self._prefix_key(key)
return self._client.exists(pkey)
def contains_list(self, key):
pkey = self._prefix_key(key)
return self._client.exists(pkey)
def get_key(self, key):
pkey = self._prefix_key(key)
return self._client.get(pkey)
def append_to_list(self, key, value):
pkey = self._prefix_key(key)
self._client.rpush(pkey, value)
def get_list(self, key):
pkey = self._prefix_key(key)
llen = self._client.llen(pkey)
return self._client.lrange(pkey, 0, llen - 1)
def get_by_glob(self, glob):
"""Returns all keys which match the given glob. Warning: this method can be slow for large datasets."""
pglob = self._prefix_key(glob)
keys = self._client.scan_iter(pglob)
return {key: self._client.get(key) for key in keys}
def contains_keys_by_glob(self, glob):
"""Returns number of keys which match the given glob. Warning: this method can be slow for large datasets."""
pglob = self._prefix_key(glob)
keys = self._client.scan_iter(pglob)
return len(list(keys))
def invalidate_by_glob(self, glob):
pglob = self._prefix_key(glob)
if self._config.get('IS_USING_MEDIEVAL_SOFTWARE', False):
keylist = self._client.keys(pglob)
else:
keylist = self._client.scan_iter(pglob)
for key in keylist:
# This does not need to prefixed.
self.logger.debug('Invalidating key {0}.'.format(key))
self._client.delete(key)
|
class RedisCacheModule(CacheModule):
'''Abstraction layer for caching.'''
@inject(app=Zsl, config=Config)
def __init__(self, app, config):
'''Abstraction layer for caching.'''
pass
def _prefix_key(self, key):
pass
def set_key(self, key, value, timeout):
pass
def invalidate_key(self, key):
pass
def set_key_expiration(self, key, timeout):
pass
def contains_key(self, key):
pass
def contains_list(self, key):
pass
def get_key(self, key):
pass
def append_to_list(self, key, value):
pass
def get_list(self, key):
pass
def get_by_glob(self, glob):
'''Returns all keys which match the given glob. Warning: this method can be slow for large datasets.'''
pass
def contains_keys_by_glob(self, glob):
'''Returns number of keys which match the given glob. Warning: this method can be slow for large datasets.'''
pass
def invalidate_by_glob(self, glob):
pass
| 15 | 4 | 5 | 0 | 5 | 0 | 1 | 0.08 | 1 | 1 | 0 | 0 | 13 | 5 | 13 | 40 | 85 | 17 | 63 | 37 | 48 | 5 | 55 | 36 | 41 | 3 | 4 | 1 | 16 |
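
RedisCacheModule is essentially a key-prefixing wrapper around redis.StrictRedis. A compact sketch of the same idea without zsl's Config/Zsl injection; the connection settings and prefix are placeholders:

# Minimal key-prefixing cache wrapper in the spirit of RedisCacheModule above.
import redis


class PrefixedRedisCache:
    def __init__(self, prefix="myapp:", **redis_kwargs):
        self._prefix = prefix
        self._client = redis.StrictRedis(**redis_kwargs)

    def _key(self, key):
        return self._prefix + key

    def set_key(self, key, value, timeout):
        pkey = self._key(key)
        self._client.set(pkey, value)
        self._client.expire(pkey, timeout)

    def get_key(self, key):
        return self._client.get(self._key(key))

    def invalidate_by_glob(self, glob):
        # scan_iter avoids blocking the server the way KEYS can on large sets.
        for key in self._client.scan_iter(self._key(glob)):
            self._client.delete(key)


# cache = PrefixedRedisCache(host="localhost", port=6379, db=0)
# cache.set_key("answer", 42, timeout=60)
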
7,688 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/application/modules/web/web_context_module.py
|
zsl.application.modules.web.web_context_module.WebHandler
|
class WebHandler:
@inject(flask=Zsl)
def run_web(self, flask, host='127.0.0.1', port=5000, **options):
# type: (Zsl, str, int, **Any)->None
"""Alias for Flask.run"""
return flask.run(
host=flask.config.get('FLASK_HOST', host),
port=flask.config.get('FLASK_PORT', port),
debug=flask.config.get('DEBUG', False),
**options
)
|
class WebHandler:
@inject(flask=Zsl)
def run_web(self, flask, host='127.0.0.1', port=5000, **options):
'''Alias for Flask.run'''
pass
| 3 | 1 | 9 | 0 | 7 | 2 | 1 | 0.22 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 11 | 0 | 9 | 3 | 6 | 2 | 3 | 2 | 1 | 1 | 0 | 0 | 1 |
7,689 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/application/modules/web/web_context_module.py
|
zsl.application.modules.web.web_context_module.WebContextModule
|
class WebContextModule(DefaultContextModule):
"""Adds web application context to current configuration."""
def _create_context(self):
logging.getLogger(__name__).debug("Creating web context.")
return InitializationContext(initializers=web_initializers)
@provides(interface=WebCli, scope=singleton)
def provide_web_cli(self):
return WebCli()
@provides(interface=WebHandler, scope=singleton)
def provide_web_handler(self):
return WebHandler()
@provides(interface=CORSConfiguration, scope=singleton)
@inject(config=Config)
def provide_cors_configuration(self, config):
# type: (Config)->CORSConfiguration
return config.get(CORS_CONFIGURATION_NAME, CORSConfiguration())
def configure(self, binder):
# type: (Binder) -> None
super().configure(binder)
simple_bind(binder, WebCli, singleton)
create_task_mapping()
|
class WebContextModule(DefaultContextModule):
'''Adds web application context to current configuration.'''
def _create_context(self):
pass
@provides(interface=WebCli, scope=singleton)
def provide_web_cli(self):
pass
@provides(interface=WebHandler, scope=singleton)
def provide_web_handler(self):
pass
@provides(interface=CORSConfiguration, scope=singleton)
@inject(config=Config)
def provide_cors_configuration(self, config):
pass
def configure(self, binder):
pass
| 10 | 1 | 3 | 0 | 3 | 0 | 1 | 0.17 | 1 | 5 | 4 | 0 | 5 | 0 | 5 | 7 | 26 | 5 | 18 | 9 | 8 | 3 | 14 | 6 | 8 | 1 | 2 | 0 | 5 |
7,690 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/service/service.py
|
zsl.service.service.Service
|
class Service(TransactionalSupportMixin):
"""
Main service class.
"""
@inject(app=Zsl)
def __init__(self, app=Injected, engine=Injected):
"""Constructor - initializes and injects the needed libraries."""
super().__init__()
self._app = app
|
class Service(TransactionalSupportMixin):
'''
Main service class.
'''
@inject(app=Zsl)
def __init__(self, app=Injected, engine=Injected):
'''Constructor - initializes and injects the needed libraries.'''
pass
| 3 | 2 | 4 | 0 | 3 | 1 | 1 | 0.8 | 1 | 1 | 0 | 3 | 1 | 1 | 1 | 2 | 10 | 1 | 5 | 4 | 2 | 4 | 4 | 3 | 2 | 1 | 1 | 0 | 1 |
7,691 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest.testIsInTransaction.GuardedUserModel
|
class GuardedUserModel(UserResource, GuardedMixin):
def secure_read(self, *args, **kwargs):
test_case.assertIsNotNone(self._orm)
test_case.assertTrue(self._in_transaction)
return super().read(*args, **kwargs)
|
class GuardedUserModel(UserResource, GuardedMixin):
def secure_read(self, *args, **kwargs):
pass
| 2 | 0 | 5 | 1 | 4 | 0 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 1 | 32 | 6 | 1 | 5 | 2 | 3 | 0 | 5 | 2 | 3 | 1 | 4 | 0 | 1 |
7,692 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/application/modules/logger_module.py
|
zsl.application.modules.logger_module.LoggerModule
|
class LoggerModule(Module):
"""Configure the application logger."""
LOGGING_CONFIG_NAME = 'LOGGING'
def configure(self, binder):
# type: (Binder) -> None
super().configure(binder)
self.configure_logging()
@inject(config=Config, app=Zsl)
def configure_logging(self, config, app):
# type: (Config) -> None
default_config = dict(
version=1,
root=dict(
level='DEBUG' if config.get('DEBUG', False) else 'WARNING'
)
)
logging.config.dictConfig(config.get(
LoggerModule.LOGGING_CONFIG_NAME, default_config))
self._recreate_app_logger(app)
def _recreate_app_logger(self, app):
logging._acquireLock()
del logging.getLogger(app.name).manager.loggerDict[app.name]
logging._releaseLock()
app._logger = logging.getLogger(app.name)
|
class LoggerModule(Module):
'''Configure the application logger.'''
def configure(self, binder):
pass
@inject(config=Config, app=Zsl)
def configure_logging(self, config, app):
pass
def _recreate_app_logger(self, app):
pass
| 5 | 1 | 6 | 0 | 6 | 1 | 1 | 0.15 | 1 | 2 | 0 | 0 | 3 | 0 | 3 | 3 | 27 | 4 | 20 | 7 | 15 | 3 | 14 | 6 | 10 | 2 | 1 | 0 | 4 |
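
LoggerModule configures logging through logging.config.dictConfig with a DEBUG-dependent root level. A standalone sketch with an illustrative console handler; the handler and format values are assumptions, not zsl's defaults:

# Standalone sketch of dictConfig-based setup similar to LoggerModule above.
import logging
import logging.config

DEBUG = True  # stands in for config.get('DEBUG', False)

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "plain": {"format": "%(asctime)s %(levelname)s %(name)s: %(message)s"},
    },
    "handlers": {
        "console": {"class": "logging.StreamHandler", "formatter": "plain"},
    },
    "root": {
        "level": "DEBUG" if DEBUG else "WARNING",
        "handlers": ["console"],
    },
}

logging.config.dictConfig(LOGGING)
logging.getLogger("myapp").debug("Logging configured.")
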
7,693 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/src/zsl/application/modules/error_handler_module.py
|
zsl.application.modules.error_handler_module.ErrorHandlerModule
|
class ErrorHandlerModule(Module):
@provides(interface=ErrorConfiguration, scope=singleton)
@inject(config=Config)
def provide_error_config(self, config):
# type: (Config)->ErrorConfiguration
return config.get(ERROR_CONFIG_NAME, ErrorConfiguration())
def configure(self, binder):
# type: (Binder)->None
@inject(error_config=ErrorConfiguration)
def get_error_config(error_config):
# type: (ErrorConfiguration)->ErrorConfiguration
return error_config
super().configure(binder)
for handler in get_error_config().handlers:
register(handler)
|
class ErrorHandlerModule(Module):
@provides(interface=ErrorConfiguration, scope=singleton)
@inject(config=Config)
def provide_error_config(self, config):
pass
def configure(self, binder):
pass
@inject(error_config=ErrorConfiguration)
def get_error_config(error_config):
pass
| 7 | 0 | 5 | 0 | 4 | 1 | 1 | 0.25 | 1 | 2 | 1 | 0 | 2 | 0 | 2 | 2 | 17 | 2 | 12 | 7 | 5 | 3 | 9 | 5 | 5 | 2 | 1 | 1 | 4 |
7,694 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest.testRollbackAfter.MyTestCase
|
class MyTestCase(DbTestCase, TestCase):
def runTest(self):
pass
def testIt(self):
create_resource_test_data()
resource = GuardedUserModel()
resource.read('', {}, {})
if hasattr(mock_sess.query, 'assert_called'):
mock_sess.query.assert_called()
mock_sess.rollback.assert_called_with()
|
class MyTestCase(DbTestCase, TestCase):
def runTest(self):
pass
def testIt(self):
pass
| 3 | 0 | 5 | 1 | 5 | 0 | 2 | 0 | 2 | 2 | 2 | 0 | 2 | 0 | 2 | 77 | 12 | 2 | 10 | 4 | 7 | 0 | 10 | 4 | 7 | 2 | 2 | 1 | 3 |
7,695 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest.testRollbackBefore.DenyPolicy
|
class DenyPolicy(ResourcePolicy):
default = Access.DENY
|
class DenyPolicy(ResourcePolicy):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
7,696 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest.testRollbackBefore.GuardedUserModel
|
class GuardedUserModel(UserResource, GuardedMixin):
pass
|
class GuardedUserModel(UserResource, GuardedMixin):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
7,697 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/resource/transactional_guard_test.py
|
tests.resource.transactional_guard_test.TransactionalGuardTest.testRollbackBefore.TestTHolder
|
class TestTHolder(TransactionHolder):
rollback = mock.MagicMock()
_orm = mock.MagicMock()
|
class TestTHolder(TransactionHolder):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
7,698 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/service/service__tx_session_test.py
|
tests.service.service__tx_session_test.TestTxSession
|
class TestTxSession(unittest.TestCase):
def setUp(self):
self.service = TestTxSession._create_simple_service()
@patch("zsl.service.service._get_session_factory")
def test_tx_session__when_called_as_ctx__then_call_commit_close(self, mock_get_session_factory):
mock_session_factory = Mock(spec=SessionFactory)
mock_session = Mock(spec=Session)
mock_session_factory.create_session.return_value = mock_session
mock_get_session_factory.return_value = mock_session_factory
with tx_session(self.service) as session:
self.assertEqual(session, mock_session)
mock_session.commit.assert_called_once()
mock_session.close.assert_called_once()
@patch("zsl.service.service._get_session_factory")
def test_tx_session__when_called_with_exception__then_rollback(self, mock_get_session_factory):
mock_session_factory = Mock(spec=SessionFactory)
mock_session = Mock(spec=Session)
mock_session_factory.create_session.return_value = mock_session
mock_get_session_factory.return_value = mock_session_factory
with self.assertRaises(Exception):
with tx_session(self.service):
raise Exception("Simulated error")
mock_session.rollback.assert_called_once()
mock_session.close.assert_called_once()
@patch("zsl.service.service.tx_session")
def test_transactional_decorator__when_called_with_decorator__then_use_tx_session_context(self, mock_tx_session):
session = Mock(spec=Session)
ctx_instance = MagicMock()
ctx_instance.__enter__.return_value = session
ctx_instance.__exit__.return_value = False
mock_tx_session.return_value = ctx_instance
test = self
class TestService(Service):
@transactional
def test_function(self):
test.assertIsInstance(self, TestService)
service = TestService()
service.test_function()
ctx_instance.__enter__.assert_called_once()
ctx_instance.__exit__.assert_called_once()
@patch("zsl.service.service._get_session_factory")
def test_transactional_decorator__when_called_with_decorator__then_use_set_service_orm(self, mock_get_session_factory):
mock_session_factory = Mock(spec=SessionFactory)
mock_session = Mock(spec=Session)
mock_session_factory.create_session.return_value = mock_session
mock_get_session_factory.return_value = mock_session_factory
test = self
class TestService(Service):
@transactional
def test_function(self):
test.assertEqual(self._orm, mock_session)
service = TestService()
service.test_function()
@staticmethod
def _create_simple_service() -> Service:
class SimpleService(Service):
pass
return SimpleService()
|
class TestTxSession(unittest.TestCase):
def setUp(self):
pass
@patch("zsl.service.service._get_session_factory")
def test_tx_session__when_called_as_ctx__then_call_commit_close(self, mock_get_session_factory):
pass
@patch("zsl.service.service._get_session_factory")
def test_tx_session__when_called_with_exception__then_rollback(self, mock_get_session_factory):
pass
@patch("zsl.service.service.tx_session")
def test_transactional_decorator__when_called_with_decorator__then_use_tx_session_context(self, mock_tx_session):
pass
class TestService(Service):
@transactional
def test_function(self):
pass
@patch("zsl.service.service._get_session_factory")
def test_transactional_decorator__when_called_with_decorator__then_use_set_service_orm(self, mock_get_session_factory):
pass
class TestService(Service):
@transactional
def test_function(self):
pass
@staticmethod
def _create_simple_service() -> Service:
pass
        class SimpleService(Service):
            pass
| 19 | 0 | 9 | 2 | 7 | 0 | 1 | 0 | 1 | 8 | 5 | 0 | 5 | 1 | 6 | 78 | 78 | 20 | 58 | 33 | 39 | 0 | 51 | 25 | 39 | 1 | 2 | 2 | 8 |
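
The TestTxSession cases above assert commit/close on success and rollback/close on error for zsl's tx_session context manager. A generic, hedged sketch of that behaviour written against a SQLAlchemy-style session factory, not zsl's actual implementation:

# Generic commit/rollback/close context manager matching the behaviour the
# tests above assert for tx_session.
from contextlib import contextmanager


@contextmanager
def tx_session(session_factory):
    session = session_factory()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()


# Example with SQLAlchemy (assumed in-memory SQLite engine):
# from sqlalchemy import create_engine, text
# from sqlalchemy.orm import sessionmaker
# factory = sessionmaker(bind=create_engine("sqlite://"))
# with tx_session(factory) as s:
#     s.execute(text("SELECT 1"))
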
7,699 |
AtteqCom/zsl
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AtteqCom_zsl/tests/service/service__tx_session_test.py
|
tests.service.service__tx_session_test.TestTxSession._create_simple_service.SimpleService
|
class SimpleService(Service):
pass
|
class SimpleService(Service):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |