Dataset schema. For string columns, Min/Max give value-length ranges; for numeric columns, observed value ranges.

| Column | Dtype | Min | Max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string (length) | 7 | 55 |
| file_path | string (length) | 9 | 332 |
| class_name | string (length) | 3 | 290 |
| human_written_code | string (length) | 12 | 4.36M |
| class_skeleton | string (length) | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |
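A minimal sketch of how a dataset with this schema could be loaded and filtered with the Hugging Face `datasets` library; the dataset identifier below is a placeholder, not the real one.

```python
from datasets import load_dataset

# Placeholder identifier; substitute the dataset's actual repo id.
ds = load_dataset("org/class-level-python-dataset", split="train")

# Example: keep small, documented classes using the metric columns above.
small_documented = ds.filter(
    lambda row: row["total_doc_str"] > 0 and row["CountLineCode"] < 100
)
print(small_documented[0]["class_name"])
```

Each record below lists its scalar cells on one line in the column order above; the human_written_code and class_skeleton cells follow in full, separated by a lone `|`, and the trailing line of numbers holds the metric columns from total_program_units through SumCyclomatic.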
2,900 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/abl/vpath/base/uriparse.py | abl.vpath.base.uriparse.URLParser |
class URLParser:
"""Internal Basic URL parsing class.
In this code URI generally refers to generic URIs and URL refers to
URIs that match scheme://user:password@host:port/path?query#fragment.
While users of this library could use this as an example of how to write a
parser or even as a parent class of their own parser, they should not directly
instantiate it - let URIParser do that for you.
"""
# user, password, host, port, path, query, fragment
_defaults = (None, None, None, None, None, None, None)
def __init__(self, defaults=None):
if defaults:
self._defaults = defaults
dlist = list(self._defaults)
for d in range(len(self._defaults)):
if dlist[d]:
dlist[d] = str(dlist[d])
self._defaults = dlist
def parse(self, urlinfo):
scheme, authority, path, query, frag = urisplit(urlinfo)
user, passwd, host, port = split_authority(authority)
duser, dpasswd, dhost, dport, dpath, dquery, dfrag = self._defaults
if user is None:
user = duser
if passwd is None:
passwd = dpasswd
if host is None:
host = dhost
if port is None:
port = dport
if path == '':
path = dpath
if query is None:
query = dquery
if frag is None:
frag = self._defaults[6]
return (user, passwd, host, port, path, query, frag)
def unparse(self, pieces):
authority = join_authority(pieces[:4])
return uriunsplit(('', authority, pieces[4], pieces[5], pieces[6]))
|
class URLParser:
'''Internal Basic URL parsing class.
In this code URI generally refers to generic URIs and URL refers to
URIs that match scheme://user:password@host:port/path?query#fragment.
While users of this library could use this as an example of how to write a
parser or even as a parent class of their own parser, they should not directly
instantiate it - let URIParser do that for you.
'''
def __init__(self, defaults=None):
pass
def parse(self, urlinfo):
pass
def unparse(self, pieces):
pass
| 4 | 1 | 10 | 0 | 10 | 0 | 4 | 0.25 | 0 | 3 | 0 | 9 | 3 | 0 | 3 | 3 | 45 | 5 | 32 | 11 | 28 | 8 | 32 | 11 | 28 | 8 | 0 | 2 | 13 |
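For comparison, a rough standalone equivalent of URLParser.parse using only the standard library; urisplit and split_authority are abl.vpath internals, so urllib.parse stands in here and edge-case behavior (e.g. empty-string vs. None checks) may differ.

```python
from urllib.parse import urlsplit

def parse_url(url, defaults=(None, None, None, None, None, None, None)):
    """Split a URL into (user, password, host, port, path, query, fragment),
    falling back to the given defaults for missing pieces."""
    parts = urlsplit(url)
    duser, dpasswd, dhost, dport, dpath, dquery, dfrag = defaults
    return (parts.username or duser,
            parts.password or dpasswd,
            parts.hostname or dhost,
            parts.port or dport,
            parts.path or dpath,
            parts.query or dquery,
            parts.fragment or dfrag)

print(parse_url('http://alice:secret@example.com:8080/a?b=1#frag'))
# ('alice', 'secret', 'example.com', 8080, '/a', 'b=1', 'frag')
```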
2,901 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/tests/test_fs.py | tests.test_fs.TestLocalFSSymlink |
class TestLocalFSSymlink(CommonLocalFSSymlinkTest):
__test__ = is_on_mac()
def setUp(self):
self.thisdir = os.path.split(os.path.abspath(__file__))[0]
self.tmpdir = tempfile.mkdtemp('.temp', 'test-local-fs', self.thisdir)
self.baseurl = 'file://' + self.tmpdir
def tearDown(self):
shutil.rmtree(self.tmpdir)
|
class TestLocalFSSymlink(CommonLocalFSSymlinkTest):
def setUp(self):
pass
def tearDown(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 3 | 2 | 84 | 11 | 3 | 8 | 7 | 5 | 0 | 8 | 7 | 5 | 1 | 3 | 0 | 2 |
2,902 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/abl/vpath/base/uriparse.py | abl.vpath.base.uriparse.TftpURLParser |
class TftpURLParser(URLParser):
"""Internal class to hold the defaults of TFTP URLs"""
_defaults=(None, None, None, 69, '/', None, None)
|
class TftpURLParser(URLParser):
'''Internal class to hold the defaults of TFTP URLs'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 3 | 0 | 2 | 2 | 1 | 1 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
2,903 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/abl/vpath/base/uriparse.py | abl.vpath.base.uriparse.TelnetURLParser |
class TelnetURLParser(URLParser):
"""Internal class to hold the defaults of telnet URLs"""
_defaults=(None, None, None, 23, '/', None, None)
|
class TelnetURLParser(URLParser):
'''Internal class to hold the defaults of telnet URLs'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 3 | 0 | 2 | 2 | 1 | 1 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
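The Tftp and Telnet parsers above only override the class-level _defaults tuple, and the same pattern extends to any scheme. A sketch for FTP (this subclass is illustrative, not part of the source; it assumes the URLParser class from the earlier record):

```python
class FtpURLParser(URLParser):
    """Illustrative: default port 21 and root path for FTP URLs."""
    _defaults = (None, None, None, 21, '/', None, None)
```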
2,904 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/abl/vpath/base/uriparse.py | abl.vpath.base.uriparse.MailtoURIParser |
class MailtoURIParser(URLParser):
"""Internal mailto URI parser
This class has a basic understanding of mailto: URIs of the
format: mailto:user@host?query#frag
"""
# user, host, query, fragment
_defaults = (None, None, None, None)
def parse(self, urlinfo):
scheme, authority, path, query, frag = urisplit(urlinfo)
user, host = path.split('@', 1)
return (user, host, query, frag)
def unparse(self, pieces):
path = pieces[0] + '@' + pieces[1]
return uriunsplit(('', None, path, pieces[2], pieces[3]))
|
class MailtoURIParser(URLParser):
'''Internal mailto URI parser
This class has a basic understanding of mailto: URIs of the
format: mailto:user@host?query#frag
'''
def parse(self, urlinfo):
pass
def unparse(self, pieces):
pass
| 3 | 1 | 4 | 0 | 4 | 0 | 1 | 0.56 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 5 | 18 | 4 | 9 | 7 | 6 | 5 | 9 | 7 | 6 | 1 | 1 | 0 | 2 |
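MailtoURIParser.parse reduces to one split on '@'; a self-contained stdlib sketch of the same idea:

```python
from urllib.parse import urlsplit

def parse_mailto(uri):
    # urlsplit leaves 'user@host' in .path for mailto: URIs.
    parts = urlsplit(uri)
    user, host = parts.path.split('@', 1)
    return (user, host, parts.query, parts.fragment)

print(parse_mailto('mailto:alice@example.com?subject=hi#frag'))
# ('alice', 'example.com', 'subject=hi', 'frag')
```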
2,905 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/tests/test_path.py | tests.test_path.TestEq |
class TestEq(TestCase):
def test_eq(self):
"""
test for bugfix: __eq__ didn't check that 'other' is of URI type
"""
p = URI('/some/path')
self.assertNotEqual(p, None)
|
class TestEq(TestCase):
def test_eq(self):
'''
test for bugfix: __eq__ didn't check that 'other' is of URI type
'''
pass
| 2 | 1 | 6 | 0 | 3 | 3 | 1 | 0.75 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 73 | 7 | 0 | 4 | 3 | 2 | 3 | 4 | 3 | 2 | 1 | 2 | 0 | 1 |
2,906 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/tests/test_fs_symlink_copy.py | tests.test_fs_symlink_copy.CommonLocalFSSymlinkCopyTest |
class CommonLocalFSSymlinkCopyTest(TestCase):
__test__ = False
def test_copy_filesymlink_to_file_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
moo_path = root / 'moo.txt'
create_file(moo_path, content='moomoo')
tee_path.copy(moo_path, followlinks=True)
self.assertTrue(not moo_path.islink())
self.assertEqual(load_file(moo_path), 'foobar')
def test_copy_filesymlink_to_file_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
moo_path = root / 'moo2.txt'
create_file(moo_path, content='moomoo')
tee_path.copy(moo_path, followlinks=False)
self.assertTrue(moo_path.islink())
self.assertEqual(load_file(moo_path), 'foobar')
def test_copy_filesymlink_to_dir_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
moo_path = root / 'moo'
moo_path.makedirs()
tee_path.copy(moo_path, followlinks=True)
helloworld_path = moo_path / 'helloworld'
self.assertTrue(not helloworld_path.islink())
self.assertEqual(load_file(helloworld_path), 'foobar')
def test_copy_filesymlink_to_dir_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
moo_path = root / 'moo'
moo_path.makedirs()
tee_path.copy(moo_path, followlinks=False)
helloworld_path = moo_path / 'helloworld'
self.assertTrue(helloworld_path.islink())
self.assertEqual(load_file(helloworld_path), 'foobar')
def test_copy_filesymlink_to_missingfile_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
moo_path = root / 'moo'
moo_path.makedirs()
helloworld_path = moo_path / 'helloworld'
tee_path.copy(helloworld_path, followlinks=True)
self.assertTrue(not helloworld_path.islink())
self.assertEqual(load_file(helloworld_path), 'foobar')
def test_copy_filesymlink_to_missingfile_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
moo_path = root / 'moo'
moo_path.makedirs()
helloworld_path = moo_path / 'helloworld'
tee_path.copy(helloworld_path, followlinks=False)
self.assertTrue(helloworld_path.islink())
self.assertEqual(load_file(helloworld_path), 'foobar')
def test_copy_filesymlink_to_filesymlink_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
gaz2_path = bar_path / 'gaz2.txt'
create_file(gaz2_path, content='foobar2')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
gaz2_path.symlink(tee2_path)
tee_path.copy(tee2_path, followlinks=True)
# when following links copying to a symlink->file modifies the
# referenced file!
self.assertTrue(tee2_path.islink())
self.assertEqual(load_file(tee2_path), 'foobar')
self.assertEqual(load_file(gaz2_path), 'foobar')
self.assertTrue((bar_path / 'gaz2.txt').isfile())
self.assertTrue((bar_path / 'gaz.txt').isfile())
def test_copy_filesymlink_to_filesymlink_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
gaz2_path = bar_path / 'gaz2.txt'
create_file(gaz2_path, content='foobar2')
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
gaz2_path.symlink(tee2_path)
tee_path.copy(tee2_path, followlinks=False)
self.assertTrue(tee2_path.islink())
self.assertEqual(load_file(tee2_path), 'foobar')
self.assertEqual(tee2_path.readlink(), gaz_path)
# when preserving links, we don't touch the original file!
self.assertEqual(load_file(gaz2_path), 'foobar2')
def test_copy_filesymlink_to_dirsymlink_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
moo_path = root / 'moo'
moo_path.makedirs()
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
moo_path.symlink(tee2_path)
tee_path.copy(tee2_path, followlinks=True)
helloworld_path = tee2_path / 'helloworld'
self.assertTrue(tee2_path.islink()) # still a link?
self.assertTrue(tee_path.islink()) # still a link?
self.assertTrue(not helloworld_path.islink())
self.assertTrue(helloworld_path.isfile())
self.assertEqual(load_file(helloworld_path), 'foobar')
self.assertTrue((moo_path / 'helloworld').isfile())
def test_copy_filesymlink_to_dirsymlink_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
moo_path = root / 'moo'
moo_path.makedirs()
tee_path = root / 'helloworld'
gaz_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
moo_path.symlink(tee2_path)
tee_path.copy(tee2_path, followlinks=False)
helloworld_path = tee2_path / 'helloworld'
self.assertTrue(tee2_path.islink()) # still a link?
self.assertTrue(tee_path.islink()) # still a link?
self.assertTrue(helloworld_path.islink())
self.assertTrue(helloworld_path.isfile())
self.assertEqual(load_file(helloworld_path), 'foobar')
self.assertTrue((moo_path / 'helloworld').islink())
self.assertTrue((moo_path / 'helloworld').isfile())
self.assertEqual(helloworld_path.readlink(), gaz_path)
#------------------------------
def test_copy_dirsymlink_to_file_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
moo_path = root / 'moo.txt'
create_file(moo_path, content='moomoo')
# can't copy dir over existing file
self.assertRaises(OSError, tee_path.copy, moo_path,
recursive=True, followlinks=True)
def test_copy_dirsymlink_to_file_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
moo_path = root / 'moo.txt'
create_file(moo_path, content='moomoo')
tee_path.copy(moo_path, recursive=True, followlinks=False)
self.assertTrue(moo_path.islink())
self.assertTrue(moo_path.isdir())
self.assertEqual(load_file(moo_path / 'gaz.txt'), 'foobar')
def test_copy_dirsymlink_to_dir_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
moo_path = root / 'moo'
moo_path.makedirs()
# can't copy dir over existing file
tee_path.copy(moo_path, recursive=True, followlinks=True)
helloworld_path = moo_path / 'helloworld'
self.assertTrue(not helloworld_path.islink())
self.assertTrue(helloworld_path.isdir())
self.assertTrue((helloworld_path / 'gaz.txt').isfile())
def test_copy_dirsymlink_to_dir_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
moo_path = root / 'moo'
moo_path.makedirs()
# can't copy dir over existing file
tee_path.copy(moo_path, recursive=True, followlinks=False)
helloworld_path = moo_path / 'helloworld'
self.assertTrue(helloworld_path.islink())
self.assertTrue(helloworld_path.isdir())
self.assertEqual(helloworld_path.readlink(), bar_path)
def test_copy_dirsymlink_to_missingfile_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
moo_path = root / 'moo'
# can't copy dir over existing file
tee_path.copy(moo_path, recursive=True, followlinks=True)
self.assertTrue(not moo_path.islink())
self.assertTrue(moo_path.isdir())
self.assertTrue((moo_path / 'gaz.txt').isfile())
def test_copy_dirsymlink_to_missingfile_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
moo_path = root / 'moo'
# can't copy dir over existing file
tee_path.copy(moo_path, recursive=True, followlinks=False)
self.assertTrue(moo_path.islink())
self.assertTrue(moo_path.isdir())
self.assertEqual(moo_path.readlink(), bar_path)
def test_copy_dirsymlink_to_filesymlink_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
gaz2_path = bar_path / 'gaz2.txt'
create_file(gaz2_path, content='foobar2')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
gaz2_path.symlink(tee2_path)
# copying a dir to a symlink->file fails.
self.assertRaises(OSError, tee_path.copy,
tee2_path, recursive=True, followlinks=True)
def test_copy_dirsymlink_to_filesymlink_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
gaz2_path = bar_path / 'gaz2.txt'
create_file(gaz2_path, content='foobar2')
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
gaz2_path.symlink(tee2_path)
# copying a dir to a symlink->file fails.
tee_path.copy(tee2_path, recursive=True, followlinks=False)
self.assertTrue(tee2_path.islink())
self.assertTrue(tee2_path.isdir())
self.assertEqual(tee2_path.readlink(), tee_path.readlink())
self.assertEqual(tee2_path.readlink(), bar_path)
def test_copy_dirsymlink_to_dirsymlink_followlinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
moo_path = root / 'moo'
moo_path.makedirs()
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
moo_path.symlink(tee2_path)
tee_path.copy(tee2_path, recursive=True, followlinks=True)
helloworld_path = tee2_path / 'helloworld'
self.assertTrue(tee2_path.islink()) # still a link?
self.assertTrue(tee_path.islink()) # still a link?
self.assertTrue(not helloworld_path.islink())
self.assertTrue(helloworld_path.isdir())
self.assertTrue((helloworld_path / 'gaz.txt').isfile())
def test_copy_dirsymlink_to_dirsymlink_preservelinks(self):
root = URI(self.baseurl)
bar_path = root / 'foo' / 'bar'
bar_path.makedirs()
gaz_path = bar_path / 'gaz.txt'
create_file(gaz_path, content='foobar')
moo_path = root / 'moo'
moo_path.makedirs()
tee_path = root / 'helloworld'
bar_path.symlink(tee_path)
tee2_path = root / 'helloworld2'
moo_path.symlink(tee2_path)
tee_path.copy(tee2_path, recursive=True, followlinks=False)
helloworld_path = tee2_path / 'helloworld'
self.assertTrue(tee2_path.islink()) # still a link?
self.assertTrue(tee_path.islink()) # still a link?
self.assertTrue(helloworld_path.islink())
self.assertTrue(helloworld_path.isdir())
self.assertTrue((helloworld_path / 'gaz.txt').isfile())
self.assertEqual(helloworld_path.readlink(), bar_path)
self.assertEqual(helloworld_path.readlink(), tee_path.readlink())
|
class CommonLocalFSSymlinkCopyTest(TestCase):
def test_copy_filesymlink_to_file_followlinks(self):
pass
def test_copy_filesymlink_to_file_preservelinks(self):
pass
def test_copy_filesymlink_to_dir_followlinks(self):
pass
def test_copy_filesymlink_to_dir_preservelinks(self):
pass
def test_copy_filesymlink_to_missingfile_followlinks(self):
pass
def test_copy_filesymlink_to_missingfile_preservelinks(self):
pass
def test_copy_filesymlink_to_filesymlink_followlinks(self):
pass
def test_copy_filesymlink_to_filesymlink_preservelinks(self):
pass
def test_copy_filesymlink_to_dirsymlink_followlinks(self):
pass
def test_copy_filesymlink_to_dirsymlink_preservelinks(self):
pass
def test_copy_dirsymlink_to_file_followlinks(self):
pass
def test_copy_dirsymlink_to_file_preservelinks(self):
pass
def test_copy_dirsymlink_to_dir_followlinks(self):
pass
def test_copy_dirsymlink_to_dir_preservelinks(self):
pass
def test_copy_dirsymlink_to_missingfile_followlinks(self):
pass
def test_copy_dirsymlink_to_missingfile_preservelinks(self):
pass
def test_copy_dirsymlink_to_filesymlink_followlinks(self):
pass
def test_copy_dirsymlink_to_filesymlink_preservelinks(self):
pass
def test_copy_dirsymlink_to_dirsymlink_followlinks(self):
pass
def test_copy_dirsymlink_to_dirsymlink_preservelinks(self):
pass
| 21 | 0 | 22 | 5 | 16 | 1 | 1 | 0.06 | 1 | 1 | 0 | 2 | 20 | 0 | 20 | 92 | 473 | 148 | 314 | 140 | 293 | 19 | 312 | 140 | 291 | 1 | 2 | 0 | 20 |
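The followlinks flag exercised throughout these tests mirrors the standard library's follow_symlinks: copying through the link duplicates the target's content, while preserving it recreates the link itself. A self-contained stdlib sketch:

```python
import os
import shutil
import tempfile

tmp = tempfile.mkdtemp()
target = os.path.join(tmp, 'gaz.txt')
with open(target, 'w') as f:
    f.write('foobar')
link = os.path.join(tmp, 'helloworld')
os.symlink(target, link)

# follow_symlinks=True (the default): result is a plain file with the content.
shutil.copy2(link, os.path.join(tmp, 'followed'))
assert not os.path.islink(os.path.join(tmp, 'followed'))

# follow_symlinks=False: result is itself a symlink to the same target.
shutil.copy2(link, os.path.join(tmp, 'preserved'), follow_symlinks=False)
assert os.path.islink(os.path.join(tmp, 'preserved'))

shutil.rmtree(tmp)
```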
2,907 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/tests/test_fs.py | tests.test_fs.TestLocalFSCopy2 |
class TestLocalFSCopy2(CommonFSCopyTest):
__test__ = True
def setUp(self):
thisdir = os.path.split(os.path.abspath(__file__))[0]
self.tmpdir = tempfile.mkdtemp('.temp', 'test-local-fs', thisdir)
self.baseurl = 'file://' + self.tmpdir
def tearDown(self):
shutil.rmtree(self.tmpdir)
|
class TestLocalFSCopy2(CommonFSCopyTest):
def setUp(self):
pass
def tearDown(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 79 | 10 | 2 | 8 | 7 | 5 | 0 | 8 | 7 | 5 | 1 | 3 | 0 | 2 |
2,908 | AbletonAG/abl.vpath | AbletonAG_abl.vpath/tests/test_path.py | tests.test_path.TestFileSystem |
class TestFileSystem(TestCase):
def setUp(self):
thisdir = os.path.split(os.path.abspath(__file__))[0]
self.tmpdir = tempfile.mkdtemp('.temp', 'test-local-fs', thisdir)
self.baseurl = 'file://' + self.tmpdir
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_backend(self):
foo_path = URI(self.baseurl) / 'foo'
bar_path = URI(foo_path.path + '?arg1=value1')
foo_2_path = foo_path / 'some_dir'
self.assertTrue(foo_path.get_connection() is foo_2_path.get_connection())
self.assertTrue(bar_path.get_connection() is not foo_path.get_connection())
foo_path_connection = foo_path.get_connection()
foo_path.query['arg'] = 'value'
self.assertTrue(foo_path_connection is not foo_path.get_connection())
|
class TestFileSystem(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_backend(self):
pass
| 4 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 2 | 3 | 75 | 21 | 5 | 16 | 11 | 12 | 0 | 16 | 11 | 12 | 1 | 2 | 0 | 3 |
2,909 | Accelize/pycosio | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_oss.py | tests.test_storage_oss.test_mocked_storage.Response |
class Response:
"""HTTP request response"""
status = 200
request_id = 0
def __init__(self, **attributes):
for name, value in attributes.items():
setattr(self, name, value)
|
class Response:
'''HTTP request response'''
def __init__(self, **attributes):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 2 | 0.17 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 8 | 1 | 6 | 5 | 4 | 1 | 6 | 5 | 4 | 2 | 0 | 1 | 2 |
2,910 | Accelize/pycosio | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_oss.py | tests.test_storage_oss.test_mocked_storage.Service |
class Service:
"""oss2.Service"""
def __init__(self, *_, **__):
"""oss2.Service.__init__"""
@staticmethod
def list_buckets(**_):
"""oss2.Service.list_buckets"""
response = storage_mock.get_locators()
buckets = []
for name, headers in response.items():
bucket = HeadObjectResult(Response(headers=headers))
bucket.name = name
buckets.append(bucket)
return ListResult(buckets=buckets)
|
class Service:
'''oss2.Service'''
def __init__(self, *_, **__):
'''oss2.Service.__init__'''
pass
@staticmethod
def list_buckets(**_):
'''oss2.Service.list_buckets'''
pass
| 4 | 3 | 6 | 0 | 5 | 1 | 2 | 0.27 | 0 | 2 | 2 | 0 | 1 | 0 | 2 | 2 | 16 | 2 | 11 | 8 | 7 | 3 | 10 | 7 | 7 | 2 | 0 | 1 | 3 |
2,911 | Accelize/pycosio | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_s3.py | tests.test_storage_s3.test_mocked_storage.Client |
class Client:
"""boto3.client"""
def __init__(self, *_, **kwargs):
"""boto3.client.__init__"""
self.kwargs = kwargs
@staticmethod
def get_object(Bucket=None, Key=None, Range=None, **_):
"""boto3.client.get_object"""
return dict(Body=BytesIO(
storage_mock.get_object(Bucket, Key, header=dict(Range=Range))))
@staticmethod
def head_object(Bucket=None, Key=None, **_):
"""boto3.client.head_object"""
if no_head:
return dict()
return storage_mock.head_object(Bucket, Key)
@staticmethod
def put_object(Bucket=None, Key=None, Body=None, **_):
"""boto3.client.put_object"""
storage_mock.put_object(Bucket, Key, Body, new_file=True)
@staticmethod
def delete_object(Bucket=None, Key=None, **_):
"""boto3.client.delete_object"""
storage_mock.delete_object(Bucket, Key)
@staticmethod
def head_bucket(Bucket=None, **_):
"""boto3.client.head_bucket"""
return storage_mock.head_locator(Bucket)
@staticmethod
def create_bucket(Bucket=None, **_):
"""boto3.client.create_bucket"""
storage_mock.put_locator(Bucket)
@staticmethod
def copy_object(Bucket=None, Key=None, CopySource=None, **_):
"""boto3.client.copy_object"""
storage_mock.copy_object(
CopySource['Key'], Key, dst_locator=Bucket,
src_locator=CopySource['Bucket'])
@staticmethod
def delete_bucket(Bucket=None, **_):
"""boto3.client.delete_bucket"""
storage_mock.delete_locator(Bucket)
@staticmethod
def list_objects_v2(Bucket=None, Prefix=None, MaxKeys=None, **_):
"""boto3.client.list_objects_v2"""
objects = []
for name, header in storage_mock.get_locator(
Bucket, prefix=Prefix, limit=MaxKeys,
raise_404_if_empty=False).items():
header['Key'] = name
objects.append(header)
if not objects:
return dict()
return dict(Contents=objects)
@staticmethod
def list_buckets():
"""boto3.client.list_buckets"""
objects = []
for name, header in storage_mock.get_locators().items():
header['Name'] = name
objects.append(header)
return dict(Buckets=objects)
@staticmethod
def create_multipart_upload(**_):
"""boto3.client.create_multipart_upload"""
return dict(UploadId=123)
@staticmethod
def complete_multipart_upload(
Bucket=None, Key=None, MultipartUpload=None,
UploadId=None, **_):
"""boto3.client.complete_multipart_upload"""
uploaded_parts = MultipartUpload['Parts']
assert UploadId == 123
parts = []
for part in uploaded_parts:
parts.append(Key + str(part['PartNumber']))
assert part['ETag']
storage_mock.concat_objects(Bucket, Key, parts)
@staticmethod
def upload_part(Bucket=None, Key=None, PartNumber=None,
Body=None, UploadId=None, **_):
"""boto3.client.upload_part"""
assert UploadId == 123
return storage_mock.put_object(
Bucket, Key + str(PartNumber), Body)
|
class Client:
'''boto3.client'''
def __init__(self, *_, **kwargs):
'''boto3.client.__init__'''
pass
@staticmethod
def get_object(Bucket=None, Key=None, Range=None, **_):
'''boto3.client.get_object'''
pass
@staticmethod
def head_object(Bucket=None, Key=None, **_):
'''boto3.client.head_object'''
pass
@staticmethod
def put_object(Bucket=None, Key=None, Body=None, **_):
'''boto3.client.put_object'''
pass
@staticmethod
def delete_object(Bucket=None, Key=None, **_):
'''boto3.client.delete_object'''
pass
@staticmethod
def head_bucket(Bucket=None, **_):
'''boto3.client.head_bucket'''
pass
@staticmethod
def create_bucket(Bucket=None, **_):
'''boto3.client.create_bucket'''
pass
@staticmethod
def copy_object(Bucket=None, Key=None, CopySource=None, **_):
'''boto3.client.copy_object'''
pass
@staticmethod
def delete_bucket(Bucket=None, **_):
'''boto3.client.delete_bucket'''
pass
@staticmethod
def list_objects_v2(Bucket=None, Prefix=None, MaxKeys=None, **_):
'''boto3.client.list_objects_v2'''
pass
@staticmethod
def list_buckets():
'''boto3.client.list_buckets'''
pass
@staticmethod
def create_multipart_upload(**_):
'''boto3.client.create_multipart_upload'''
pass
@staticmethod
def complete_multipart_upload(
Bucket=None, Key=None, MultipartUpload=None,
UploadId=None, **_):
'''boto3.client.complete_multipart_upload'''
pass
@staticmethod
def upload_part(Bucket=None, Key=None, PartNumber=None,
Body=None, UploadId=None, **_):
'''boto3.client.upload_part'''
pass
| 28 | 15 | 5 | 0 | 4 | 1 | 1 | 0.21 | 0 | 2 | 0 | 0 | 1 | 1 | 14 | 14 | 105 | 20 | 70 | 39 | 39 | 15 | 48 | 23 | 33 | 3 | 0 | 1 | 19 |
2,912 | Accelize/pycosio | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_s3.py | tests.test_storage_s3.test_mocked_storage.Session |
class Session:
"""boto3.session.Session"""
client = Client
region_name = ''
def __init__(self, *_, **__):
"""boto3.session.Session.__init__"""
|
class Session:
'''boto3.session.Session'''
def __init__(self, *_, **__):
'''boto3.session.Session.__init__'''
pass
| 2 | 2 | 2 | 0 | 1 | 1 | 1 | 0.5 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 7 | 1 | 4 | 4 | 2 | 2 | 4 | 4 | 2 | 1 | 0 | 0 | 1 |
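The excerpt does not show how the Client and Session mocks above get wired in; one common way, assuming boto3 is importable in the test environment, is to patch the session class so the S3 backend instantiates the fakes:

```python
from unittest import mock

# The patch target is an assumption; the project's actual fixtures are not shown.
with mock.patch('boto3.session.Session', Session):
    import boto3
    client = boto3.session.Session().client()  # instantiates the mock Client
    client.create_bucket(Bucket='test-bucket')  # routed to storage_mock
```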
2,913 | Accelize/pycosio | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_swift.py | tests.test_storage_swift.test_mocked_storage.Connection |
class Connection:
"""swiftclient.client.Connection"""
def __init__(self, *_, **kwargs):
self.kwargs = kwargs
@staticmethod
def get_auth():
"""swiftclient.client.Connection.get_auth"""
return '###',
@staticmethod
def get_object(container, obj, headers=None, **_):
"""swiftclient.client.Connection.get_object"""
return (storage_mock.head_object(container, obj),
storage_mock.get_object(container, obj, header=headers))
@staticmethod
def head_object(container, obj, **_):
"""swiftclient.client.Connection.head_object"""
return storage_mock.head_object(container, obj)
@staticmethod
def put_object(container, obj, contents, query_string=None, **_):
"""swiftclient.client.Connection.put_object"""
# Concatenates object parts
if query_string == 'multipart-manifest=put':
manifest = loads(contents)
parts = []
for part in manifest:
path = part['path'].split(container + '/')[1]
parts.append(path)
# Check manifest format
assert path.startswith(obj)
assert part['etag']
header = storage_mock.concat_objects(container, obj, parts)
# Single object upload
else:
header = storage_mock.put_object(container, obj, contents,
new_file=True)
# Return Etag
return header['ETag']
@staticmethod
def delete_object(container, obj, **_):
"""swiftclient.client.Connection.delete_object"""
storage_mock.delete_object(container, obj)
@staticmethod
def put_container(container, **_):
"""swiftclient.client.Connection.put_container"""
storage_mock.put_locator(container)
@staticmethod
def head_container(container=None, **_):
"""swiftclient.client.Connection.head_container"""
return storage_mock.head_locator(container)
@staticmethod
def delete_container(container, **_):
"""swiftclient.client.Connection.delete_container"""
storage_mock.delete_locator(container)
@staticmethod
def copy_object(container=None, obj=None, destination=None, **_):
"""swiftclient.client.Connection.copy_object"""
storage_mock.copy_object(obj, destination, src_locator=container)
@staticmethod
def get_container(container, limit=None, prefix=None, **_):
"""swiftclient.client.Connection.get_container"""
objects = []
for name, header in storage_mock.get_locator(
container, prefix=prefix, limit=limit).items():
header['name'] = name
objects.append(header)
return storage_mock.head_locator(container), objects
@staticmethod
def get_account():
"""swiftclient.client.Connection.get_account"""
objects = []
for name, header in storage_mock.get_locators().items():
header['name'] = name
objects.append(header)
return {}, objects
|
class Connection:
'''swiftclient.client.Connection'''
def __init__(self, *_, **kwargs):
pass
@staticmethod
def get_auth():
'''swiftclient.client.Connection.get_auth'''
pass
@staticmethod
def get_object(container, obj, headers=None, **_):
'''swiftclient.client.Connection.get_object'''
pass
@staticmethod
def head_object(container, obj, **_):
'''swiftclient.client.Connection.head_object'''
pass
@staticmethod
def put_object(container, obj, contents, query_string=None, **_):
'''swiftclient.client.Connection.put_object'''
pass
@staticmethod
def delete_object(container, obj, **_):
'''swiftclient.client.Connection.delete_object'''
pass
@staticmethod
def put_container(container, **_):
'''swiftclient.client.Connection.put_container'''
pass
@staticmethod
def head_container(container=None, **_):
'''swiftclient.client.Connection.head_container'''
pass
@staticmethod
def delete_container(container, **_):
'''swiftclient.client.Connection.delete_container'''
pass
@staticmethod
def copy_object(container=None, obj=None, destination=None, **_):
'''swiftclient.client.Connection.copy_object'''
pass
@staticmethod
def get_container(container, limit=None, prefix=None, **_):
'''swiftclient.client.Connection.get_container'''
pass
@staticmethod
def get_account():
'''swiftclient.client.Connection.get_account'''
pass
| 24 | 12 | 6 | 1 | 4 | 1 | 1 | 0.29 | 0 | 0 | 0 | 0 | 1 | 1 | 12 | 12 | 93 | 19 | 58 | 34 | 34 | 17 | 43 | 23 | 30 | 3 | 0 | 2 | 16 |
2,914 | Accelize/pycosio | Accelize_pycosio/tests_storage_package/mock.py | tests_storage_package.mock._Error416 |
class _Error416(_exc.ObjectException):
"""416 Error"""
|
class _Error416(_exc.ObjectException):
'''416 Error'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
2,915 | Accelize/pycosio | Accelize_pycosio/tests_storage_package/mock.py | tests_storage_package.mock.MockSystem |
class MockSystem(_SystemBase):
"""Mock System"""
_CTIME_KEYS = ('Created',)
def _get_client(self):
"""
Storage client
Returns:
tests.storage_mock.ObjectStorageMock: client
"""
storage_mock = _ObjectStorageMock(_raise_404, _raise_416, _raise_500)
storage_mock.attach_io_system(self)
return storage_mock
def get_client_kwargs(self, path):
"""
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
"""
locator, path = self.split_locator(path)
kwargs = dict(locator=locator)
if path:
kwargs['path'] = path
return kwargs
def _get_roots(self):
"""
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
"""
return 'mock://',
def _head(self, client_kwargs):
"""
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
"""
if 'path' in client_kwargs:
return self.client.head_object(**client_kwargs)
return self.client.head_locator(**client_kwargs)
def _make_dir(self, client_kwargs):
"""
Make a directory.
Args:
client_kwargs (dict): Client arguments.
"""
if 'path' in client_kwargs:
return self.client.put_object(**client_kwargs)
return self.client.put_locator(**client_kwargs)
def copy(self, src, dst, other_system=None):
"""
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass):
Other storage system. May be required for some storage.
"""
self.client.copy_object(
src_path=self.relpath(src), dst_path=self.relpath(dst))
def _remove(self, client_kwargs):
"""
Remove an object.
Args:
client_kwargs (dict): Client arguments.
"""
if 'path' in client_kwargs:
return self.client.delete_object(**client_kwargs)
return self.client.delete_locator(**client_kwargs)
def _list_locators(self):
"""
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
"""
return self.client.get_locators().items()
def _list_objects(self, client_kwargs, path, max_request_entries):
"""
Lists objects.
Args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
"""
objects = tuple(self.client.get_locator(
prefix=path, limit=max_request_entries, raise_404_if_empty=False,
**client_kwargs).items())
if not objects:
_raise_404()
return objects
|
class MockSystem(_SystemBase):
'''Mock System'''
def _get_client(self):
'''
Storage client
Returns:
tests.storage_mock.ObjectStorageMock: client
'''
pass
def get_client_kwargs(self, path):
'''
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
'''
pass
def _get_roots(self):
'''
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
'''
pass
def _head(self, client_kwargs):
'''
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
'''
pass
def _make_dir(self, client_kwargs):
'''
Make a directory.
Args:
client_kwargs (dict): Client arguments.
'''
pass
def copy(self, src, dst, other_system=None):
'''
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass):
Other storage system. May be required for some storage.
'''
pass
def _remove(self, client_kwargs):
'''
Remove an object.
Args:
client_kwargs (dict): Client arguments.
'''
pass
def _list_locators(self):
'''
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
'''
pass
def _list_objects(self, client_kwargs, path, max_request_entries):
'''
Lists objects.
Args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
'''
pass
| 10 | 10 | 12 | 1 | 4 | 6 | 2 | 1.55 | 1 | 2 | 0 | 0 | 9 | 0 | 9 | 67 | 118 | 21 | 38 | 15 | 28 | 59 | 35 | 15 | 25 | 2 | 5 | 1 | 14 |
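split_locator is pycosio-internal and not shown in this excerpt; for get_client_kwargs above it behaves roughly like a first-segment split:

```python
def split_locator(path):
    # Rough sketch: first path segment is the locator, the rest is the key.
    locator, _, tail = path.strip('/').partition('/')
    return locator, tail

print(split_locator('bucket/dir/obj.bin'))  # ('bucket', 'dir/obj.bin')
```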
2,916 | Accelize/pycosio | Accelize_pycosio/tests_storage_package/mock.py | tests_storage_package.mock.MockRawIO |
class MockRawIO(_ObjectRawIORandomWriteBase):
"""Mock Raw IO"""
_SYSTEM_CLASS = MockSystem
def _flush(self, buffer, start, end):
"""
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
start (int): Start of buffer position to flush.
Supported only if random write supported.
end (int): End of buffer position to flush.
Supported only if random write supported.
"""
self._client.put_object(
content=buffer, data_range=(start, end), **self._client_kwargs)
def _create(self):
"""
Create the file if not exists.
"""
self._client.put_object(new_file=True, **self._client_kwargs)
def _read_range(self, start, end=0):
"""
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: number of bytes read
"""
try:
return self._client.get_object(
data_range=(start, end or None), **self._client_kwargs)
except _Error416:
return b''
|
class MockRawIO(_ObjectRawIORandomWriteBase):
'''Mock Raw IO'''
def _flush(self, buffer, start, end):
'''
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
start (int): Start of buffer position to flush.
Supported only if random write supported.
end (int): End of buffer position to flush.
Supported only if random write supported.
'''
pass
def _create(self):
'''
Create the file if not exists.
'''
pass
def _read_range(self, start, end=0):
'''
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: number of bytes read
'''
pass
| 4 | 4 | 12 | 1 | 4 | 7 | 1 | 1.69 | 1 | 1 | 1 | 0 | 3 | 0 | 3 | 57 | 41 | 6 | 13 | 5 | 9 | 22 | 11 | 5 | 7 | 2 | 7 | 1 | 4 |
2,917 | Accelize/pycosio | Accelize_pycosio/pycosio/storage/oss.py | pycosio.storage.oss.OSSBufferedIO |
class OSSBufferedIO(_ObjectBufferedIOBase):
"""Buffered binary OSS Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r' for reading (default) or 'w' for writing.
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): OSS2 Auth keyword arguments and endpoint.
This is generally OSS credentials and configuration.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
__slots__ = ('_bucket', '_key', '_upload_id')
_RAW_CLASS = OSSRawIO
#: Minimal buffer_size in bytes (OSS multipart upload minimal part size)
MINIMUM_BUFFER_SIZE = 102400
def __init__(self, *args, **kwargs):
_ObjectBufferedIOBase.__init__(self, *args, **kwargs)
self._bucket = self._raw._bucket
self._key = self._raw._key
self._upload_id = None
def _flush(self):
"""
Flush the write buffers of the stream.
"""
# Initialize multipart upload
if self._upload_id is None:
with _handle_oss_error():
self._upload_id = self._bucket.init_multipart_upload(
self._key).upload_id
# Upload part with workers
response = self._workers.submit(
self._bucket.upload_part, key=self._key, upload_id=self._upload_id,
part_number=self._seek, data=self._get_buffer().tobytes())
# Save part information
self._write_futures.append(
dict(response=response, part_number=self._seek))
def _close_writable(self):
"""
Close the object in write mode.
"""
# Wait parts upload completion
parts = [_PartInfo(part_number=future['part_number'],
etag=future['response'].result().etag)
for future in self._write_futures]
# Complete multipart upload
with _handle_oss_error():
try:
self._bucket.complete_multipart_upload(
key=self._key, upload_id=self._upload_id, parts=parts)
except _OssError:
# Clean up failed upload
self._bucket.abort_multipart_upload(
key=self._key, upload_id=self._upload_id)
raise
|
class OSSBufferedIO(_ObjectBufferedIOBase):
'''Buffered binary OSS Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r' for reading (default) or 'w' for writing.
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): OSS2 Auth keyword arguments and endpoint.
This is generally OSS credentials and configuration.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
'''
def __init__(self, *args, **kwargs):
pass
def _flush(self):
'''
Flush the write buffers of the stream.
'''
pass
def _close_writable(self):
'''
Close the object in write mode.
'''
pass
| 4 | 3 | 14 | 1 | 9 | 4 | 2 | 0.87 | 1 | 1 | 0 | 0 | 3 | 4 | 3 | 51 | 68 | 10 | 31 | 13 | 27 | 27 | 23 | 12 | 19 | 2 | 6 | 2 | 5 |
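_flush and _close_writable above follow the usual multipart-upload shape: submit each part to a worker pool, remember its part number, then block on every future before completing. A generic sketch with concurrent.futures; upload_part and complete are hypothetical stand-ins for the SDK calls:

```python
from concurrent.futures import ThreadPoolExecutor

def upload_multipart(chunks, upload_part, complete):
    """upload_part(part_number, data) -> etag; complete(parts) finalizes."""
    futures = []
    with ThreadPoolExecutor() as workers:
        for part_number, data in enumerate(chunks, start=1):
            response = workers.submit(upload_part, part_number, data)
            futures.append(dict(response=response, part_number=part_number))
        # Waiting on result() mirrors _close_writable collecting etags.
        parts = [(f['part_number'], f['response'].result()) for f in futures]
    complete(parts)
```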
2,918 | Accelize/pycosio | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_oss.py | tests.test_storage_oss.test_mocked_storage.ListResult |
class ListResult(Response):
"""
oss2.models.ListBucketsResult
oss2.models.ListObjectsResult
"""
is_truncated = False
next_marker = ''
|
class ListResult(Response):
'''
oss2.models.ListBucketsResult
oss2.models.ListObjectsResult
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.33 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 7 | 0 | 3 | 3 | 2 | 4 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
2,919 | Accelize/pycosio | Accelize_pycosio/pycosio/storage/http.py | pycosio.storage.http._HTTPSystem |
class _HTTPSystem(_SystemBase):
"""
HTTP system.
"""
# Request Timeout (seconds)
_TIMEOUT = 5
def get_client_kwargs(self, path):
"""
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
"""
return dict(url=path)
def _get_client(self):
"""
HTTP client
Returns:
requests.Session: client
"""
return _requests.Session()
def _get_roots(self):
"""
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
"""
return 'http://', 'https://'
def _head(self, client_kwargs):
"""
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
"""
return _handle_http_errors(
self.client.request(
'HEAD', timeout=self._TIMEOUT, **client_kwargs)).headers
|
class _HTTPSystem(_SystemBase):
'''
HTTP system.
'''
def get_client_kwargs(self, path):
'''
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
'''
pass
def _get_client(self):
'''
HTTP client
Returns:
requests.Session: client
'''
pass
def _get_roots(self):
'''
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
'''
pass
def _head(self, client_kwargs):
'''
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
'''
pass
| 5 | 5 | 10 | 2 | 3 | 6 | 1 | 2.42 | 1 | 2 | 0 | 0 | 4 | 0 | 4 | 62 | 52 | 11 | 12 | 6 | 7 | 29 | 10 | 6 | 5 | 1 | 5 | 0 | 4 |
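_head above reduces to a plain HEAD request; a standalone sketch with requests (assumed installed):

```python
import requests

session = requests.Session()
response = session.request('HEAD', 'https://example.com', timeout=5)
response.raise_for_status()  # rough stand-in for _handle_http_errors
print(response.headers.get('Content-Type'))
```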
2,920 | Accelize/pycosio | Accelize_pycosio/tests_storage_package/mock.py | tests_storage_package.mock.MockBufferedIO |
class MockBufferedIO(_ObjectBufferedIORandomWriteBase):
"""Mock Buffered IO"""
_RAW_CLASS = MockRawIO
|
class MockBufferedIO(_ObjectBufferedIORandomWriteBase):
'''Mock Buffered IO'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 51 | 3 | 0 | 2 | 2 | 1 | 1 | 2 | 2 | 1 | 0 | 7 | 0 | 0 |
2,921 | Accelize/pycosio | Accelize_pycosio/tests/test_storage.py | tests.test_storage.StorageTester |
class StorageTester:
"""
Class that contains a common set of tests for storage.
Args:
system (pycosio._core.io_system.SystemBase instance):
System to test.
raw_io (pycosio._core.io_raw.ObjectRawIOBase subclass):
Raw IO class.
buffered_io (pycosio._core.io_buffered.ObjectBufferedIOBase subclass):
Buffered IO class.
storage_mock (tests.storage_mock.ObjectStorageMock instance):
Storage mock in use, if any.
storage_info (dict): Storage information from pycosio.mount.
"""
def __init__(self, system=None, raw_io=None, buffered_io=None,
storage_mock=None, unsupported_operations=None,
storage_info=None, system_parameters=None, root=None):
if system is None:
system = storage_info['system_cached']
if raw_io is None:
raw_io = storage_info['raw']
if buffered_io is None:
buffered_io = storage_info['buffered']
if system_parameters is None and storage_info:
system_parameters = storage_info['system_parameters']
if not system_parameters:
system_parameters = dict(storage_parameters=dict())
else:
system_parameters = _deepcopy(system_parameters)
self._system_parameters = system_parameters
self._system_parameters['storage_parameters'][
'pycosio.system_cached'] = system
self._system = system
self._raw_io = raw_io
self._buffered_io = buffered_io
self._storage_mock = storage_mock
self._unsupported_operations = unsupported_operations or tuple()
# Get storage root
if not root:
root = system.roots[0]
# Defines randomized names for locator and objects
self.locator = self._get_id()
self.locator_url = '/'.join((root, self.locator))
self.base_dir_name = '%s/' % self._get_id()
self.base_dir_path = '%s/%s' % (self.locator, self.base_dir_name)
self.base_dir_url = root + self.base_dir_path
# Run test sequence
self._objects = set()
self._to_clean = self._objects.add
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
from pycosio._core.exceptions import ObjectNotFoundError
# Remove objects, and once empty the locator
for obj in reversed(sorted(self._objects, key=str.lower)):
self._objects.discard(obj)
try:
self._system.remove(obj, relative=True)
except (ObjectNotFoundError, _UnsupportedOperation):
continue
def test_common(self):
"""
Common set of tests
"""
self._test_system_locator()
self._test_system_objects()
self._test_raw_io()
self._test_buffered_io()
# TODO: Add pycosio public functions tests
# Only if mocked
if self._storage_mock is not None:
self._test_mock_only()
def _is_supported(self, feature):
"""
Return True if a feature is supported.
Args:
feature (str): Feature to support.
Returns:
bool: Feature is supported.
"""
return feature not in self._unsupported_operations
@staticmethod
def _get_id():
"""
Return a unique ID.
Returns:
str: id
"""
return 'pycosio%s' % (str(_uuid()).replace('-', ''))
def _test_raw_io(self):
"""
Tests raw IO.
"""
from os import SEEK_END, SEEK_CUR
size = 100
file_name = 'raw_file0.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
content = _urandom(size)
# Open file in write mode
assert not self._system.exists(file_path), 'Raw write, file not exists'
if self._is_supported('write'):
with self._raw_io(file_path, 'wb',
**self._system_parameters) as file:
assert self._system.getsize(file_path) == 0, \
'Raw write, file must exist but empty'
# Get file file type for later
is_seekable = file.seekable()
try:
max_flush_size = file.MAX_FLUSH_SIZE
except AttributeError:
max_flush_size = 0
# Test: Write blocs of data
assert file.write(content[:10]) == 10, \
'Raw write, written size match'
if is_seekable:
assert file.write(b'\0' * 10) == 10, \
'Raw write, written size match'
else:
assert file.write(content[10:20]) == 10, \
'Raw write, written size match'
assert file.write(content[20:]) == 80, \
'Raw write, written size match'
# Test: tell
if is_seekable:
assert file.tell() == size,\
'Raw write, tell match written size'
# Test write seek back and write
assert file.seek(10) == 10, \
'Raw write, seek position match'
assert file.write(content[10:20]) == 10, \
'Raw write, written size match'
assert file.tell() == 20,\
'Raw write, tell match ending position'
else:
# Test not seekable raises Unsupported exception
with _pytest.raises(_UnsupportedOperation):
file.tell()
with _pytest.raises(_UnsupportedOperation):
file.seek(0)
# Test: read in write mode is not supported
with _pytest.raises(_UnsupportedOperation):
file.read()
with _pytest.raises(_UnsupportedOperation):
file.readinto(bytearray(100))
else:
is_seekable = False
max_flush_size = 0
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
self._raw_io(file_path, 'wb', **self._system_parameters)
# Create pre-existing file
if self._storage_mock:
self._storage_mock.put_object(
self.locator, self.base_dir_name + file_name, content)
# Open file in read mode
with self._raw_io(file_path, **self._system_parameters) as file:
# Test: read_all
assert file.readall() == content, 'Raw read all, content match'
assert file.tell() == size, 'Raw read all, tell match'
# Test: seek and read_all
assert file.seek(10) == 10, 'Raw seek 10 & read all, seek match'
assert file.readall() == content[10:],\
'Raw seek 10 & read all, content match'
assert file.tell() == size,\
'Raw seek 10 & read all, tell match'
# Test: seek from current position & read_all
assert file.seek(-50, SEEK_CUR) == 50, \
'Raw seek from current & read all, seek match'
assert file.readall() == content[-50:],\
'Raw seek from current & read all, content match'
assert file.tell() == size,\
'Raw seek from current & read all, tell match'
# Test: seek with bad whence value
with _pytest.raises(ValueError):
file.seek(0, 10)
# Test: Cannot write in read mode
with _pytest.raises(_UnsupportedOperation):
file.write(b'0')
# Test: Flush has no effect in read mode
file.flush()
# Test: _read_range
assert file.seek(0) == 0, 'Raw seek 0, seek match'
buffer = bytearray(40)
assert file.readinto(buffer) == 40,\
'Raw read into, returned size match'
assert bytes(buffer) == content[:40], 'Raw read into, content match'
assert file.tell() == 40, 'Raw read into, tell match'
buffer = bytearray(40)
assert file.readinto(buffer) == 40,\
'Raw read into from 40, returned size match'
assert bytes(buffer) == content[40:80],\
'Raw read into from 40, content match'
assert file.tell() == 80, 'Raw read into from 40, tell match'
buffer = bytearray(40)
assert file.readinto(buffer) == 20,\
'Raw read into partially over EOF, returned size match'
assert bytes(buffer) == content[80:] + b'\0' * 20,\
'Raw read into partially over EOF, content match'
assert file.tell() == size,\
'Raw read into partially over EOF, tell match'
buffer = bytearray(40)
assert file.readinto(buffer) == 0,\
'Raw read into over EOF, returned size match'
assert bytes(buffer) == b'\0' * 40,\
'Raw read into over EOF, content match'
assert file.tell() == size,\
'Raw read into over EOF, tell match'
file.seek(-10, SEEK_END)
buffer = bytearray(20)
assert file.readinto(buffer) == 10,\
'Raw seek from end & read into, returned size match'
assert bytes(buffer) == content[90:] + b'\0' * 10,\
'Raw seek from end & read into, content match'
assert file.tell() == size,\
'Raw seek from end & read into, tell match'
# Test: Append mode
if self._is_supported('write'):
# Test: Appending on existing file
with self._raw_io(file_path, mode='ab',
**self._system_parameters) as file:
file.write(content)
with self._raw_io(file_path, **self._system_parameters) as file:
assert file.readall() == content + content,\
'Raw append, previous content read match'
# Test: Appending on not existing file
file_name = 'raw_file1.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
with self._raw_io(file_path, mode='ab',
**self._system_parameters) as file:
file.write(content)
with self._raw_io(file_path, **self._system_parameters) as file:
assert file.readall() == content,\
'Raw append, file create content match'
# Test: Seek out of file and write
if is_seekable:
file_name = 'raw_file2.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
with self._raw_io(file_path, 'wb',
**self._system_parameters) as file:
file.seek(256)
file.write(b'\x01')
with self._raw_io(file_path, 'rb',
**self._system_parameters) as file:
assert file.readall() == b'\0' * 256 + b'\x01',\
'Raw seek, null padding read'
# Test: write big file
if self._is_supported('write') and max_flush_size:
file_name = 'raw_file3.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
size = max_flush_size * 4
content = _urandom(size)
with self._raw_io(file_path, 'wb',
**self._system_parameters) as file:
file.write(content)
with self._raw_io(file_path, 'rb',
**self._system_parameters) as file:
assert file.readall() == content,\
'Raw Write big file, content match'
# Test exclusive write mode
if self._is_supported('write'):
file_name = 'raw_file4.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
# Create file
with self._raw_io(file_path, 'xb', **self._system_parameters):
pass
# File already exists
with _pytest.raises(FileExistsError):
self._raw_io(file_path, 'xb', **self._system_parameters)
def _test_buffered_io(self):
"""
Tests buffered IO.
"""
from pycosio.io import ObjectBufferedIOBase
# Set buffer size
buffer_size = 16 * 1024
# Test: write data, not multiple of buffer
file_name = 'buffered_file0.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
size = int(4.5 * buffer_size)
content = _urandom(size)
if self._is_supported('write'):
with self._buffered_io(file_path, 'wb', buffer_size=buffer_size,
**self._system_parameters) as file:
file.write(content)
else:
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
self._buffered_io(file_path, 'wb', buffer_size=buffer_size,
**self._system_parameters)
# Create pre-existing file
if self._storage_mock:
self._storage_mock.put_object(
self.locator, self.base_dir_name + file_name, content)
# Test: Read data, not multiple of buffer
with self._buffered_io(file_path, 'rb', buffer_size=buffer_size,
**self._system_parameters) as file:
assert content == file.read(),\
'Buffered read, not multiple of buffer size'
# Test: write data, multiple of buffer
file_name = 'buffered_file1.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
size = int(5 * buffer_size)
content = _urandom(size)
if self._is_supported('write'):
with self._buffered_io(file_path, 'wb', buffer_size=buffer_size,
**self._system_parameters) as file:
file.write(content)
# Test: Flush manually
file.flush()
# Test: read in write mode is not supported
with _pytest.raises(_UnsupportedOperation):
file.read()
with _pytest.raises(_UnsupportedOperation):
file.read1()
with _pytest.raises(_UnsupportedOperation):
file.readinto(bytearray(100))
with _pytest.raises(_UnsupportedOperation):
file.readinto1(bytearray(100))
with _pytest.raises(_UnsupportedOperation):
file.peek()
# Test: Unsupported if not seekable
if not file.seekable():
with _pytest.raises(_UnsupportedOperation):
file.tell()
with _pytest.raises(_UnsupportedOperation):
file.seek(0)
else:
# Create pre-existing file
if self._storage_mock:
self._storage_mock.put_object(
self.locator, self.base_dir_name + file_name, content)
# Test: Read data, multiple of buffer
with self._buffered_io(file_path, 'rb', buffer_size=buffer_size,
**self._system_parameters) as file:
# Test full data read
assert content == file.read(),\
'Buffered read, multiple of buffer size'
# Test: seek
assert file.seek(10) == 10, 'Buffered read, seek'
assert file.tell() == 10, 'Buffered read, tell match seek'
# Test: peek:
assert content[10:110] == file.peek(100), \
'Buffered read, peek content match'
assert file.tell() == 10, \
'Buffered read, peek tell match'
# Test: Cannot write in read mode
with _pytest.raises(_UnsupportedOperation):
file.write(b'0')
# Test: Flush has no effect in read mode
file.flush()
# Check if pycosio subclass
is_pycosio_subclass = isinstance(file, ObjectBufferedIOBase)
# Test: Buffer limits and default values
if is_pycosio_subclass:
with self._buffered_io(
file_path, **self._system_parameters) as file:
assert file._buffer_size == file.DEFAULT_BUFFER_SIZE, \
'Buffered, Default buffer size'
# Get limits values
minimum_buffer_size = file.MINIMUM_BUFFER_SIZE
maximum_buffer_size = file.MAXIMUM_BUFFER_SIZE
# Get current max buffers
calculated_max_buffers = file._max_buffers
# Test: Minimum buffer size
if minimum_buffer_size > 1:
with self._buffered_io(
file_path, buffer_size=minimum_buffer_size // 2,
**self._system_parameters) as file:
assert file._buffer_size == minimum_buffer_size, \
'Buffered, Minimum buffer size'
# Test: Maximum buffer size
if maximum_buffer_size:
with self._buffered_io(
file_path, buffer_size=maximum_buffer_size * 2,
**self._system_parameters) as file:
assert file._buffer_size == maximum_buffer_size, \
'Buffered, Maximum buffer size'
# Test: Maximum buffer count
assert calculated_max_buffers, \
'Buffered, calculated buffer count not 0'
max_buffers = calculated_max_buffers * 2
with self._buffered_io(
file_path, mode='rb', max_buffers=max_buffers,
**self._system_parameters) as file:
assert file._max_buffers == max_buffers, \
'Buffered, Maximum buffer count'
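        # Illustrative note (added, hedged): for an S3-like backend where
        # MINIMUM_BUFFER_SIZE is 5242880 (the 5 MiB multipart minimum),
        # requesting buffer_size=5242880 // 2 above is clamped so that
        # file._buffer_size == 5242880, which is exactly what these
        # assertions verify.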
def _test_system_locator(self):
"""
Test system internals related to locators.
"""
system = self._system
# Test: Create locator
if self._is_supported('mkdir'):
system.make_dir(self.locator_url)
self._to_clean(self.locator)
else:
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
system.make_dir(self.locator_url)
# Create a preexisting locator
if self._storage_mock:
self._storage_mock.put_locator(self.locator)
# Test: Check locator listed
if self._is_supported('listdir'):
for name, header in system._list_locators():
if name == self.locator and isinstance(header, dict):
break
else:
_pytest.fail('Locator "%s" not found' % self.locator)
# Test: Check locator header return a mapping
assert hasattr(system.head(path=self.locator), '__getitem__'), \
'List locators, header is mapping'
else:
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
system._list_locators()
# Test: remove locator
tmp_locator = self._get_id()
self._to_clean(tmp_locator)
if self._is_supported('mkdir'):
system.make_dir(tmp_locator)
elif self._storage_mock:
self._storage_mock.put_locator(tmp_locator)
if self._is_supported('remove'):
if self._is_supported('listdir'):
assert tmp_locator in [
name for name, _ in system._list_locators()],\
'Remove locator, locator exists'
system.remove(tmp_locator)
if self._is_supported('listdir'):
assert tmp_locator not in [
name for name, _ in system._list_locators()],\
'Remove locator, locator not exists'
else:
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
system.remove(tmp_locator)
def _test_system_objects(self):
"""
Test system internals related to objects.
"""
from pycosio._core.exceptions import ObjectNotFoundError
system = self._system
if self._is_supported('mkdir'):
# Create parent directory
system.make_dir(self.base_dir_path)
self._to_clean(self.base_dir_path)
# Test: Make a directory (With trailing /)
dir_name0 = 'directory0/'
dir_path0 = self.base_dir_path + dir_name0
system.make_dir(dir_path0)
self._to_clean(dir_path0)
if self._is_supported('listdir'):
assert dir_path0 in self._list_objects_names(), \
'Create directory, exists (with "/")'
# Test: Check directory header
assert hasattr(system.head(path=dir_path0), '__getitem__'), \
'Head directory, header is mapping'
# Test: Make a directory (Without trailing /)
dir_name1 = 'directory1'
dir_path1 = self.base_dir_path + dir_name1
system.make_dir(dir_path1)
dir_path1 += '/'
self._to_clean(dir_path1)
if self._is_supported('listdir'):
assert dir_path1 in self._list_objects_names(), \
'Create directory, exists (without "/")'
# Test: Listing empty directory
assert len(tuple(system.list_objects(dir_path0))) == 0, \
'List objects, empty directory'
# Write a sample file
file_name = 'sample_1K.dat'
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
file_url = self.base_dir_url + file_name
size = 1024
content = _urandom(size)
if self._is_supported('write'):
with self._raw_io(file_path, mode='w',
**self._system_parameters) as file:
# Write content
file.write(content)
elif self._storage_mock:
# Create pre-existing file
self._storage_mock.put_object(
self.locator, self.base_dir_name + file_name, content)
# Estimate creation time
create_time = _time()
# Test: Check file header
assert hasattr(system.head(path=file_path), '__getitem__'), \
'Head file, header is mapping'
# Test: Check file size
try:
assert system.getsize(file_path) == size, \
'Head file, size match'
except _UnsupportedOperation:
# May not be supported on all files, if supported
if self._is_supported('getsize'):
raise
# Test: Check file modification time
try:
file_time = system.getmtime(file_path)
if self._is_supported('write'):
assert file_time == _pytest.approx(create_time, 2), \
'Head file, modification time match'
except _UnsupportedOperation:
# May not be supported on all files, if supported
if self._is_supported('getmtime'):
raise
# Test: Check file creation time
try:
file_time = system.getctime(file_path)
if self._is_supported('write'):
assert file_time == _pytest.approx(create_time, 2), \
'Head file, creation time match'
except _UnsupportedOperation:
# May not be supported on all files, if supported
if self._is_supported('getctime'):
raise
# Test: Check path and URL handling
with self._raw_io(file_path, **self._system_parameters) as file:
assert file.name == file_path, 'Open file, path match'
with self._raw_io(file_url, **self._system_parameters) as file:
assert file.name == file_url, 'Open file, URL match'
# Write some files
files = set()
files.add(file_path)
for i in range(11):
if i < 10:
# Files in directory
file_name = 'file%d.dat' % i
path = self.base_dir_path + file_name
rel_path = self.base_dir_name + file_name
else:
# File in locator
rel_path = self._get_id() + '.dat'
path = '%s/%s' % (self.locator, rel_path)
files.add(path)
self._to_clean(path)
if self._is_supported('write'):
with self._raw_io(
path, mode='w', **self._system_parameters) as file:
file.flush()
elif self._storage_mock:
# Create pre-existing file
self._storage_mock.put_object(self.locator, rel_path, b'')
# Test: List objects
if self._is_supported('listdir'):
objects = tuple(system.list_objects(self.locator))
objects_list = set(
'%s/%s' % (self.locator, name) for name, _ in objects)
for file in files:
assert file in objects_list, 'List objects, file name match'
for _, header in objects:
assert hasattr(header, '__getitem__'),\
'List objects, file header is mapping'
# Test: List objects, with limited output
max_request_entries = 5
entries = len(tuple(system.list_objects(
max_request_entries=max_request_entries)))
assert entries == max_request_entries, \
'List objects, Number of entries match'
# Test: List objects, no objects found
with _pytest.raises(ObjectNotFoundError):
list(system.list_objects(
self.base_dir_path + 'dir_not_exists/'))
# Test: List objects on locator root, no objects found
with _pytest.raises(ObjectNotFoundError):
list(system.list_objects(self.locator + '/dir_not_exists/'))
# Test: List objects, locator not found
with _pytest.raises(ObjectNotFoundError):
list(system.list_objects(self._get_id()))
else:
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
list(system.list_objects(self.base_dir_path))
# Test: copy
copy_path = file_path + '.copy'
self._to_clean(copy_path)
if self._is_supported('copy'):
system.copy(file_path, copy_path)
assert system.getsize(copy_path) == size, 'Copy file, size match'
else:
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
system.copy(file_path, copy_path)
# Test: Normal file is not symlink
assert not system.islink(file_path), 'Symlink, file is not symlink'
# Test: Symlink
if self._is_supported('symlink'):
link_path = self.base_dir_path + 'symlink'
            # TODO: Enable these tests once symlink creation is implemented
            # Test: Is symlink
            # assert system.islink(link_path)
            # assert system.islink(header=system.head(link_path))
# Test: Remove file
if self._is_supported('remove'):
if self._is_supported('listdir'):
assert file_path in self._list_objects_names(), \
'Remove file, file exists'
system.remove(file_path)
if self._is_supported('listdir'):
assert file_path not in self._list_objects_names(), \
'Remove file, file not exists'
else:
# Test: Unsupported
with _pytest.raises(_UnsupportedOperation):
system.remove(file_path)
def _test_mock_only(self):
"""
Tests that can only be performed on mocks
"""
file_name = 'mocked.dat'
# Create a file
file_path = self.base_dir_path + file_name
self._to_clean(file_path)
content = _urandom(20)
if self._is_supported('write'):
with self._raw_io(
file_path, mode='w', **self._system_parameters) as file:
file.write(content)
file.flush()
elif self._storage_mock:
# Create pre-existing file
self._storage_mock.put_object(
self.locator, self.base_dir_name + file_name, content)
        # Test: Read does not mask other server exceptions
with self._storage_mock.raise_server_error():
with _pytest.raises(OSError):
self._raw_io(file_path, **self._system_parameters).read(10)
def _list_objects_names(self):
"""
List objects names.
Returns:
set of str: objects names.
"""
return set('%s/%s' % (self.locator, name)
for name, _ in self._system.list_objects(self.locator))
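# Hedged usage sketch (added; "MySystem", "MyRawIO" and "MyBufferedIO"
# are hypothetical placeholders). The tester is a context manager and
# test_common() runs the whole battery above:
#
# with StorageTester(system=MySystem(), raw_io=MyRawIO,
#                    buffered_io=MyBufferedIO,
#                    storage_mock=storage_mock) as tester:
#     tester.test_common()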
|
class StorageTester:
'''
Class that contains a common set of tests for storage.
Args:
system (pycosio._core.io_system.SystemBase instance):
System to test.
raw_io (pycosio._core.io_raw.ObjectRawIOBase subclass):
Raw IO class.
buffered_io (pycosio._core.io_buffered.ObjectBufferedIOBase subclass):
Buffered IO class.
storage_mock (tests.storage_mock.ObjectStorageMock instance):
Storage mock in use, if any.
storage_info (dict): Storage information from pycosio.mount.
'''
def __init__(self, system=None, raw_io=None, buffered_io=None,
storage_mock=None, unsupported_operations=None,
storage_info=None, system_parameters=None, root=None):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, exc_traceback):
pass
def test_common(self):
'''
Common set of tests
'''
pass
def _is_supported(self, feature):
'''
Return True if a feature is supported.
Args:
feature (str): Feature to support.
Returns:
bool: Feature is supported.
'''
pass
@staticmethod
def _get_id():
'''
Return an unique ID.
Returns:
str: id
'''
pass
def _test_raw_io(self):
'''
Tests raw IO.
'''
pass
def _test_buffered_io(self):
'''
Tests buffered IO.
'''
pass
def _test_system_locator(self):
'''
Test system internals related to locators.
'''
pass
def _test_system_objects(self):
'''
Test system internals related to objects.
'''
pass
def _test_mock_only(self):
'''
Tests that can only be performed on mocks
'''
pass
def _list_objects_names(self):
'''
List objects names.
Returns:
set of str: objects names.
'''
pass
| 14 | 10 | 63 | 10 | 41 | 11 | 6 | 0.3 | 0 | 16 | 2 | 0 | 11 | 13 | 12 | 12 | 779 | 134 | 495 | 87 | 475 | 150 | 387 | 77 | 370 | 26 | 0 | 4 | 74 |
2,922 |
Accelize/pycosio
|
Accelize_pycosio/tests/storage_mock.py
|
tests.storage_mock.ObjectStorageMock
|
class ObjectStorageMock:
"""
Mocked Object storage.
Args:
raise_404 (callable): Function to call to raise a 404 error
(Not found).
raise_416 (callable): Function to call to raise a 416 error
(End Of File/Out of range).
raise_500 (callable): Function to call to raise a 500 error
(Server exception).
base_exception (Exception subclass): Type of exception raised by the
500 error.
"""
def __init__(self, raise_404, raise_416, raise_500, format_date=None):
self._put_lock = _Lock()
self._system = None
self._locators = {}
self._header_size = None
self._header_mtime = None
self._header_ctime = None
self._raise_404 = raise_404
self._raise_416 = raise_416
self._raise_500 = raise_500
self._raise_server_error = False
if format_date is None:
from wsgiref.handlers import format_date_time
format_date = format_date_time
self._format_date = format_date
def attach_io_system(self, system):
"""
Attach IO system to use.
Args:
system (pycosio._core.io_system.SystemBase subclass):
IO system to use.
"""
self._system = system
try:
self._header_size = system._SIZE_KEYS[0]
except IndexError:
pass
try:
self._header_mtime = system._MTIME_KEYS[0]
except IndexError:
pass
try:
self._header_ctime = system._CTIME_KEYS[0]
except IndexError:
pass
@_contextmanager
def raise_server_error(self):
"""Context manager that force sotrage to raise server exception."""
self._raise_server_error = True
try:
yield
finally:
self._raise_server_error = False
def put_locator(self, locator):
"""
Put a locator.
Args:
locator (str): locator name
"""
self._locators[locator] = locator = dict(_content=dict(),)
if self._header_ctime:
locator[self._header_ctime] = self._format_date(_time())
if self._header_mtime:
locator[self._header_mtime] = self._format_date(_time())
def _get_locator(self, locator):
"""
Get a locator.
Args:
locator (str): locator name
"""
try:
return self._locators[locator]
except KeyError:
self._raise_404()
def get_locator(self, locator, prefix=None, limit=None,
raise_404_if_empty=True, first_level=False, relative=False):
"""
Get locator content.
Args:
locator (str): locator name
prefix (str): Filter returned object with this prefix.
limit (int): Maximum number of result to return.
raise_404_if_empty (bool): Raise 404 Error if empty.
first_level (bool): If True, return only first level after prefix.
relative (bool): If True, return objects names relative to prefix.
Returns:
dict: objects names, objects headers.
"""
if prefix is None:
prefix = ''
headers = dict()
for name, header in self._get_locator_content(locator).items():
if name.startswith(prefix):
if (relative and prefix) or first_level:
# Relative path
if prefix:
name = name.split(prefix)[1].lstrip('/')
# Get first level element
if first_level and '/' in name.rstrip('/'):
name = name.split('/', 1)[0].rstrip('/')
if name:
name += '/'
# Set name absolute
if not relative and prefix and name:
name = '%s/%s' % (prefix.rstrip('/'), name)
# Skip already existing
if first_level and name in headers:
continue
headers[name] = header.copy()
del headers[name]['_content']
if len(headers) == limit:
break
if not headers and raise_404_if_empty:
self._raise_404()
return headers
def get_locators(self):
"""
Get locators headers.
Returns:
dict: locators names, locators headers.
"""
headers = dict()
for name, header in self._locators.items():
headers[name] = header.copy()
del headers[name]['_content']
if not headers:
self._raise_404()
return headers
def _get_locator_content(self, locator):
"""
Get locator content.
Args:
locator (str): locator name
Returns:
dict: objects names, objects with header.
"""
return self._get_locator(locator)['_content']
def head_locator(self, locator):
"""
Get locator header
Args:
locator (str): locator name
"""
header = self._get_locator(locator).copy()
del header['_content']
return header
def get_locator_ctime(self, locator):
"""
Get locator creation time.
Args:
locator (str): locator name
Returns:
object: Creation time.
"""
return self._get_locator(locator)[self._header_ctime]
def get_locator_mtime(self, locator):
"""
Get locator modification time.
Args:
locator (str): locator name
Returns:
object: Modification time.
"""
return self._get_locator(locator)[self._header_mtime]
def get_locator_size(self, locator):
"""
Get locator size.
Args:
locator (str): locator name
Returns:
int: Size.
"""
return self._get_locator(locator)[self._header_size]
def delete_locator(self, locator):
"""
Delete locator.
Args:
locator (str): locator name
"""
try:
del self._locators[locator]
except KeyError:
self._raise_404()
def put_object(self, locator, path, content=None, headers=None,
data_range=None, new_file=False):
"""
Put object.
Args:
locator (str): locator name
path (str): Object path.
content (bytes like-object): File content.
headers (dict): Header to put with the file.
data_range (tuple of int): Range of position of content.
new_file (bool): If True, force new file creation.
Returns:
dict: File header.
"""
with self._put_lock:
if new_file:
self.delete_object(locator, path, not_exists_ok=True)
try:
# Existing file
file = self._get_locator_content(locator)[path]
except KeyError:
# New file
self._get_locator_content(locator)[path] = file = {
'Accept-Ranges': 'bytes',
'ETag': str(_uuid()),
'_content': bytearray(),
'_lock': _Lock()
}
if self._header_size:
file[self._header_size] = 0
if self._header_ctime:
file[self._header_ctime] = self._format_date(_time())
# Update file
with file['_lock']:
if content:
file_content = file['_content']
# Write full content
if not data_range or (
data_range[0] is None and data_range[1] is None):
file_content[:] = content
# Write content range
else:
# Define range
start, end = data_range
if start is None:
start = 0
if end is None:
end = start + len(content)
# Add padding if missing data
if start > len(file_content):
file_content[len(file_content):start] = (
start - len(file_content)) * b'\0'
# Flush new content
file_content[start:end] = content
if headers:
file.update(headers)
if self._header_size:
file[self._header_size] = len(file['_content'])
if self._header_mtime:
file[self._header_mtime] = self._format_date(_time())
# Return Header
header = file.copy()
del header['_content']
return header
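    # Illustrative note (added, hedged): calling
    # put_object('locator', 'obj', b'abc', data_range=(10, None)) on an
    # empty object takes the padding branch above, extending the content
    # with 10 NUL bytes before the write, so a full read afterwards
    # returns b'\x00' * 10 + b'abc'.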
def concat_objects(self, locator, path, parts):
"""
Concatenates objects as one object.
Args:
locator (str): locator name
path (str): Object path.
parts (iterable of str): Paths of objects to concatenate.
Returns:
dict: File header.
"""
content = bytearray()
for part in parts:
content += self.get_object(locator, part)
return self.put_object(locator, path, content)
def copy_object(self, src_path, dst_path, src_locator=None,
dst_locator=None):
"""
Copy object.
Args:
src_path (str): Source object path.
dst_path (str): Destination object path.
src_locator (str): Source locator.
dst_locator (str): Destination locator.
"""
if src_locator is None:
src_locator, src_path = src_path.split('/', 1)
if dst_locator is None:
dst_locator, dst_path = dst_path.split('/', 1)
file = self._get_object(src_locator, src_path).copy()
del file['_lock']
file = _deepcopy(file)
file['_lock'] = _Lock()
self._get_locator_content(dst_locator)[dst_path] = file
if self._header_mtime:
file[self._header_mtime] = self._format_date(_time())
def _get_object(self, locator, path):
"""
Get object.
Args:
locator (str): locator name
path (str): Object path.
Returns:
dict: Object
"""
# Get object
try:
return self._get_locator_content(locator)[path]
except KeyError:
self._raise_404()
def get_object(self, locator, path, data_range=None, header=None):
"""
Get object content.
Args:
locator (str): locator name.
path (str): Object path.
data_range (tuple of int): Range as (start, end) tuple.
header (dict): HTTP header that can contain Range.
Returns:
bytes: File content.
"""
# Simulate server error
if self._raise_server_error:
self._raise_500()
# Read file
content = self._get_object(locator, path)['_content']
size = len(content)
if header and header.get('Range'):
# Return object part
data_range = header['Range'].split('=')[1]
start, end = data_range.split('-')
start = int(start)
try:
end = int(end) + 1
except ValueError:
end = size
elif data_range is not None:
start, end = data_range
else:
start = None
end = None
if start is None:
start = 0
elif start >= size:
# EOF reached
self._raise_416()
if end is None or end > size:
end = size
return content[start:end]
def head_object(self, locator, path):
"""
Get object header.
Args:
locator (str): locator name
path (str): Object path..
Returns:
dict: header.
"""
header = self._get_object(locator, path).copy()
del header['_content']
return header
def get_object_ctime(self, locator, path):
"""
Get object creation time.
Args:
locator (str): locator name
path (str): Object path.
Returns:
object: Creation time.
"""
return self._get_object(locator, path)[self._header_ctime]
def get_object_mtime(self, locator, path):
"""
Get object modification time.
Args:
locator (str): locator name
path (str): Object path.
Returns:
object: Modification time.
"""
return self._get_object(locator, path)[self._header_mtime]
def get_object_size(self, locator, path):
"""
Get object size.
Args:
locator (str): locator name
path (str): Object path.
Returns:
int: Size.
"""
return self._get_object(locator, path)[self._header_size]
def delete_object(self, locator, path, not_exists_ok=False):
"""
Delete object.
Args:
locator (str): locator name
path (str): Object path.
not_exists_ok (bool): If True, do not raise if object not exist.
"""
try:
del self._get_locator_content(locator)[path]
except KeyError:
if not not_exists_ok:
self._raise_404()
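# Hedged usage sketch (added; exercises only methods defined above,
# with trivial callbacks standing in for real HTTP errors):
if __name__ == '__main__':
    def _not_found():
        raise KeyError('404: not found')

    def _out_of_range():
        raise ValueError('416: out of range')

    def _server_error():
        raise RuntimeError('500: server error')

    mock = ObjectStorageMock(_not_found, _out_of_range, _server_error)
    mock.put_locator('bucket')
    mock.put_object('bucket', 'dir/file.dat', b'hello world')
    # Range read returns the content slice [start:end]
    assert mock.get_object('bucket', 'dir/file.dat',
                           data_range=(0, 5)) == b'hello'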
|
class ObjectStorageMock:
'''
Mocked Object storage.
Args:
raise_404 (callable): Function to call to raise a 404 error
(Not found).
raise_416 (callable): Function to call to raise a 416 error
(End Of File/Out of range).
raise_500 (callable): Function to call to raise a 500 error
(Server exception).
base_exception (Exception subclass): Type of exception raised by the
500 error.
'''
def __init__(self, raise_404, raise_416, raise_500, format_date=None):
pass
def attach_io_system(self, system):
'''
Attach IO system to use.
Args:
system (pycosio._core.io_system.SystemBase subclass):
IO system to use.
'''
pass
@_contextmanager
def raise_server_error(self):
'''Context manager that forces the storage to raise a server exception.'''
pass
def put_locator(self, locator):
'''
Put a locator.
Args:
locator (str): locator name
'''
pass
def _get_locator(self, locator):
'''
Get a locator.
Args:
locator (str): locator name
'''
pass
def get_locator(self, locator, prefix=None, limit=None,
raise_404_if_empty=True, first_level=False, relative=False):
'''
Get locator content.
Args:
locator (str): locator name
prefix (str): Filter returned object with this prefix.
limit (int): Maximum number of result to return.
raise_404_if_empty (bool): Raise 404 Error if empty.
first_level (bool): If True, return only first level after prefix.
relative (bool): If True, return objects names relative to prefix.
Returns:
dict: objects names, objects headers.
'''
pass
def get_locators(self):
'''
Get locators headers.
Returns:
dict: locators names, locators headers.
'''
pass
def _get_locator_content(self, locator):
'''
Get locator content.
Args:
locator (str): locator name
Returns:
dict: objects names, objects with header.
'''
pass
def head_locator(self, locator):
'''
Get locator header
Args:
locator (str): locator name
'''
pass
def get_locator_ctime(self, locator):
'''
Get locator creation time.
Args:
locator (str): locator name
Returns:
object: Creation time.
'''
pass
def get_locator_mtime(self, locator):
'''
Get locator modification time.
Args:
locator (str): locator name
Returns:
object: Modification time.
'''
pass
def get_locator_size(self, locator):
'''
Get locator size.
Args:
locator (str): locator name
Returns:
int: Size.
'''
pass
def delete_locator(self, locator):
'''
Delete locator.
Args:
locator (str): locator name
'''
pass
def put_object(self, locator, path, content=None, headers=None,
data_range=None, new_file=False):
'''
Put object.
Args:
locator (str): locator name
path (str): Object path.
content (bytes like-object): File content.
headers (dict): Header to put with the file.
data_range (tuple of int): Range of position of content.
new_file (bool): If True, force new file creation.
Returns:
dict: File header.
'''
pass
def concat_objects(self, locator, path, parts):
'''
Concatenates objects as one object.
Args:
locator (str): locator name
path (str): Object path.
parts (iterable of str): Paths of objects to concatenate.
Returns:
dict: File header.
'''
pass
def copy_object(self, src_path, dst_path, src_locator=None,
dst_locator=None):
'''
Copy object.
Args:
src_path (str): Source object path.
dst_path (str): Destination object path.
src_locator (str): Source locator.
dst_locator (str): Destination locator.
'''
pass
def _get_object(self, locator, path):
'''
Get object.
Args:
locator (str): locator name
path (str): Object path.
Returns:
dict: Object
'''
pass
def get_object(self, locator, path, data_range=None, header=None):
'''
Get object content.
Args:
locator (str): locator name.
path (str): Object path.
data_range (tuple of int): Range as (start, end) tuple.
header (dict): HTTP header that can contain Range.
Returns:
bytes: File content.
'''
pass
def head_object(self, locator, path):
'''
Get object header.
Args:
locator (str): locator name
path (str): Object path..
Returns:
dict: header.
'''
pass
def get_object_ctime(self, locator, path):
'''
Get object creation time.
Args:
locator (str): locator name
path (str): Object path.
Returns:
object: Creation time.
'''
pass
def get_object_mtime(self, locator, path):
'''
Get object modification time.
Args:
locator (str): locator name
path (str): Object path.
Returns:
object: Modification time.
'''
pass
def get_object_size(self, locator, path):
'''
Get object size.
Args:
locator (str): locator name
path (str): Object path.
Returns:
int: Size.
'''
pass
def delete_object(self, locator, path, not_exists_ok=False):
'''
Delete object.
Args:
locator (str): locator name
path (str): Object path.
not_exists_ok (bool): If True, do not raise if object not exist.
'''
pass
| 25 | 23 | 19 | 3 | 9 | 8 | 3 | 0.92 | 0 | 7 | 0 | 0 | 23 | 11 | 23 | 23 | 481 | 88 | 205 | 56 | 176 | 188 | 189 | 52 | 164 | 13 | 0 | 5 | 70 |
2,923 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/swift.py
|
pycosio.storage.swift._SwiftSystem
|
class _SwiftSystem(_SystemBase):
"""
Swift system.
Args:
storage_parameters (dict): Swift connection keyword arguments.
This is generally OpenStack credentials and configuration.
(see "swiftclient.client.Connection" for more information)
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
_SIZE_KEYS = ('content-length', 'content_length', 'bytes')
_MTIME_KEYS = ('last-modified', 'last_modified')
def copy(self, src, dst, other_system=None):
"""
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass): Unused.
"""
container, obj = self.split_locator(src)
with _handle_client_exception():
self.client.copy_object(
container=container, obj=obj, destination=self.relpath(dst))
def get_client_kwargs(self, path):
"""
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
"""
container, obj = self.split_locator(path)
kwargs = dict(container=container)
if obj:
kwargs['obj'] = obj
return kwargs
def _get_client(self):
"""
Swift client
Returns:
swiftclient.client.Connection: client
"""
kwargs = self._storage_parameters
# Handles unsecure mode
if self._unsecure:
kwargs = kwargs.copy()
kwargs['ssl_compression'] = False
return _swift.client.Connection(**kwargs)
def _get_roots(self):
"""
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
"""
# URL (May have other format):
# - https://<endpoint>/v1/AUTH_<project>/<container>/<object>
return self.client.get_auth()[0],
def _head(self, client_kwargs):
"""
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
"""
with _handle_client_exception():
# Object
if 'obj' in client_kwargs:
return self.client.head_object(**client_kwargs)
# Container
return self.client.head_container(**client_kwargs)
def _make_dir(self, client_kwargs):
"""
Make a directory.
args:
client_kwargs (dict): Client arguments.
"""
with _handle_client_exception():
# Object
if 'obj' in client_kwargs:
return self.client.put_object(
client_kwargs['container'], client_kwargs['obj'], b'')
# Container
return self.client.put_container(client_kwargs['container'])
def _remove(self, client_kwargs):
"""
Remove an object.
args:
client_kwargs (dict): Client arguments.
"""
with _handle_client_exception():
# Object
if 'obj' in client_kwargs:
return self.client.delete_object(
client_kwargs['container'], client_kwargs['obj'])
# Container
return self.client.delete_container(client_kwargs['container'])
def _list_locators(self):
"""
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
"""
with _handle_client_exception():
response = self.client.get_account()
for container in response[1]:
yield container.pop('name'), container
def _list_objects(self, client_kwargs, path, max_request_entries):
"""
Lists objects.
args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
"""
kwargs = dict(prefix=path)
if max_request_entries:
kwargs['limit'] = max_request_entries
else:
kwargs['full_listing'] = True
with _handle_client_exception():
response = self.client.get_container(
client_kwargs['container'], **kwargs)
for obj in response[1]:
yield obj.pop('name'), obj
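# Hedged usage sketch (added; the storage_parameters keys are standard
# "swiftclient.client.Connection" arguments, values are placeholders):
#
# system = _SwiftSystem(storage_parameters=dict(
#     authurl='https://auth.example.com/v1.0',
#     user='demo', key='secret'))
# for name, header in system._list_locators():
#     print(name, header)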
|
class _SwiftSystem(_SystemBase):
'''
Swift system.
Args:
storage_parameters (dict): Swift connection keyword arguments.
This is generally OpenStack credentials and configuration.
(see "swiftclient.client.Connection" for more information)
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
'''
def copy(self, src, dst, other_system=None):
'''
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass): Unused.
'''
pass
def get_client_kwargs(self, path):
'''
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
'''
pass
def _get_client(self):
'''
Swift client
Returns:
swiftclient.client.Connection: client
'''
pass
def _get_roots(self):
'''
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
'''
pass
def _head(self, client_kwargs):
'''
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
'''
pass
def _make_dir(self, client_kwargs):
'''
Make a directory.
args:
client_kwargs (dict): Client arguments.
'''
pass
def _remove(self, client_kwargs):
'''
Remove an object.
args:
client_kwargs (dict): Client arguments.
'''
pass
def _list_locators(self):
'''
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
'''
pass
def _list_objects(self, client_kwargs, path, max_request_entries):
'''
Lists objects.
args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
'''
pass
| 10 | 10 | 15 | 2 | 6 | 7 | 2 | 1.36 | 1 | 1 | 0 | 0 | 9 | 0 | 9 | 67 | 161 | 31 | 55 | 21 | 45 | 75 | 50 | 21 | 40 | 3 | 5 | 2 | 17 |
2,924 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/swift.py
|
pycosio.storage.swift.SwiftRawIO
|
class SwiftRawIO(_ObjectRawIOBase):
"""Binary OpenStack Swift Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
storage_parameters (dict): Swift connection keyword arguments.
This is generally OpenStack credentials and configuration.
(see "swiftclient.client.Connection" for more information)
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
_SYSTEM_CLASS = _SwiftSystem
@property
@_memoizedmethod
def _client_args(self):
"""
Client arguments as tuple.
Returns:
tuple of str: Client args.
"""
return (self._client_kwargs['container'],
self._client_kwargs['obj'])
def _read_range(self, start, end=0):
"""
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: number of bytes read
"""
try:
with _handle_client_exception():
return self._client.get_object(*self._client_args, headers=dict(
Range=self._http_range(start, end)))[1]
except _ClientException as exception:
if exception.http_status == 416:
# EOF
return b''
raise
def _readall(self):
"""
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
"""
with _handle_client_exception():
return self._client.get_object(*self._client_args)[1]
def _flush(self, buffer):
"""
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
"""
container, obj = self._client_args
with _handle_client_exception():
self._client.put_object(container, obj, buffer)
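# Hedged usage sketch (added; placeholder URL and credentials, and
# seek()/read() come from the inherited io.RawIOBase interface):
#
# raw = SwiftRawIO('https://endpoint/v1/AUTH_project/container/object',
#                  storage_parameters=dict(authurl='...', user='...',
#                                          key='...'))
# raw.seek(128)
# chunk = raw.read(64)  # served by _read_range via an HTTP Range header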
|
class SwiftRawIO(_ObjectRawIOBase):
'''Binary OpenStack Swift Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
storage_parameters (dict): Swift connection keyword arguments.
This is generally OpenStack credentials and configuration.
(see "swiftclient.client.Connection" for more information)
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
'''
@property
@_memoizedmethod
def _client_args(self):
'''
Client arguments as tuple.
Returns:
tuple of str: Client args.
'''
pass
def _read_range(self, start, end=0):
'''
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: number of bytes read
'''
pass
def _readall(self):
'''
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
'''
pass
def _flush(self, buffer):
'''
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
'''
pass
| 7 | 5 | 13 | 2 | 5 | 6 | 2 | 1.7 | 1 | 1 | 0 | 0 | 4 | 0 | 4 | 53 | 73 | 11 | 23 | 9 | 16 | 39 | 19 | 7 | 14 | 3 | 6 | 2 | 6 |
2,925 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/swift.py
|
pycosio.storage.swift.SwiftBufferedIO
|
class SwiftBufferedIO(_ObjectBufferedIOBase):
"""Buffered binary OpenStack Swift Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Swift connection keyword arguments.
This is generally OpenStack credentials and configuration.
(see "swiftclient.client.Connection" for more information)
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
__slots__ = ('_container', '_object_name', '_segment_name')
_RAW_CLASS = SwiftRawIO
def __init__(self, *args, **kwargs):
_ObjectBufferedIOBase.__init__(self, *args, **kwargs)
self._container, self._object_name = self._raw._client_args
if self._writable:
self._segment_name = self._object_name + '.%03d'
def _flush(self):
"""
Flush the write buffers of the stream.
"""
# Upload segment with workers
name = self._segment_name % self._seek
response = self._workers.submit(
self._client.put_object, self._container, name,
self._get_buffer())
# Save segment information in manifest
self._write_futures.append(dict(
etag=response, path='/'.join((self._container, name))))
def _close_writable(self):
"""
Close the object in write mode.
"""
# Wait segments upload completion
for segment in self._write_futures:
segment['etag'] = segment['etag'].result()
# Upload manifest file
with _handle_client_exception():
self._client.put_object(self._container, self._object_name, _dumps(
self._write_futures), query_string='multipart-manifest=put')
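# Design note (added): each flushed buffer is uploaded as a segment
# named "<object>.%03d" (indexed by the current seek), and closing
# uploads a JSON manifest with "multipart-manifest=put", producing a
# Swift Static Large Object assembled from those segments.
#
# Hedged sketch (placeholder URL and parameters):
# with SwiftBufferedIO('https://endpoint/v1/AUTH_p/container/big.bin',
#                      mode='w', storage_parameters=dict(...)) as f:
#     f.write(b'\0' * (50 * 2 ** 20))  # segments upload in worker threads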
|
class SwiftBufferedIO(_ObjectBufferedIOBase):
'''Buffered binary OpenStack Swift Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Swift connection keyword arguments.
This is generally OpenStack credentials and configuration.
(see "swiftclient.client.Connection" for more information)
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
'''
def __init__(self, *args, **kwargs):
pass
def _flush(self):
'''
Flush the write buffers of the stream.
'''
pass
def _close_writable(self):
'''
Close the object in write mode.
'''
pass
| 4 | 3 | 11 | 2 | 6 | 3 | 2 | 1.05 | 1 | 1 | 0 | 0 | 3 | 4 | 3 | 51 | 53 | 10 | 21 | 12 | 17 | 22 | 17 | 11 | 13 | 2 | 6 | 1 | 5 |
2,926 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/s3.py
|
pycosio.storage.s3._S3System
|
class _S3System(_SystemBase):
"""
S3 system.
Args:
storage_parameters (dict): Boto3 Session keyword arguments.
This is generally AWS credentials and configuration.
This dict should contain two sub-dicts:
'session', whose arguments are passed to "boto3.session.Session",
and 'client', whose arguments are passed to
"boto3.session.Session.client".
May be optional if running on AWS EC2 instances.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
__slots__ = ('_session',)
_SIZE_KEYS = ('ContentLength',)
_CTIME_KEYS = ('CreationDate',)
_MTIME_KEYS = ('LastModified',)
def __init__(self, *args, **kwargs):
self._session = None
_SystemBase.__init__(self, *args, **kwargs)
def copy(self, src, dst, other_system=None):
"""
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass): Unused.
"""
copy_source = self.get_client_kwargs(src)
copy_destination = self.get_client_kwargs(dst)
with _handle_client_error():
self.client.copy_object(CopySource=copy_source, **copy_destination)
def get_client_kwargs(self, path):
"""
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
"""
bucket_name, key = self.split_locator(path)
kwargs = dict(Bucket=bucket_name)
if key:
kwargs['Key'] = key
return kwargs
def _get_session(self):
"""
S3 Boto3 Session.
Returns:
boto3.session.Session: session
"""
if self._session is None:
self._session = _boto3.session.Session(
**self._storage_parameters.get('session', dict()))
return self._session
def _get_client(self):
"""
S3 Boto3 client
Returns:
boto3.session.Session.client: client
"""
client_kwargs = self._storage_parameters.get('client', dict())
# Handles unsecure mode
if self._unsecure:
client_kwargs = client_kwargs.copy()
client_kwargs['use_ssl'] = False
return self._get_session().client("s3", **client_kwargs)
def _get_roots(self):
"""
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
"""
region = self._get_session().region_name or r'[\w-]+'
return (
# S3 scheme
# - s3://<bucket>/<key>
's3://',
# Virtual-hosted–style URL
# - http://<bucket>.s3.amazonaws.com/<key>
# - https://<bucket>.s3.amazonaws.com/<key>
# - http://<bucket>.s3-<region>.amazonaws.com/<key>
# - https://<bucket>.s3-<region>.amazonaws.com/<key>
_re.compile(r'https?://[\w.-]+\.s3\.amazonaws\.com'),
_re.compile(
r'https?://[\w.-]+\.s3-%s\.amazonaws\.com' % region),
# Path-hosted–style URL
# - http://s3.amazonaws.com/<bucket>/<key>
# - https://s3.amazonaws.com/<bucket>/<key>
# - http://s3-<region>.amazonaws.com/<bucket>/<key>
# - https://s3-<region>.amazonaws.com/<bucket>/<key>
_re.compile(r'https?://s3\.amazonaws\.com'),
_re.compile(r'https?://s3-%s\.amazonaws\.com' % region),
# Transfer acceleration URL
# - http://<bucket>.s3-accelerate.amazonaws.com
# - https://<bucket>.s3-accelerate.amazonaws.com
# - http://<bucket>.s3-accelerate.dualstack.amazonaws.com
# - https://<bucket>.s3-accelerate.dualstack.amazonaws.com
_re.compile(
r'https?://[\w.-]+\.s3-accelerate\.amazonaws\.com'),
_re.compile(
r'https?://[\w.-]+\.s3-accelerate\.dualstack'
r'\.amazonaws\.com'))
@staticmethod
def _get_time(header, keys, name):
"""
Get time from header
Args:
header (dict): Object header.
keys (tuple of str): Header keys.
name (str): Method name.
Returns:
float: The number of seconds since the epoch
"""
for key in keys:
try:
return header.pop(key).timestamp()
except KeyError:
continue
raise _UnsupportedOperation(name)
def _getsize_from_header(self, header):
"""
Return the size from header
Args:
header (dict): Object header.
Returns:
int: Size in bytes.
"""
try:
return header.pop('ContentLength')
except KeyError:
raise _UnsupportedOperation('getsize')
def _head(self, client_kwargs):
"""
Returns object or bucket HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
"""
with _handle_client_error():
# Object
if 'Key' in client_kwargs:
header = self.client.head_object(**client_kwargs)
# Bucket
else:
header = self.client.head_bucket(**client_kwargs)
# Clean up HTTP request information
for key in ('AcceptRanges', 'ResponseMetadata'):
header.pop(key, None)
return header
def _make_dir(self, client_kwargs):
"""
Make a directory.
args:
client_kwargs (dict): Client arguments.
"""
with _handle_client_error():
# Object
if 'Key' in client_kwargs:
return self.client.put_object(Body=b'', **client_kwargs)
# Bucket
return self.client.create_bucket(
Bucket=client_kwargs['Bucket'],
CreateBucketConfiguration=dict(
LocationConstraint=self._get_session().region_name))
def _remove(self, client_kwargs):
"""
Remove an object.
args:
client_kwargs (dict): Client arguments.
"""
with _handle_client_error():
# Object
if 'Key' in client_kwargs:
return self.client.delete_object(**client_kwargs)
# Bucket
return self.client.delete_bucket(Bucket=client_kwargs['Bucket'])
def _list_locators(self):
"""
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
"""
with _handle_client_error():
response = self.client.list_buckets()
for bucket in response['Buckets']:
yield bucket.pop('Name'), bucket
def _list_objects(self, client_kwargs, path, max_request_entries):
"""
Lists objects.
args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
"""
client_kwargs = client_kwargs.copy()
if max_request_entries:
client_kwargs['MaxKeys'] = max_request_entries
while True:
with _handle_client_error():
response = self.client.list_objects_v2(
Prefix=path, **client_kwargs)
try:
for obj in response['Contents']:
yield obj.pop('Key'), obj
except KeyError:
raise _ObjectNotFoundError('Not found: %s' % path)
# Handles results on more than one page
try:
client_kwargs['ContinuationToken'] = response[
'NextContinuationToken']
except KeyError:
# End of results
break
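# Hedged sketch (added): the same ContinuationToken pagination pattern,
# expressed directly against a boto3 client ("my-bucket" and "prefix/"
# are placeholders):
#
# import boto3
# client = boto3.session.Session().client('s3')
# kwargs = dict(Bucket='my-bucket', Prefix='prefix/')
# while True:
#     response = client.list_objects_v2(**kwargs)
#     for obj in response.get('Contents', ()):
#         print(obj['Key'])
#     if 'NextContinuationToken' not in response:
#         break  # End of results
#     kwargs['ContinuationToken'] = response['NextContinuationToken']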
|
class _S3System(_SystemBase):
'''
S3 system.
Args:
storage_parameters (dict): Boto3 Session keyword arguments.
This is generally AWS credentials and configuration.
This dict should contain two sub-dicts:
'session', whose arguments are passed to "boto3.session.Session",
and 'client', whose arguments are passed to
"boto3.session.Session.client".
May be optional if running on AWS EC2 instances.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
'''
def __init__(self, *args, **kwargs):
pass
def copy(self, src, dst, other_system=None):
'''
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass): Unused.
'''
pass
def get_client_kwargs(self, path):
'''
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
'''
pass
def _get_session(self):
'''
S3 Boto3 Session.
Returns:
boto3.session.Session: session
'''
pass
def _get_client(self):
'''
S3 Boto3 client
Returns:
boto3.session.Session.client: client
'''
pass
def _get_roots(self):
'''
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
'''
pass
@staticmethod
def _get_time(header, keys, name):
'''
Get time from header
Args:
header (dict): Object header.
keys (tuple of str): Header keys.
name (str): Method name.
Returns:
float: The number of seconds since the epoch
'''
pass
def _getsize_from_header(self, header):
'''
Return the size from header
Args:
header (dict): Object header.
Returns:
int: Size in bytes.
'''
pass
def _head(self, client_kwargs):
'''
Returns object or bucket HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
'''
pass
def _make_dir(self, client_kwargs):
'''
Make a directory.
args:
client_kwargs (dict): Client arguments.
'''
pass
def _remove(self, client_kwargs):
'''
Remove an object.
args:
client_kwargs (dict): Client arguments.
'''
pass
def _list_locators(self):
'''
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
'''
pass
def _list_objects(self, client_kwargs, path, max_request_entries):
'''
Lists objects.
args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
'''
pass
| 15 | 13 | 18 | 2 | 7 | 8 | 2 | 1.16 | 1 | 2 | 0 | 0 | 12 | 1 | 13 | 71 | 265 | 45 | 102 | 33 | 87 | 118 | 83 | 32 | 69 | 6 | 5 | 3 | 29 |
2,927 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/s3.py
|
pycosio.storage.s3.S3RawIO
|
class S3RawIO(_ObjectRawIOBase):
"""Binary S3 Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Boto3 Session keyword arguments.
This is generally AWS credentials and configuration.
This dict should contain two sub-dicts:
'session', whose arguments are passed to "boto3.session.Session",
and 'client', whose arguments are passed to
"boto3.session.Session.client".
May be optional if running on AWS EC2 instances.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
_SYSTEM_CLASS = _S3System
def _read_range(self, start, end=0):
"""
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: number of bytes read
"""
# Get object part from S3
try:
with _handle_client_error():
response = self._client.get_object(
Range=self._http_range(start, end), **self._client_kwargs)
# Check for end of file
except _ClientError as exception:
if exception.response['Error']['Code'] == 'InvalidRange':
# EOF
return bytes()
raise
# Get object content
return response['Body'].read()
def _readall(self):
"""
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
"""
with _handle_client_error():
return self._client.get_object(**self._client_kwargs)['Body'].read()
def _flush(self, buffer):
"""
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
"""
with _handle_client_error():
self._client.put_object(
Body=buffer.tobytes(), **self._client_kwargs)
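# Hedged usage sketch (added; placeholder bucket/key, seek()/read()
# come from the inherited io.RawIOBase interface):
#
# raw = S3RawIO('s3://my-bucket/my-key')
# raw.seek(1024)
# data = raw.read(512)  # issues a ranged GetObject via _read_range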
|
class S3RawIO(_ObjectRawIOBase):
'''Binary S3 Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Boto3 Session keyword arguments.
This is generally AWS credentials and configuration.
This dict should contain two sub-dicts:
'session', whose arguments are passed to "boto3.session.Session",
and 'client', whose arguments are passed to
"boto3.session.Session.client".
May be optional if running on AWS EC2 instances.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
'''
def _read_range(self, start, end=0):
'''
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: number of bytes read
'''
pass
def _readall(self):
'''
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
'''
pass
def _flush(self, buffer):
'''
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
'''
pass
| 4 | 4 | 15 | 2 | 6 | 8 | 2 | 2 | 1 | 1 | 0 | 0 | 3 | 0 | 3 | 52 | 67 | 10 | 19 | 7 | 15 | 38 | 17 | 6 | 13 | 3 | 6 | 2 | 5 |
2,928 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/s3.py
|
pycosio.storage.s3.S3BufferedIO
|
class S3BufferedIO(_ObjectBufferedIOBase):
"""Buffered binary S3 Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Boto3 Session keyword arguments.
This is generally AWS credentials and configuration.
This dict should contain two sub-dicts:
'session', whose arguments are passed to "boto3.session.Session",
and 'client', whose arguments are passed to
"boto3.session.Session.client".
May be optional if running on AWS EC2 instances.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
__slots__ = ('_upload_args',)
_RAW_CLASS = S3RawIO
#: Minimal buffer_size in bytes (S3 multipart upload minimal part size)
MINIMUM_BUFFER_SIZE = 5242880
def __init__(self, *args, **kwargs):
_ObjectBufferedIOBase.__init__(self, *args, **kwargs)
# Use multipart upload as write buffered mode
if self._writable:
self._upload_args = self._client_kwargs.copy()
def _flush(self):
"""
Flush the write buffers of the stream.
"""
# Initialize multi-part upload
if 'UploadId' not in self._upload_args:
with _handle_client_error():
self._upload_args[
'UploadId'] = self._client.create_multipart_upload(
**self._client_kwargs)['UploadId']
# Upload part with workers
response = self._workers.submit(
self._client.upload_part, Body=self._get_buffer().tobytes(),
PartNumber=self._seek, **self._upload_args)
# Save part information
self._write_futures.append(
dict(response=response, PartNumber=self._seek))
def _close_writable(self):
"""
Close the object in write mode.
"""
# Wait parts upload completion
for part in self._write_futures:
part['ETag'] = part.pop('response').result()['ETag']
# Complete multipart upload
with _handle_client_error():
try:
self._client.complete_multipart_upload(
MultipartUpload={'Parts': self._write_futures},
UploadId=self._upload_args['UploadId'],
**self._client_kwargs)
except _ClientError:
# Clean up if failure
self._client.abort_multipart_upload(
UploadId=self._upload_args['UploadId'],
**self._client_kwargs)
raise
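# Hedged usage sketch (added; placeholder path). Each full buffer is
# uploaded as one part (at least MINIMUM_BUFFER_SIZE bytes), and
# close() completes the multipart upload, or aborts it on failure:
#
# with S3BufferedIO('s3://my-bucket/big.bin', mode='w') as f:
#     f.write(b'\0' * (16 * 2 ** 20))  # ~16 MiB -> several parts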
|
class S3BufferedIO(_ObjectBufferedIOBase):
'''Buffered binary S3 Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Boto3 Session keyword arguments.
This is generally AWS credentials and configuration.
This dict should contain two sub-dicts:
'session', whose arguments are passed to "boto3.session.Session",
and 'client', whose arguments are passed to
"boto3.session.Session.client".
May be optional if running on AWS EC2 instances.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
'''
def __init__(self, *args, **kwargs):
pass
def _flush(self):
'''
Flush the write buffers of the stream.
'''
pass
def _close_writable(self):
'''
Close the object in write mode.
'''
pass
| 4 | 3 | 16 | 2 | 10 | 4 | 2 | 1 | 1 | 1 | 0 | 0 | 3 | 1 | 3 | 51 | 77 | 11 | 33 | 10 | 29 | 33 | 23 | 10 | 19 | 3 | 6 | 2 | 7 |
2,929 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/oss.py
|
pycosio.storage.oss._OSSSystem
|
class _OSSSystem(_SystemBase):
"""
OSS system.
Args:
storage_parameters (dict): OSS2 Auth keyword arguments and endpoint.
This is generally OSS credentials and configuration.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
"""
__slots__ = ('_unsecure', '_endpoint')
_CTIME_KEYS = ('Creation-Date', 'creation_date')
_MTIME_KEYS = ('Last-Modified', 'last_modified')
def __init__(self, storage_parameters=None, *args, **kwargs):
try:
storage_parameters = storage_parameters.copy()
self._endpoint = storage_parameters.pop('endpoint')
except (AttributeError, KeyError):
raise ValueError('"endpoint" is required as "storage_parameters"')
_SystemBase.__init__(self, storage_parameters=storage_parameters,
*args, **kwargs)
if self._unsecure:
self._endpoint = self._endpoint.replace('https://', 'http://')
def copy(self, src, dst, other_system=None):
"""
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass): Unused.
"""
copy_source = self.get_client_kwargs(src)
copy_destination = self.get_client_kwargs(dst)
with _handle_oss_error():
bucket = self._get_bucket(copy_destination)
bucket.copy_object(
source_bucket_name=copy_source['bucket_name'],
source_key=copy_source['key'],
target_key=copy_destination['key'])
def get_client_kwargs(self, path):
"""
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
"""
bucket_name, key = self.split_locator(path)
kwargs = dict(bucket_name=bucket_name)
if key:
kwargs['key'] = key
return kwargs
def _get_client(self):
"""
OSS2 Auth client
Returns:
oss2.Auth or oss2.StsAuth: client
"""
return (_oss.StsAuth if 'security_token' in self._storage_parameters
else _oss.Auth if self._storage_parameters
else _oss.AnonymousAuth)(**self._storage_parameters)
def _get_roots(self):
"""
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
"""
return (
# OSS Scheme
# - oss://<bucket>/<key>
'oss://',
# URL (With common aliyuncs.com endpoint):
# - http://<bucket>.oss-<region>.aliyuncs.com/<key>
# - https://<bucket>.oss-<region>.aliyuncs.com/<key>
# Note: "oss-<region>.aliyuncs.com" may be replaced by another
# endpoint
_re.compile((r'https?://[\w-]+.%s' % self._endpoint.split(
'//', 1)[1]).replace('.', r'\.')))
def _get_bucket(self, client_kwargs):
"""
Get bucket object.
Returns:
oss2.Bucket
"""
return _oss.Bucket(self.client, endpoint=self._endpoint,
bucket_name=client_kwargs['bucket_name'])
def islink(self, path=None, header=None):
"""
Returns True if object is a symbolic link.
Args:
path (str): File path or URL.
header (dict): Object header.
Returns:
bool: True if object is Symlink.
"""
if header is None:
header = self._head(self.get_client_kwargs(path))
for key in ('x-oss-object-type', 'type'):
try:
return header.pop(key) == 'Symlink'
except KeyError:
continue
return False
def _head(self, client_kwargs):
"""
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
"""
with _handle_oss_error():
bucket = self._get_bucket(client_kwargs)
# Object
if 'key' in client_kwargs:
return bucket.head_object(
key=client_kwargs['key']).headers
# Bucket
return bucket.get_bucket_info().headers
def _make_dir(self, client_kwargs):
"""
Make a directory.
args:
client_kwargs (dict): Client arguments.
"""
with _handle_oss_error():
bucket = self._get_bucket(client_kwargs)
# Object
if 'key' in client_kwargs:
return bucket.put_object(
key=client_kwargs['key'], data=b'')
# Bucket
return bucket.create_bucket()
def _remove(self, client_kwargs):
"""
Remove an object.
args:
client_kwargs (dict): Client arguments.
"""
with _handle_oss_error():
bucket = self._get_bucket(client_kwargs)
# Object
if 'key' in client_kwargs:
return bucket.delete_object(key=client_kwargs['key'])
# Bucket
return bucket.delete_bucket()
@staticmethod
def _model_to_dict(model, ignore):
"""
Convert OSS model to dict.
Args:
model (oss2.models.RequestResult): Model.
ignore (tuple of str): Keys to not insert to dict.
Returns:
dict: Model dict version.
"""
return {attr: value for attr, value in model.__dict__.items()
if not attr.startswith('_') and attr not in ignore}
def _list_locators(self):
"""
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
"""
with _handle_oss_error():
response = _oss.Service(
self.client, endpoint=self._endpoint).list_buckets()
for bucket in response.buckets:
yield bucket.name, self._model_to_dict(bucket, ('name',))
def _list_objects(self, client_kwargs, path, max_request_entries):
"""
Lists objects.
args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
"""
kwargs = dict()
if max_request_entries:
kwargs['max_keys'] = max_request_entries
bucket = self._get_bucket(client_kwargs)
while True:
with _handle_oss_error():
response = bucket.list_objects(prefix=path, **kwargs)
if not response.object_list:
                # An empty result means the directory does not exist,
                # so raise "not found" instead of yielding nothing.
raise _ObjectNotFoundError('Not found: %s' % path)
for obj in response.object_list:
yield obj.key, self._model_to_dict(obj, ('key',))
# Handles results on more than one page
if response.next_marker:
client_kwargs['marker'] = response.next_marker
else:
# End of results
break
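
# ----------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): how this system
# class is typically reached through the public pycosio API. The storage
# name 'oss', the parameter names and the credential/endpoint/bucket/key
# values below are assumptions and placeholders.
import pycosio

pycosio.mount(storage='oss', storage_parameters=dict(
    access_key_id='<key-id>', access_key_secret='<secret>',
    endpoint='https://oss-cn-hangzhou.aliyuncs.com'))

# "oss://" URLs are now routed to _OSSSystem and its I/O classes.
with pycosio.open('oss://my-bucket/my-key', 'rb') as file:
    data = file.read()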
|
class _OSSSystem(_SystemBase):
'''
OSS system.
Args:
storage_parameters (dict): OSS2 Auth keyword arguments and endpoint.
This is generally OSS credentials and configuration.
    unsecure (bool): If True, disables TLS/SSL to improve
        transfer performance, but makes the connection insecure.
'''
def __init__(self, storage_parameters=None, *args, **kwargs):
pass
def copy(self, src, dst, other_system=None):
'''
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio._core.io_system.SystemBase subclass): Unused.
'''
pass
def get_client_kwargs(self, path):
'''
    Get base keyword arguments for the client for a specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
'''
pass
def _get_client(self):
'''
OSS2 Auth client
Returns:
oss2.Auth or oss2.StsAuth: client
'''
pass
def _get_roots(self):
'''
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
'''
pass
def _get_bucket(self, client_kwargs):
'''
Get bucket object.
Returns:
oss2.Bucket
'''
pass
def islink(self, path=None, header=None):
'''
Returns True if object is a symbolic link.
Args:
path (str): File path or URL.
header (dict): Object header.
Returns:
bool: True if object is Symlink.
'''
pass
def _head(self, client_kwargs):
'''
Returns object HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
'''
pass
def _make_dir(self, client_kwargs):
'''
Make a directory.
    Args:
client_kwargs (dict): Client arguments.
'''
pass
def _remove(self, client_kwargs):
'''
Remove an object.
    Args:
client_kwargs (dict): Client arguments.
'''
pass
@staticmethod
def _model_to_dict(model, ignore):
'''
Convert OSS model to dict.
Args:
model (oss2.models.RequestResult): Model.
ignore (tuple of str): Keys to not insert to dict.
Returns:
dict: Model dict version.
'''
pass
def _list_locators(self):
'''
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
'''
pass
def _list_objects(self, client_kwargs, path, max_request_entries):
'''
Lists objects.
    Args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
'''
pass
| 15 | 13 | 17 | 3 | 7 | 7 | 2 | 1.07 | 1 | 4 | 0 | 0 | 12 | 2 | 13 | 71 | 249 | 50 | 96 | 35 | 81 | 103 | 80 | 33 | 66 | 6 | 5 | 2 | 30 |
2,930 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/http.py
|
pycosio.storage.http.HTTPRawIO
|
class HTTPRawIO(_ObjectRawIOBase):
"""Binary HTTP Object I/O
Args:
name (path-like object): URL to the file which will be opened.
mode (str): The mode can be 'r' for reading (default)
"""
_SYSTEM_CLASS = _HTTPSystem
_TIMEOUT = _HTTPSystem._TIMEOUT
def __init__(self, *args, **kwargs):
_ObjectRawIOBase.__init__(self, *args, **kwargs)
        # Only the read-only mode is supported
if 'r' not in self._mode:
raise _UnsupportedOperation('write')
        # Check whether the object supports random reads
self._seekable = self._head().get('Accept-Ranges') == 'bytes'
def _read_range(self, start, end=0):
"""
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
            Use 0 to leave the end unspecified.
Returns:
bytes: number of bytes read
"""
# Get object part
response = self._client.request(
'GET', self.name, headers=dict(Range=self._http_range(start, end)),
timeout=self._TIMEOUT)
if response.status_code == 416:
# EOF
return b''
# Get object content
return _handle_http_errors(response).content
def _readall(self):
"""
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
"""
return _handle_http_errors(
self._client.request(
'GET', self.name, timeout=self._TIMEOUT)).content
def _flush(self, *_):
"""
Flush the write buffers of the stream if applicable.
"""
|
class HTTPRawIO(_ObjectRawIOBase):
'''Binary HTTP Object I/O
Args:
name (path-like object): URL to the file which will be opened.
mode (str): The mode can be 'r' for reading (default)
'''
def __init__(self, *args, **kwargs):
pass
def _read_range(self, start, end=0):
'''
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
            Use 0 to leave the end unspecified.
Returns:
bytes: number of bytes read
'''
pass
def _readall(self):
'''
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
'''
pass
def _flush(self, *_):
'''
Flush the write buffers of the stream if applicable.
'''
pass
| 5 | 4 | 12 | 2 | 4 | 6 | 2 | 1.35 | 1 | 1 | 0 | 0 | 4 | 2 | 4 | 53 | 60 | 13 | 20 | 10 | 15 | 27 | 16 | 9 | 11 | 2 | 6 | 1 | 6 |
2,931 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/oss.py
|
pycosio.storage.oss.OSSRawIO
|
class OSSRawIO(_ObjectRawIOBase):
"""Binary OSS Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): OSS2 Auth keyword arguments and endpoint.
This is generally OSS credentials and configuration.
        unsecure (bool): If True, disables TLS/SSL to improve
            transfer performance, but makes the connection insecure.
"""
_SYSTEM_CLASS = _OSSSystem
@property
@_memoizedmethod
def _bucket(self):
"""
Bucket client.
Returns:
oss2.Bucket: Client.
"""
return self._system._get_bucket(self._client_kwargs)
@property
@_memoizedmethod
def _key(self):
"""
Object key.
Returns:
str: key.
"""
return self._client_kwargs['key']
def _read_range(self, start, end=0):
"""
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
            Use 0 to leave the end unspecified.
Returns:
bytes: number of bytes read
"""
if start >= self._size:
            # EOF. A 416 (Range Not Satisfiable) error cannot be used to
            # detect this case, since OSS returns 200 here.
return bytes()
# Get object bytes range
with _handle_oss_error():
response = self._bucket.get_object(key=self._key, headers=dict(
Range=self._http_range(
                    # OSS returns the full object if end > size, so clamp the end
start, end if end <= self._size else self._size)))
# Get object content
return response.read()
def _readall(self):
"""
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
"""
with _handle_oss_error():
return self._bucket.get_object(key=self._key).read()
def _flush(self, buffer):
"""
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
"""
with _handle_oss_error():
self._bucket.put_object(key=self._key, data=buffer.tobytes())
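
# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source) of the range-read
# behavior implemented above: reads starting at or past the object size
# return b'' (EOF) rather than relying on an HTTP 416 error. The bucket
# and key are placeholders, and the OSS storage is assumed to be mounted.
import pycosio

with pycosio.open('oss://my-bucket/my-key', 'rb') as file:
    file.seek(100)           # later reads map to _read_range(start=100, ...)
    chunk = file.read(50)    # at most 50 bytes; b'' once past the end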
|
class OSSRawIO(_ObjectRawIOBase):
'''Binary OSS Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): OSS2 Auth keyword arguments and endpoint.
This is generally OSS credentials and configuration.
    unsecure (bool): If True, disables TLS/SSL to improve
        transfer performance, but makes the connection insecure.
'''
@property
@_memoizedmethod
def _bucket(self):
'''
Bucket client.
Returns:
oss2.Bucket: Client.
'''
pass
@property
@_memoizedmethod
def _key(self):
'''
Object key.
Returns:
str: key.
'''
pass
def _read_range(self, start, end=0):
'''
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
            Use 0 to leave the end unspecified.
Returns:
bytes: number of bytes read
'''
pass
def _readall(self):
'''
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
'''
pass
def _flush(self, buffer):
'''
Flush the write buffers of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
'''
pass
| 10 | 6 | 12 | 2 | 4 | 7 | 1 | 1.79 | 1 | 2 | 0 | 0 | 5 | 0 | 5 | 54 | 81 | 14 | 24 | 10 | 14 | 43 | 18 | 8 | 12 | 3 | 6 | 1 | 7 |
2,932 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_oss.py
|
tests.test_storage_oss.test_mocked_storage.Bucket
|
class Bucket:
"""oss2.Bucket"""
def __init__(self, auth, endpoint, bucket_name=None, *_, **__):
"""oss2.Bucket.__init__"""
self._bucket_name = bucket_name
def get_object(self, key=None, headers=None, **_):
"""oss2.Bucket.get_object"""
return BytesIO(storage_mock.get_object(
self._bucket_name, key, header=headers))
def head_object(self, key=None, **_):
"""oss2.Bucket.head_object"""
return HeadObjectResult(Response(
headers=storage_mock.head_object(self._bucket_name, key)))
def put_object(self, key=None, data=None, **_):
"""oss2.Bucket.put_object"""
storage_mock.put_object(
self._bucket_name, key, data, new_file=True)
def delete_object(self, key=None, **_):
"""oss2.Bucket.delete_object"""
storage_mock.delete_object(self._bucket_name, key)
def get_bucket_info(self, **_):
"""oss2.Bucket.get_bucket_info"""
return Response(
headers=storage_mock.head_locator(self._bucket_name))
def copy_object(self, source_bucket_name=None, source_key=None,
target_key=None, **_):
"""oss2.Bucket.copy_object"""
storage_mock.copy_object(
src_path=source_key, src_locator=source_bucket_name,
dst_path=target_key, dst_locator=self._bucket_name)
def create_bucket(self, **_):
"""oss2.Bucket.create_bucket"""
storage_mock.put_locator(self._bucket_name)
def delete_bucket(self, **_):
"""oss2.Bucket.delete_bucket"""
storage_mock.delete_locator(self._bucket_name)
def list_objects(self, prefix=None, max_keys=None, **_):
"""oss2.Bucket.list_objects"""
response = storage_mock.get_locator(
self._bucket_name, prefix=prefix, limit=max_keys,
raise_404_if_empty=False)
object_list = []
for key, headers in response.items():
obj = HeadObjectResult(Response(headers=headers))
obj.key = key
object_list.append(obj)
return ListResult(object_list=object_list)
@staticmethod
def init_multipart_upload(*_, **__):
"""oss2.Bucket.init_multipart_upload"""
return Response(upload_id='123')
def complete_multipart_upload(
self, key=None, upload_id=None, parts=None, **_):
"""oss2.Bucket.complete_multipart_upload"""
assert upload_id == '123'
storage_mock.concat_objects(self._bucket_name, key, [
key + str(part.part_number) for part in parts
])
def upload_part(self, key=None, upload_id=None,
part_number=None, data=None, **_):
"""oss2.Bucket.upload_part"""
assert upload_id == '123'
return HeadObjectResult(Response(headers=storage_mock.put_object(
self._bucket_name, key + str(part_number), data)))
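
# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the part-naming
# convention used by the multipart mock above. Each part is staged under
# "<key>.<part_number>" and those keys are concatenated on completion.
key, part_numbers = 'obj', [1, 2, 3]
part_keys = [key + str(number) for number in part_numbers]
assert part_keys == ['obj1', 'obj2', 'obj3']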
|
class Bucket:
'''oss2.Bucket'''
def __init__(self, auth, endpoint, bucket_name=None, *_, **__):
'''oss2.Bucket.__init__'''
pass
def get_object(self, key=None, headers=None, **_):
'''oss2.Bucket.get_object'''
pass
def head_object(self, key=None, **_):
'''oss2.Bucket.head_object'''
pass
def put_object(self, key=None, data=None, **_):
'''oss2.Bucket.put_object'''
pass
def delete_object(self, key=None, **_):
'''oss2.Bucket.delete_object'''
pass
def get_bucket_info(self, **_):
'''oss2.Bucket.get_bucket_info'''
pass
def copy_object(self, source_bucket_name=None, source_key=None,
target_key=None, **_):
'''oss2.Bucket.copy_object'''
pass
def create_bucket(self, **_):
'''oss2.Bucket.create_bucket'''
pass
def delete_bucket(self, **_):
'''oss2.Bucket.delete_bucket'''
pass
def list_objects(self, prefix=None, max_keys=None, **_):
'''oss2.Bucket.list_objects'''
pass
@staticmethod
def init_multipart_upload(*_, **__):
'''oss2.Bucket.init_multipart_upload'''
pass
def complete_multipart_upload(
self, key=None, upload_id=None, parts=None, **_):
'''oss2.Bucket.complete_multipart_upload'''
pass
def upload_part(self, key=None, upload_id=None,
part_number=None, data=None, **_):
'''oss2.Bucket.upload_part'''
pass
| 15 | 14 | 5 | 0 | 4 | 1 | 1 | 0.29 | 0 | 3 | 2 | 0 | 12 | 1 | 13 | 13 | 77 | 14 | 49 | 23 | 31 | 14 | 35 | 19 | 21 | 2 | 0 | 1 | 14 |
2,933 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_http.py
|
tests.test_storage_http.test_mocked_storage.Session
|
class Session:
"""Fake Session"""
def __init__(self, *_, **__):
"""Do nothing"""
@staticmethod
def request(method, url, headers=None, **_):
"""Check arguments and returns fake result"""
# Remove scheme
try:
url = url.split('//')[1]
except IndexError:
pass
# Split path and locator
locator, path = url.split('/', 1)
# Perform requests
try:
if method == 'HEAD':
return Response(
headers=storage_mock.head_object(locator, path))
elif method == 'GET':
return Response(content=storage_mock.get_object(
locator, path, header=headers))
else:
raise ValueError('Unknown method: ' + method)
# Return exception as response with status_code
except HTTPException as exception:
return Response(status_code=exception.status_code)
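
# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the URL handling used
# by the fake Session above. The scheme is stripped, then the remainder
# splits into a locator (host) and an object path.
url = 'http://locator/dir/object'
locator, path = url.split('//')[1].split('/', 1)
assert (locator, path) == ('locator', 'dir/object')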
|
class Session:
'''Fake Session'''
def __init__(self, *_, **__):
'''Do nothing'''
pass
@staticmethod
def request(method, url, headers=None, **_):
'''Check arguments and returns fake result'''
pass
| 4 | 3 | 14 | 2 | 9 | 3 | 3 | 0.35 | 0 | 4 | 2 | 0 | 1 | 0 | 2 | 2 | 32 | 5 | 20 | 6 | 16 | 7 | 15 | 4 | 12 | 5 | 0 | 2 | 6 |
2,934 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_functions.py
|
tests.test_core_functions.test_cos_open.DummyBufferedIO
|
class DummyBufferedIO(DummyIO):
"""Dummy buffered IO"""
|
class DummyBufferedIO(DummyIO):
'''Dummy buffered IO'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 2 | 0 | 0 |
2,935 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_functions.py
|
tests.test_core_functions.test_cos_open.DummyIO
|
class DummyIO(BytesIO):
"""Dummy IO"""
def __init__(self, *_, **__):
BytesIO.__init__(self, content)
|
class DummyIO(BytesIO):
'''Dummy IO'''
def __init__(self, *_, **__):
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 0 | 0 | 2 | 1 | 0 | 1 | 1 | 5 | 1 | 3 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
2,936 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_functions.py
|
tests.test_core_functions.test_cos_open.DummyRawIO
|
class DummyRawIO(DummyIO):
"""Dummy raw IO"""
|
class DummyRawIO(DummyIO):
'''Dummy raw IO'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 2 | 0 | 0 |
2,937 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_functions.py
|
tests.test_core_functions.test_cos_open.DummySystem
|
class DummySystem(SystemBase):
"""dummy system"""
def __init__(self, *_, **__):
self.copied = False
self.raise_on_copy = False
def copy(self, *_, **__):
"""Checks called"""
if self.raise_on_copy:
raise UnsupportedOperation
self.copied = True
def copy_to_storage3(self, *_, **__):
"""Checks called"""
if self.raise_on_copy:
raise UnsupportedOperation
self.copied = True
def copy_from_storage3(self, *_, **__):
"""Checks called"""
if self.raise_on_copy:
raise UnsupportedOperation
self.copied = True
def relpath(self, path):
"""Returns fake result"""
return path
@staticmethod
def _head(*_, **__):
"""Do nothing"""
@staticmethod
def _get_roots(*_, **__):
"""Do nothing"""
return root,
@staticmethod
def _get_client(*_, **__):
"""Do nothing"""
@staticmethod
def get_client_kwargs(*_, **__):
"""Do nothing"""
|
class DummySystem(SystemBase):
'''dummy system'''
def __init__(self, *_, **__):
pass
def copy(self, *_, **__):
'''Checks called'''
pass
def copy_to_storage3(self, *_, **__):
'''Checks called'''
pass
def copy_from_storage3(self, *_, **__):
'''Checks called'''
pass
def relpath(self, path):
'''Returns fake result'''
pass
@staticmethod
def _head(*_, **__):
'''Do nothing'''
pass
@staticmethod
def _get_roots(*_, **__):
'''Do nothing'''
pass
@staticmethod
def _get_client(*_, **__):
'''Do nothing'''
pass
@staticmethod
def get_client_kwargs(*_, **__):
'''Do nothing'''
pass
| 14 | 9 | 3 | 0 | 2 | 1 | 1 | 0.33 | 1 | 0 | 0 | 0 | 5 | 2 | 9 | 9 | 45 | 9 | 27 | 16 | 13 | 9 | 23 | 12 | 13 | 2 | 1 | 1 | 12 |
2,938 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_functions.py
|
tests.test_core_functions.test_equivalent_functions.System
|
class System(SystemBase):
"""dummy system"""
def relpath(self, path):
"""Checks arguments and returns fake result"""
if excepted_path:
assert path.startswith(excepted_path)
return path.split(root)[1].strip('/')
@staticmethod
def isdir(path=None, *_, **__):
"""Checks arguments and returns fake result"""
if is_dir_no_access:
raise ObjectPermissionError
if check_ending_slash:
assert path[-1] == '/'
return path in dirs_exists or 'isdir' in path
@staticmethod
def isfile(path=None, *_, **__):
"""Checks arguments and returns fake result"""
return 'isfile' in path
@staticmethod
def list_objects(path='', first_level=False, **__):
"""Checks arguments and returns fake result"""
for obj in (
first_level_objects_list if first_level else objects_list):
yield obj
@staticmethod
def _make_dir(*_, **__):
"""Do nothing"""
dir_created.append(1)
@staticmethod
def _remove(*_, **__):
"""Do nothing"""
removed.append(1)
@staticmethod
def _head(*_, **__):
"""Do nothing"""
@staticmethod
def _get_roots(*_, **__):
"""Do nothing"""
return root,
@staticmethod
def _get_client(*_, **__):
"""Do nothing"""
@staticmethod
def get_client_kwargs(*_, **__):
"""Do nothing"""
|
class System(SystemBase):
'''dummy system'''
def relpath(self, path):
'''Checks arguments and returns fake result'''
pass
@staticmethod
def isdir(path=None, *_, **__):
'''Checks arguments and returns fake result'''
pass
@staticmethod
def isfile(path=None, *_, **__):
'''Checks arguments and returns fake result'''
pass
@staticmethod
def list_objects(path='', first_level=False, **__):
'''Checks arguments and returns fake result'''
pass
@staticmethod
def _make_dir(*_, **__):
'''Do nothing'''
pass
@staticmethod
def _remove(*_, **__):
'''Do nothing'''
pass
@staticmethod
def _head(*_, **__):
'''Do nothing'''
pass
@staticmethod
def _get_roots(*_, **__):
'''Do nothing'''
pass
@staticmethod
def _get_client(*_, **__):
'''Do nothing'''
pass
@staticmethod
def get_client_kwargs(*_, **__):
'''Do nothing'''
pass
| 20 | 11 | 4 | 0 | 3 | 1 | 2 | 0.31 | 1 | 1 | 1 | 0 | 1 | 0 | 10 | 10 | 56 | 10 | 35 | 21 | 15 | 11 | 25 | 12 | 14 | 3 | 1 | 1 | 15 |
2,939 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_io_base.py
|
tests.test_core_io_base.test_memoizedmethod.Dummy
|
class Dummy:
"""Dummy class"""
def __init__(self):
self._cache = {}
@memoizedmethod
def to_memoize(self, arg):
"""Fake method"""
return arg
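
# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the memoization
# behavior this fixture exercises, assuming "memoizedmethod" caches the
# first result in "_cache" under the method name.
dummy = Dummy()
assert dummy.to_memoize('value') == 'value'   # first call runs the body
assert dummy._cache == {'to_memoize': 'value'}
assert dummy.to_memoize('other') == 'value'   # later calls hit the cache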
|
class Dummy:
'''Dummy class'''
def __init__(self):
pass
@memoizedmethod
def to_memoize(self, arg):
'''Fake method'''
pass
| 4 | 2 | 3 | 0 | 2 | 1 | 1 | 0.33 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 9 | 1 | 6 | 5 | 2 | 2 | 5 | 4 | 2 | 1 | 0 | 0 | 2 |
2,940 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_io_buffered.py
|
tests.test_core_io_buffered.test_object_buffered_base_io.DummyBufferedIO
|
class DummyBufferedIO(ObjectBufferedIOBase):
"""Dummy buffered IO"""
_RAW_CLASS = DummyRawIO
DEFAULT_BUFFER_SIZE = buffer_size
MINIMUM_BUFFER_SIZE = 10
MAXIMUM_BUFFER_SIZE = 10000
def ensure_ready(self):
"""Ensure flush is complete"""
while any(
1 for future in self._write_futures
if not future.done()):
time.sleep(0.01)
    def __init__(self, *args, **kwargs):
        ObjectBufferedIOBase.__init__(self, *args, **kwargs)
        self.close_called = False
def _close_writable(self):
"""Checks called"""
self.close_called = True
self.ensure_ready()
def _flush(self):
"""Flush"""
self._write_futures.append(self._workers.submit(
flush, self._write_buffer[:self._buffer_seek]))
|
class DummyBufferedIO(ObjectBufferedIOBase):
'''Dummy buffered IO'''
def ensure_ready(self):
'''Ensure flush is complete'''
pass
    def __init__(self, *args, **kwargs):
pass
def _close_writable(self):
'''Checks called'''
pass
def _flush(self):
'''Flush'''
pass
| 5 | 4 | 4 | 0 | 4 | 1 | 1 | 0.21 | 1 | 0 | 0 | 0 | 4 | 1 | 4 | 4 | 27 | 4 | 19 | 11 | 14 | 4 | 16 | 10 | 11 | 2 | 1 | 1 | 5 |
2,941 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_io_buffered.py
|
tests.test_core_io_buffered.test_object_buffered_base_io.DummyBufferedIOPartFlush
|
class DummyBufferedIOPartFlush(ObjectBufferedIORandomWriteBase):
"""Dummy buffered IO with part flush support"""
_RAW_CLASS = DummyRawIOPartFlush
|
class DummyBufferedIOPartFlush(ObjectBufferedIORandomWriteBase):
'''Dummy buffered IO with part flush support'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 2 | 2 | 1 | 1 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
2,942 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_io_buffered.py
|
tests.test_core_io_buffered.test_object_buffered_base_io.DummyRawIO
|
class DummyRawIO(ObjectRawIOBase):
"""Dummy IO"""
_SYSTEM_CLASS = DummySystem
def _flush(self, buffer):
"""Do nothing"""
raw_flushed.extend(buffer)
def _read_range(self, start, end=0):
"""Read fake bytes"""
return ((size if end > size else end) - start) * b'0'
|
class DummyRawIO(ObjectRawIOBase):
'''Dummy IO'''
def _flush(self, buffer):
'''Do nothing'''
pass
def _read_range(self, start, end=0):
'''Read fake bytes'''
pass
| 3 | 3 | 3 | 0 | 2 | 1 | 2 | 0.5 | 1 | 0 | 0 | 1 | 2 | 0 | 2 | 2 | 11 | 2 | 6 | 4 | 3 | 3 | 6 | 4 | 3 | 2 | 1 | 0 | 3 |
2,943 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_io_buffered.py
|
tests.test_core_io_buffered.test_object_buffered_base_io.DummyRawIOPartFlush
|
class DummyRawIOPartFlush(DummyRawIO, ObjectRawIORandomWriteBase):
"""Dummy IO with part flush support"""
_size = 20
def _flush(self, buffer, start, *_):
"""Do nothing"""
if start == 50:
            # Simulate a buffer that needs to wait for the previous one
time.sleep(0.1)
raw_flushed.extend(buffer)
|
class DummyRawIOPartFlush(DummyRawIO, ObjectRawIORandomWriteBase):
'''Dummy IO with part flush support'''
def _flush(self, buffer, start, *_):
'''Do nothing'''
pass
| 2 | 2 | 6 | 0 | 4 | 2 | 2 | 0.5 | 2 | 0 | 0 | 0 | 1 | 0 | 1 | 3 | 10 | 1 | 6 | 3 | 4 | 3 | 6 | 3 | 4 | 2 | 2 | 1 | 2 |
2,944 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_oss.py
|
tests.test_storage_oss.test_mocked_storage.Auth
|
class Auth:
"""oss2.Auth/oss2.StsAuth/oss2.AnonymousAuth"""
def __init__(self, **_):
"""oss2.Auth.__init__"""
@staticmethod
def _sign_request(*_, **__):
"""oss2.Auth._sign_request"""
|
class Auth:
'''oss2.Auth/oss2.StsAuth/oss2.AnonymousAuth'''
def __init__(self, **_):
'''oss2.Auth.__init__'''
pass
@staticmethod
def _sign_request(*_, **__):
'''oss2.Auth._sign_request'''
pass
| 4 | 3 | 2 | 0 | 1 | 1 | 1 | 0.75 | 0 | 0 | 0 | 0 | 1 | 0 | 2 | 2 | 9 | 2 | 4 | 4 | 0 | 3 | 3 | 3 | 0 | 1 | 0 | 0 | 2 |
2,945 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_io_buffered.py
|
tests.test_core_io_buffered.test_object_buffered_base_io.DummySystem
|
class DummySystem:
"""Dummy system"""
client = None
def __init__(self, **_):
"""Do nothing"""
@staticmethod
def getsize(*_, **__):
"""Returns fake result"""
return size
@staticmethod
def head(*_, **__):
"""Returns fake result"""
return {}
@staticmethod
def relpath(path):
"""Returns fake result"""
return path
@staticmethod
def get_client_kwargs(*_, **__):
"""Returns fake result"""
return {}
|
class DummySystem:
'''Dummy system'''
def __init__(self, **_):
'''Do nothing'''
pass
@staticmethod
def getsize(*_, **__):
'''Returns fake result'''
pass
@staticmethod
def head(*_, **__):
'''Returns fake result'''
pass
@staticmethod
def relpath(path):
'''Returns fake result'''
pass
@staticmethod
def get_client_kwargs(*_, **__):
'''Returns fake result'''
pass
| 10 | 6 | 3 | 0 | 2 | 1 | 1 | 0.4 | 0 | 0 | 0 | 0 | 1 | 0 | 5 | 5 | 27 | 6 | 15 | 11 | 5 | 6 | 11 | 7 | 5 | 1 | 0 | 0 | 5 |
2,946 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_storage_manager.py
|
tests.test_core_storage_manager.test_mount.Response
|
class Response:
"""Fake response"""
status_code = 200
headers = {'Accept-Ranges': 'bytes',
'Content-Length': '100'}
|
class Response:
'''Fake response'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0 | 4 | 3 | 3 | 1 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
2,947 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_storage_manager.py
|
tests.test_core_storage_manager.test_mount.Session
|
class Session:
"""Fake Session"""
def __init__(self, *_, **__):
"""Do nothing"""
@staticmethod
def request(*_, **__):
"""Returns fake result"""
return Response()
|
class Session:
'''Fake Session'''
def __init__(self, *_, **__):
'''Do nothing'''
pass
@staticmethod
def request(*_, **__):
'''Returns fake result'''
pass
| 4 | 3 | 3 | 0 | 2 | 1 | 1 | 0.6 | 0 | 1 | 1 | 0 | 1 | 0 | 2 | 2 | 10 | 2 | 5 | 4 | 1 | 3 | 4 | 3 | 1 | 1 | 0 | 0 | 2 |
2,948 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_azure_blob.py
|
tests.test_storage_azure_blob.test_mocked_storage.AppendBlobService
|
class AppendBlobService(BlobService):
"""azure.storage.blob.appendblobservice.AppendBlobService."""
BLOB_TYPE = _BlobTypes.AppendBlob
def create_blob(self, container_name=None, blob_name=None, **_):
"""azure.storage.blob.appendblobservice.AppendBlobService.
create_blob"""
storage_mock.put_object(container_name, blob_name, headers=dict(
blob_type=self.BLOB_TYPE), new_file=True)
@staticmethod
def append_block(container_name=None, blob_name=None, block=None, **_):
"""azure.storage.blob.appendblobservice.AppendBlobService.
append_block"""
start = storage_mock.get_object_size(container_name, blob_name)
storage_mock.put_object(
container_name, blob_name, content=block,
data_range=(start, start + len(block)))
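
# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the append arithmetic
# used by append_block above. New data is written at the current object
# size and occupies the range [start, start + len(block)).
start, block = 100, b'abcd'
assert (start, start + len(block)) == (100, 104)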
|
class AppendBlobService(BlobService):
'''azure.storage.blob.appendblobservice.AppendBlobService.'''
def create_blob(self, container_name=None, blob_name=None, **_):
'''azure.storage.blob.appendblobservice.AppendBlobService.
create_blob'''
pass
@staticmethod
def append_block(container_name=None, blob_name=None, block=None, **_):
'''azure.storage.blob.appendblobservice.AppendBlobService.
append_block'''
pass
| 4 | 3 | 6 | 0 | 4 | 2 | 1 | 0.45 | 1 | 1 | 0 | 0 | 1 | 0 | 2 | 12 | 18 | 2 | 11 | 6 | 7 | 5 | 7 | 5 | 4 | 1 | 1 | 0 | 2 |
2,949 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_azure_blob.py
|
tests.test_storage_azure_blob.test_mocked_storage.BlobService
|
class BlobService:
"""azure.storage.blob.baseblobservice.BaseBlobService"""
BLOB_TYPE = None
def __init__(self, *_, **__):
"""azure.storage.blob.baseblobservice.BaseBlobService.__init__"""
@staticmethod
def copy_blob(container_name=None, blob_name=None, copy_source=None,
**_):
"""azure.storage.blob.baseblobservice.BaseBlobService.copy_blob"""
copy_source = copy_source.split(root + '/')[1]
storage_mock.copy_object(
src_path=copy_source, dst_locator=container_name,
dst_path=blob_name)
def get_blob_properties(self, container_name=None, blob_name=None):
"""azure.storage.blob.baseblobservice.BaseBlobService.
get_blob_properties"""
args = container_name, blob_name
props = BlobProperties()
props.last_modified = storage_mock.get_object_mtime(*args)
props.content_length = storage_mock.get_object_size(*args)
props.blob_type = storage_mock.head_object(
container_name, blob_name)['blob_type']
return Blob(props=props, name=blob_name)
@staticmethod
def get_container_properties(container_name=None, **_):
"""azure.storage.blob.baseblobservice.BaseBlobService.
get_container_properties"""
props = ContainerProperties()
props.last_modified = storage_mock.get_locator_mtime(
container_name)
return Container(props=props, name=container_name)
@staticmethod
def list_containers(**_):
"""azure.storage.blob.baseblobservice.BaseBlobService.
list_containers"""
containers = []
for container_name in storage_mock.get_locators():
props = ContainerProperties()
props.last_modified = storage_mock.get_locator_mtime(
container_name)
containers.append(Container(props=props, name=container_name))
return containers
@staticmethod
def list_blobs(container_name=None, prefix=None, num_results=None, **_):
"""azure.storage.blob.baseblobservice.BaseBlobService.list_blobs"""
blobs = []
for blob_name in storage_mock.get_locator(
container_name, prefix=prefix, limit=num_results,
raise_404_if_empty=False):
props = BlobProperties()
props.last_modified = storage_mock.get_object_mtime(
container_name, blob_name)
props.content_length = storage_mock.get_object_size(
container_name, blob_name)
props.blob_type = storage_mock.head_object(
container_name, blob_name)['blob_type']
blobs.append(Blob(props=props, name=blob_name))
return blobs
@staticmethod
def create_container(container_name=None, **_):
"""azure.storage.blob.baseblobservice.BaseBlobService.
create_container"""
storage_mock.put_locator(container_name)
@staticmethod
def delete_container(container_name=None, **_):
"""azure.storage.blob.baseblobservice.BaseBlobService.
delete_container"""
storage_mock.delete_locator(container_name)
@staticmethod
def delete_blob(container_name=None, blob_name=None, **_):
"""azure.storage.blob.baseblobservice.BaseBlobService.delete_blob"""
storage_mock.delete_object(container_name, blob_name)
@staticmethod
def get_blob_to_stream(
container_name=None, blob_name=None, stream=None,
start_range=None, end_range=None, **_):
"""azure.storage.blob.baseblobservice.BaseBlobService.
get_blob_to_stream"""
if end_range is not None:
end_range += 1
stream.write(storage_mock.get_object(
container_name, blob_name, data_range=(start_range, end_range)))
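
# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the range convention
# handled by get_blob_to_stream above. Azure byte ranges are inclusive,
# so the mock adds 1 to end_range before slicing Python-style.
start_range, end_range = 0, 9            # Azure range covering 10 bytes
data = bytes(range(16))[start_range:end_range + 1]
assert len(data) == 10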
|
class BlobService:
'''azure.storage.blob.baseblobservice.BaseBlobService'''
def __init__(self, *_, **__):
'''azure.storage.blob.baseblobservice.BaseBlobService.__init__'''
pass
@staticmethod
def copy_blob(container_name=None, blob_name=None, copy_source=None,
**_):
'''azure.storage.blob.baseblobservice.BaseBlobService.copy_blob'''
pass
def get_blob_properties(self, container_name=None, blob_name=None):
'''azure.storage.blob.baseblobservice.BaseBlobService.
get_blob_properties'''
pass
@staticmethod
def get_container_properties(container_name=None, **_):
'''azure.storage.blob.baseblobservice.BaseBlobService.
get_container_properties'''
pass
@staticmethod
def list_containers(**_):
'''azure.storage.blob.baseblobservice.BaseBlobService.
list_containers'''
pass
@staticmethod
def list_blobs(container_name=None, prefix=None, num_results=None, **_):
'''azure.storage.blob.baseblobservice.BaseBlobService.list_blobs'''
pass
@staticmethod
def create_container(container_name=None, **_):
'''azure.storage.blob.baseblobservice.BaseBlobService.
create_container'''
pass
@staticmethod
def delete_container(container_name=None, **_):
'''azure.storage.blob.baseblobservice.BaseBlobService.
delete_container'''
pass
@staticmethod
def delete_blob(container_name=None, blob_name=None, **_):
'''azure.storage.blob.baseblobservice.BaseBlobService.delete_blob'''
pass
@staticmethod
def get_blob_to_stream(
container_name=None, blob_name=None, stream=None,
start_range=None, end_range=None, **_):
'''azure.storage.blob.baseblobservice.BaseBlobService.
get_blob_to_stream'''
pass
| 19 | 11 | 7 | 0 | 5 | 2 | 1 | 0.27 | 0 | 0 | 0 | 3 | 2 | 0 | 10 | 10 | 91 | 10 | 64 | 32 | 42 | 17 | 43 | 21 | 32 | 2 | 0 | 1 | 13 |
2,950 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_azure_blob.py
|
tests.test_storage_azure_blob.test_mocked_storage.BlockBlobService
|
class BlockBlobService(BlobService):
"""azure.storage.blob.blockblobservice.BlockBlobService"""
BLOB_TYPE = _BlobTypes.BlockBlob
def create_blob_from_bytes(
self, container_name=None, blob_name=None, blob=None, **_):
"""azure.storage.blob.blockblobservice.BlockBlobService.
create_blob_from_bytes"""
storage_mock.put_object(
container_name, blob_name, blob, headers=dict(
blob_type=self.BLOB_TYPE), new_file=True)
@staticmethod
def put_block(container_name=None, blob_name=None, block=None,
block_id=None, **_):
"""azure.storage.blob.blockblobservice.BlockBlobService.put_block"""
storage_mock.put_object(
container_name, '%s.%s' % (blob_name, block_id), content=block)
@staticmethod
def put_block_list(container_name=None, blob_name=None,
block_list=None, **_):
"""azure.storage.blob.blockblobservice.BlockBlobService.
put_block_list"""
blocks = []
for block in block_list:
blocks.append('%s.%s' % (blob_name, block.id))
storage_mock.concat_objects(container_name, blob_name, blocks)
@staticmethod
def get_block_list(**_):
"""azure.storage.blob.blockblobservice.BlockBlobService.
get_block_list"""
return BlobBlockList()
|
class BlockBlobService(BlobService):
'''azure.storage.blob.blockblobservice.BlockBlobService'''
def create_blob_from_bytes(
self, container_name=None, blob_name=None, blob=None, **_):
'''azure.storage.blob.blockblobservice.BlockBlobService.
create_blob_from_bytes'''
pass
@staticmethod
def put_block(container_name=None, blob_name=None, block=None,
block_id=None, **_):
'''azure.storage.blob.blockblobservice.BlockBlobService.put_block'''
pass
@staticmethod
def put_block_list(container_name=None, blob_name=None,
block_list=None, **_):
'''azure.storage.blob.blockblobservice.BlockBlobService.
put_block_list'''
pass
@staticmethod
def get_block_list(**_):
'''azure.storage.blob.blockblobservice.BlockBlobService.
get_block_list'''
pass
| 8 | 5 | 6 | 0 | 4 | 2 | 1 | 0.36 | 1 | 1 | 0 | 0 | 1 | 0 | 4 | 14 | 34 | 4 | 22 | 14 | 11 | 8 | 13 | 8 | 8 | 2 | 1 | 1 | 5 |
2,951 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_azure_blob.py
|
tests.test_storage_azure_blob.test_mocked_storage.PageBlobService
|
class PageBlobService(BlobService):
"""azure.storage.blob.pageblobservice.PageBlobService"""
BLOB_TYPE = _BlobTypes.PageBlob
def create_blob(self, container_name=None, blob_name=None,
content_length=None, **_):
"""azure.storage.blob.pageblobservice.PageBlobService.create_blob"""
if content_length:
# Must be page aligned
assert not content_length % 512
# Create null pages
content = b'\0' * content_length
else:
content = None
storage_mock.put_object(
container_name, blob_name, content=content, headers=dict(
blob_type=self.BLOB_TYPE), new_file=True)
def create_blob_from_bytes(
self, container_name=None, blob_name=None, blob=None, **_):
"""azure.storage.blob.pageblobservice.PageBlobService.
create_blob_from_bytes"""
# Must be page aligned
assert not len(blob) % 512
storage_mock.put_object(
container_name, blob_name, content=blob, headers=dict(
blob_type=self.BLOB_TYPE), new_file=True)
def resize_blob(self, container_name=None, blob_name=None,
content_length=None, **_):
"""azure.storage.blob.pageblobservice.PageBlobService.
resize_blob"""
# Must be page aligned
assert not content_length % 512
# Add padding to resize blob
size = storage_mock.get_object_size(container_name, blob_name)
padding = content_length - size
storage_mock.put_object(
container_name, blob_name, content=b'\0' * padding,
data_range=(content_length - padding, content_length))
@staticmethod
def update_page(container_name=None, blob_name=None,
page=None, start_range=None, end_range=None, **_):
"""azure.storage.blob.pageblobservice.PageBlobService.update_page"""
            # The Azure range end is inclusive, so add 1 for Python-style slicing
end_range += 1
# Must be page aligned
assert not start_range % 512
assert not end_range % 512
storage_mock.put_object(
container_name, blob_name, content=page,
data_range=(start_range, end_range))
|
class PageBlobService(BlobService):
'''azure.storage.blob.pageblobservice.PageBlobService'''
def create_blob(self, container_name=None, blob_name=None,
content_length=None, **_):
'''azure.storage.blob.pageblobservice.PageBlobService.create_blob'''
pass
def create_blob_from_bytes(
self, container_name=None, blob_name=None, blob=None, **_):
'''azure.storage.blob.pageblobservice.PageBlobService.
create_blob_from_bytes'''
pass
def resize_blob(self, container_name=None, blob_name=None,
content_length=None, **_):
'''azure.storage.blob.pageblobservice.PageBlobService.
resize_blob'''
pass
@staticmethod
def update_page(container_name=None, blob_name=None,
page=None, start_range=None, end_range=None, **_):
'''azure.storage.blob.pageblobservice.PageBlobService.update_page'''
pass
| 6 | 5 | 13 | 2 | 8 | 3 | 1 | 0.4 | 1 | 1 | 0 | 0 | 3 | 0 | 4 | 14 | 59 | 10 | 35 | 14 | 25 | 14 | 21 | 9 | 16 | 2 | 1 | 1 | 5 |
2,952 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_azure_file.py
|
tests.test_storage_azure_file.test_mocked_storage.FileService
|
class FileService:
"""azure.storage.file.fileservice.FileService"""
def __init__(self, *_, **kwargs):
"""azure.storage.file.fileservice.FileService.__init__"""
self.kwargs = kwargs
@staticmethod
def copy_file(share_name=None, directory_name=None, file_name=None,
copy_source=None, **_):
"""azure.storage.file.fileservice.FileService.copy_file"""
copy_source = copy_source.split(root + '/')[1]
storage_mock.copy_object(
src_path=copy_source, dst_locator=share_name,
dst_path=join(directory_name, file_name))
@staticmethod
def get_file_properties(
share_name=None, directory_name=None, file_name=None, **_):
"""azure.storage.file.fileservice.FileService.get_file_properties"""
args = share_name, join(directory_name, file_name)
props = FileProperties()
props.last_modified = storage_mock.get_object_mtime(*args)
props.content_length = storage_mock.get_object_size(*args)
return File(props=props, name=file_name)
@staticmethod
def get_directory_properties(share_name=None, directory_name=None, **_):
"""
azure.storage.file.fileservice.FileService.get_directory_properties
"""
props = DirectoryProperties()
props.last_modified = storage_mock.get_object_mtime(
share_name, directory_name + '/')
return Directory(props=props, name=directory_name)
@staticmethod
def get_share_properties(share_name=None, **_):
"""
azure.storage.file.fileservice.FileService.get_share_properties
"""
props = ShareProperties()
props.last_modified = storage_mock.get_locator_mtime(share_name)
return Share(props=props, name=share_name)
@staticmethod
def list_shares():
"""azure.storage.file.fileservice.FileService.list_shares"""
shares = []
for share_name in storage_mock.get_locators():
props = ShareProperties()
props.last_modified = storage_mock.get_locator_mtime(
share_name)
shares.append(Share(props=props, name=share_name))
return shares
@staticmethod
def list_directories_and_files(
share_name=None, directory_name=None, num_results=None, **_):
"""
azure.storage.file.fileservice.FileService.
list_directories_and_files
"""
content = []
for name in storage_mock.get_locator(
share_name, prefix=directory_name, limit=num_results,
first_level=True, relative=True):
                    # Skip the directory itself (empty relative name)
if not name:
continue
# Directory
elif name.endswith('/'):
content.append(Directory(
props=DirectoryProperties(), name=name))
# File
else:
props = FileProperties()
path = join(directory_name, name)
props.last_modified = storage_mock.get_object_mtime(
share_name, path)
props.content_length = storage_mock.get_object_size(
share_name, path)
content.append(File(props=props, name=name))
return content
@staticmethod
def create_directory(share_name=None, directory_name=None, **_):
"""azure.storage.file.fileservice.FileService.create_directory"""
storage_mock.put_object(share_name, directory_name + '/')
@staticmethod
def create_share(share_name=None, **_):
"""azure.storage.file.fileservice.FileService.create_share"""
storage_mock.put_locator(share_name)
@staticmethod
def create_file(share_name=None, directory_name=None,
file_name=None, content_length=None, **_):
"""azure.storage.file.fileservice.FileService.create_file"""
if content_length:
# Create null padding
content = b'\0' * content_length
else:
content = None
storage_mock.put_object(
share_name, join(directory_name, file_name), content=content,
new_file=True)
@staticmethod
def delete_directory(share_name=None, directory_name=None, **_):
"""azure.storage.file.fileservice.FileService.delete_directory"""
storage_mock.delete_object(share_name, directory_name)
@staticmethod
def delete_share(share_name=None, **_):
"""azure.storage.file.fileservice.FileService.delete_share"""
storage_mock.delete_locator(share_name)
@staticmethod
def delete_file(share_name=None, directory_name=None,
file_name=None, **_):
"""azure.storage.file.fileservice.FileService.delete_file"""
storage_mock.delete_object(
share_name, join(directory_name, file_name))
@staticmethod
def get_file_to_stream(
share_name=None, directory_name=None, file_name=None,
stream=None, start_range=None, end_range=None, **_):
"""azure.storage.file.fileservice.FileService.get_file_to_stream"""
if end_range is not None:
end_range += 1
stream.write(storage_mock.get_object(
share_name, join(directory_name, file_name),
data_range=(start_range, end_range)))
@staticmethod
def create_file_from_bytes(
share_name=None, directory_name=None, file_name=None,
file=None, **_):
"""azure.storage.file.fileservice.FileService.
create_file_from_bytes"""
storage_mock.put_object(
share_name, join(directory_name, file_name), file,
new_file=True)
@staticmethod
def update_range(share_name=None, directory_name=None, file_name=None,
data=None, start_range=None, end_range=None, **_):
"""azure.storage.file.fileservice.FileService.update_range"""
if end_range is not None:
end_range += 1
storage_mock.put_object(
share_name, join(directory_name, file_name), content=data,
data_range=(start_range, end_range))
@staticmethod
def resize_file(share_name=None, directory_name=None,
file_name=None, content_length=None, **_):
"""azure.storage.file.fileservice.FileService.resize_file"""
path = join(directory_name, file_name)
# Add padding to resize file
size = storage_mock.get_object_size(share_name, path)
padding = content_length - size
storage_mock.put_object(
share_name, path, content=b'\0' * padding,
data_range=(content_length - padding, content_length))
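
# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the padding arithmetic
# used by resize_file above. Growing a file appends null bytes from the
# old size up to the requested content_length.
size, content_length = 100, 512
padding = content_length - size
data_range = (content_length - padding, content_length)
assert padding == 412 and data_range == (100, 512)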
|
class FileService:
'''azure.storage.file.fileservice.FileService'''
def __init__(self, *_, **kwargs):
'''azure.storage.file.fileservice.FileService.__init__'''
pass
@staticmethod
def copy_file(share_name=None, directory_name=None, file_name=None,
copy_source=None, **_):
'''azure.storage.file.fileservice.FileService.copy_file'''
pass
@staticmethod
def get_file_properties(
share_name=None, directory_name=None, file_name=None, **_):
'''azure.storage.file.fileservice.FileService.get_file_properties'''
pass
@staticmethod
def get_directory_properties(share_name=None, directory_name=None, **_):
'''
azure.storage.file.fileservice.FileService.get_directory_properties
'''
pass
@staticmethod
def get_share_properties(share_name=None, **_):
'''
azure.storage.file.fileservice.FileService.get_share_properties
'''
pass
@staticmethod
def list_shares():
'''azure.storage.file.fileservice.FileService.list_shares'''
pass
@staticmethod
def list_directories_and_files(
share_name=None, directory_name=None, num_results=None, **_):
'''
azure.storage.file.fileservice.FileService.
list_directories_and_files
'''
pass
@staticmethod
def create_directory(share_name=None, directory_name=None, **_):
'''azure.storage.file.fileservice.FileService.create_directory'''
pass
@staticmethod
def create_share(share_name=None, **_):
'''azure.storage.file.fileservice.FileService.create_share'''
pass
@staticmethod
def create_file(share_name=None, directory_name=None,
file_name=None, content_length=None, **_):
'''azure.storage.file.fileservice.FileService.create_file'''
pass
@staticmethod
def delete_directory(share_name=None, directory_name=None, **_):
'''azure.storage.file.fileservice.FileService.delete_directory'''
pass
@staticmethod
def delete_share(share_name=None, **_):
'''azure.storage.file.fileservice.FileService.delete_share'''
pass
@staticmethod
def delete_file(share_name=None, directory_name=None,
file_name=None, **_):
'''azure.storage.file.fileservice.FileService.delete_file'''
pass
@staticmethod
def get_file_to_stream(
share_name=None, directory_name=None, file_name=None,
stream=None, start_range=None, end_range=None, **_):
'''azure.storage.file.fileservice.FileService.get_file_to_stream'''
pass
@staticmethod
def create_file_from_bytes(
share_name=None, directory_name=None, file_name=None,
file=None, **_):
'''azure.storage.file.fileservice.FileService.
create_file_from_bytes'''
pass
@staticmethod
def update_range(share_name=None, directory_name=None, file_name=None,
data=None, start_range=None, end_range=None, **_):
'''azure.storage.file.fileservice.FileService.update_range'''
pass
@staticmethod
def resize_file(share_name=None, directory_name=None,
file_name=None, content_length=None, **_):
'''azure.storage.file.fileservice.FileService.resize_file'''
pass
| 34 | 18 | 8 | 0 | 6 | 2 | 1 | 0.26 | 0 | 0 | 0 | 0 | 1 | 1 | 17 | 17 | 171 | 22 | 118 | 61 | 73 | 31 | 69 | 34 | 51 | 4 | 0 | 2 | 24 |
2,953 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_http.py
|
tests.test_storage_http.test_handle_http_errors.Response
|
class Response:
"""Dummy response"""
status_code = 200
reason = 'reason'
raised = False
def raise_for_status(self):
"""Do nothing"""
self.raised = True
|
class Response:
'''Dummy response'''
def raise_for_status(self):
'''Do nothing'''
pass
| 2 | 2 | 3 | 0 | 2 | 1 | 1 | 0.33 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 9 | 1 | 6 | 5 | 4 | 2 | 6 | 5 | 4 | 1 | 0 | 0 | 1 |
2,954 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_http.py
|
tests.test_storage_http.test_mocked_storage.HTTPException
|
class HTTPException(Exception):
"""HTTP Exception
Args:
status_code (int): HTTP status
"""
def __init__(self, status_code):
self.status_code = status_code
|
class HTTPException(Exception):
'''HTTP Exception
Args:
status_code (int): HTTP status
'''
def __init__(self, status_code):
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 1.33 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 11 | 9 | 2 | 3 | 3 | 1 | 4 | 3 | 3 | 1 | 1 | 3 | 0 | 1 |
2,955 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_storage_http.py
|
tests.test_storage_http.test_mocked_storage.Response
|
class Response:
"""HTTP request response"""
status_code = 200
reason = 'reason'
def __init__(self, **attributes):
for name, value in attributes.items():
setattr(self, name, value)
def raise_for_status(self):
"""Raise for status"""
if self.status_code >= 300:
raise HTTPError(self.reason, response=self)
|
class Response:
'''HTTP request response'''
def __init__(self, **attributes):
pass
def raise_for_status(self):
'''Raise for status'''
pass
| 3 | 2 | 4 | 0 | 3 | 1 | 2 | 0.22 | 0 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 13 | 2 | 9 | 6 | 6 | 2 | 9 | 6 | 6 | 2 | 0 | 1 | 4 |
2,956 |
Accelize/pycosio
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Accelize_pycosio/tests/test_core_io_system.py
|
tests.test_core_io_system.test_system_base.DummySystem
|
class DummySystem(SystemBase):
"""Dummy System"""
def get_client_kwargs(self, path):
"""Checks arguments and returns fake result"""
assert path
dummy_client_kwargs['path'] = path
return dummy_client_kwargs
def _get_client(self):
"""Returns fake result"""
return client
def _get_roots(self):
"""Returns fake result"""
return roots
def _list_objects(self, client_kwargs, *_, **__):
"""Checks arguments and returns fake result"""
assert client_kwargs == dummy_client_kwargs
path = client_kwargs['path'].strip('/')
if path == 'locator_no_access':
raise ObjectPermissionError
elif path in ('locator', 'locator/dir1'):
for obj in objects:
yield obj, object_header.copy()
else:
            return  # PEP 479: raising StopIteration in a generator is an error
def _list_locators(self):
"""Returns fake result"""
for locator in locators:
yield locator, object_header.copy()
def _head(self, client_kwargs):
"""Checks arguments and returns fake result"""
assert client_kwargs == dummy_client_kwargs
if raise_not_exists_exception:
raise ObjectNotFoundError
return object_header.copy()
def _make_dir(self, client_kwargs):
"""Checks arguments"""
path = client_kwargs['path']
assert '/' not in path or path[-1] == '/'
assert client_kwargs == dummy_client_kwargs
def _remove(self, client_kwargs):
"""Checks arguments"""
assert client_kwargs == dummy_client_kwargs
|
class DummySystem(SystemBase):
'''Dummy System'''
def get_client_kwargs(self, path):
'''Checks arguments and returns fake result'''
pass
def _get_client(self):
'''Returns fake result'''
pass
def _get_roots(self):
'''Returns fake result'''
pass
def _list_objects(self, client_kwargs, *_, **__):
'''Checks arguments and returns fake result'''
pass
def _list_locators(self):
'''Returns fake result'''
pass
def _head(self, client_kwargs):
'''Checks arguments and returns fake result'''
pass
def _make_dir(self, client_kwargs):
'''Checks arguments'''
pass
def _remove(self, client_kwargs):
'''Checks arguments'''
pass
| 9 | 9 | 5 | 0 | 4 | 1 | 2 | 0.27 | 1 | 3 | 2 | 0 | 8 | 0 | 8 | 8 | 51 | 9 | 33 | 13 | 24 | 9 | 31 | 13 | 22 | 4 | 1 | 2 | 13 |
2,957 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_page_blob.py
|
pycosio.storage.azure_blob._page_blob.AzurePageBlobBufferedIO
|
class AzurePageBlobBufferedIO(AzureBlobBufferedIO,
ObjectBufferedIORandomWriteBase):
"""Buffered binary Azure Page Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
        If not 512-byte aligned, it will be rounded up to be page aligned.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
    unsecure (bool): If True, disables TLS/SSL to improve
        transfer performance, but makes the connection insecure.
    content_length (int): Size to preallocate on new file creation.
        This is not mandatory: the file is resized as needed, but
        preallocation improves performance when the file size is known
        in advance. Any value will be rounded to be page aligned.
        Defaults to 0.
    null_strip (bool): If True, strips null characters from the end of
        read data to remove page padding, and ignores trailing null
        characters on the last page when seeking from the end.
        Defaults to True.
"""
__DEFAULT_CLASS = False
_RAW_CLASS = AzurePageBlobRawIO
#: Maximal buffer_size value in bytes (Maximum upload page size)
MAXIMUM_BUFFER_SIZE = PageBlobService.MAX_PAGE_SIZE
#: Minimal buffer_size value in bytes (Page size)
MINIMUM_BUFFER_SIZE = 512
def __init__(self, *args, **kwargs):
ObjectBufferedIORandomWriteBase.__init__(self, *args, **kwargs)
if self._writable:
page_diff = self._buffer_size % 512
if page_diff:
# Round buffer size if not multiple of page size
self._buffer_size = min(
self._buffer_size + 512 - page_diff,
self.MAXIMUM_BUFFER_SIZE)
def _flush(self):
"""
Flush the write buffers of the stream if applicable.
In write mode, send the buffer content to the cloud object.
"""
buffer = self._get_buffer()
start = self._buffer_size * (self._seek - 1)
self._write_futures.append(self._workers.submit(
self._raw_flush, buffer=buffer, start=start,
end=start + len(buffer)))
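
# ----------------------------------------------------------------------
# Hedged check (not part of the original source) of the rounding logic in
# __init__ above: a buffer size that is not a multiple of 512 bytes is
# rounded up to the next page boundary, capped at the maximum page size
# (assumed to be 4 MiB here).
maximum_buffer_size = 4 * 1024 * 1024    # stands in for MAXIMUM_BUFFER_SIZE
buffer_size = 1000
page_diff = buffer_size % 512
if page_diff:
    buffer_size = min(buffer_size + 512 - page_diff, maximum_buffer_size)
assert buffer_size == 1024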
|
class AzurePageBlobBufferedIO(AzureBlobBufferedIO,
ObjectBufferedIORandomWriteBase):
'''Buffered binary Azure Page Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
        If not 512-byte aligned, it will be rounded up to be page aligned.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
    unsecure (bool): If True, disables TLS/SSL to improve
        transfer performance, but makes the connection insecure.
    content_length (int): Size to preallocate on new file creation.
        This is not mandatory: the file is resized as needed, but
        preallocation improves performance when the file size is known
        in advance. Any value will be rounded to be page aligned.
        Defaults to 0.
    null_strip (bool): If True, strips null characters from the end of
        read data to remove page padding, and ignores trailing null
        characters on the last page when seeking from the end.
        Defaults to True.
'''
def __init__(self, *args, **kwargs):
pass
def _flush(self):
'''
Flush the write buffers of the stream if applicable.
In write mode, send the buffer content to the cloud object.
'''
pass
| 3 | 2 | 11 | 2 | 7 | 3 | 2 | 1.55 | 2 | 0 | 0 | 0 | 2 | 2 | 2 | 54 | 59 | 8 | 20 | 13 | 16 | 31 | 15 | 11 | 12 | 3 | 7 | 2 | 4 |
2,958 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_block_blob.py
|
pycosio.storage.azure_blob._block_blob.AzureBlockBlobRawIO
|
class AzureBlockBlobRawIO(AzureBlobRawIO):
"""Binary Azure BLock Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
    unsecure (bool): If True, disables TLS/SSL to improve
        transfer performance, but makes the connection insecure.
"""
__DEFAULT_CLASS = False
@property
@memoizedmethod
def _client(self):
"""
Returns client instance.
Returns:
            azure.storage.blob.blockblobservice.BlockBlobService: client
"""
return self._system.client[_BLOB_TYPE]
def _flush(self, buffer):
"""
Flush the write buffer of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
"""
with _handle_azure_exception():
# Write entire file at once
self._client.create_blob_from_bytes(
blob=buffer.tobytes(), **self._client_kwargs)
def _create(self):
"""
Create the file if not exists.
"""
with _handle_azure_exception():
self._client.create_blob_from_bytes(blob=b'', **self._client_kwargs)
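
# ----------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): block blobs are
# flushed with a single "create_blob_from_bytes" call, so writing and
# closing uploads the whole object at once. Assumes the Azure Blob
# storage was mounted with credentials beforehand; names are placeholders.
import pycosio

with pycosio.open(
        'https://account.blob.core.windows.net/container/blob',
        'wb') as file:
    file.write(b'data')      # buffered in memory until flush
# leaving the context flushes the buffer and uploads the blob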
|
class AzureBlockBlobRawIO(AzureBlobRawIO):
    '''Binary Azure Block Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
'''
@property
@memoizedmethod
def _client(self):
'''
Returns client instance.
Returns:
azure.storage.blob.blockblobservice.BlockBlobService: client
'''
pass
def _flush(self, buffer):
'''
Flush the write buffer of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
'''
pass
def _create(self):
'''
Create the file if not exists.
'''
pass
| 6 | 4 | 8 | 1 | 3 | 5 | 1 | 2 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 57 | 45 | 6 | 13 | 6 | 7 | 26 | 10 | 5 | 6 | 1 | 8 | 1 | 3 |
2,959 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/exceptions.py
|
pycosio._core.exceptions.ObjectExistsError
|
class ObjectExistsError(ObjectException):
"""Reraised as "FileExistsError" by handle_os_exceptions"""
|
class ObjectExistsError(ObjectException):
'''Reraised as "FileExistsError" by handle_os_exceptions'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
2,960 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_system.py
|
pycosio.storage.azure_blob._system._AzureBlobSystem
|
class _AzureBlobSystem(_AzureBaseSystem):
"""
Azure Blobs Storage system.
Args:
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
"""
def copy(self, src, dst, other_system=None):
"""
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio.storage.azure._AzureBaseSystem subclass):
The source storage system.
"""
with _handle_azure_exception():
self._client_block.copy_blob(
copy_source=(other_system or self)._format_src_url(src, self),
**self.get_client_kwargs(dst))
def _get_client(self):
"""
Azure blob service
Returns:
dict of azure.storage.blob.baseblobservice.BaseBlobService subclass:
Service
"""
parameters = self._secured_storage_parameters().copy()
# Parameter added by pycosio and unsupported by blob services.
try:
del parameters['blob_type']
except KeyError:
pass
return {_BlobTypes.PageBlob: PageBlobService(**parameters),
_BlobTypes.BlockBlob: BlockBlobService(**parameters),
_BlobTypes.AppendBlob: AppendBlobService(**parameters)}
@property
@memoizedmethod
def _client_block(self):
"""
Storage client
Returns:
azure.storage.blob.blockblobservice.BlockBlobService: client
"""
return self.client[_DEFAULT_BLOB_TYPE]
@property
@memoizedmethod
def _default_blob_type(self):
"""
Return default blob type to use when creating objects.
Returns:
str: Blob type.
"""
return self._storage_parameters.get('blob_type', _DEFAULT_BLOB_TYPE)
def get_client_kwargs(self, path):
"""
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
"""
# Remove query string from URL
path = path.split('?', 1)[0]
container_name, blob_name = self.split_locator(path)
kwargs = dict(container_name=container_name)
# Blob
if blob_name:
kwargs['blob_name'] = blob_name
return kwargs
def _get_roots(self):
"""
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
"""
# URL:
# - http://<account>.blob.core.windows.net/<container>/<blob>
# - https://<account>.blob.core.windows.net/<container>/<blob>
# Note: "core.windows.net" may be replaced by another "endpoint_suffix"
return _re.compile(
r'https?://%s\.blob\.%s' % self._get_endpoint('blob')),
def _head(self, client_kwargs):
"""
Returns object or bucket HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
"""
with _handle_azure_exception():
# Blob
if 'blob_name' in client_kwargs:
result = self._client_block.get_blob_properties(**client_kwargs)
# Container
else:
result = self._client_block.get_container_properties(
**client_kwargs)
return self._model_to_dict(result)
def _list_locators(self):
"""
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
"""
with _handle_azure_exception():
for container in self._client_block.list_containers():
yield container.name, self._model_to_dict(container)
def _list_objects(self, client_kwargs, path, max_request_entries):
"""
Lists objects.
Args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
"""
client_kwargs = self._update_listing_client_kwargs(
client_kwargs, max_request_entries)
blob = None
with _handle_azure_exception():
for blob in self._client_block.list_blobs(
prefix=path, **client_kwargs):
yield blob.name, self._model_to_dict(blob)
# None only if the path doesn't exist
if blob is None:
raise ObjectNotFoundError
def _make_dir(self, client_kwargs):
"""
Make a directory.
Args:
client_kwargs (dict): Client arguments.
"""
with _handle_azure_exception():
# Blob
if 'blob_name' in client_kwargs:
return self._client_block.create_blob_from_bytes(
blob=b'', **client_kwargs)
# Container
return self._client_block.create_container(**client_kwargs)
def _remove(self, client_kwargs):
"""
Remove an object.
Args:
client_kwargs (dict): Client arguments.
"""
with _handle_azure_exception():
# Blob
if 'blob_name' in client_kwargs:
return self._client_block.delete_blob(**client_kwargs)
# Container
return self._client_block.delete_container(**client_kwargs)
|
class _AzureBlobSystem(_AzureBaseSystem):
'''
Azure Blobs Storage system.
Args:
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
'''
def copy(self, src, dst, other_system=None):
'''
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio.storage.azure._AzureBaseSystem subclass):
The source storage system.
'''
pass
def _get_client(self):
'''
Azure blob service
Returns:
dict of azure.storage.blob.baseblobservice.BaseBlobService subclass:
Service
'''
pass
@property
@memoizedmethod
def _client_block(self):
'''
Storage client
Returns:
azure.storage.blob.blockblobservice.BlockBlobService: client
'''
pass
@property
@memoizedmethod
def _default_blob_type(self):
'''
Return default blob type to use when creating objects.
Returns:
str: Blob type.
'''
pass
def get_client_kwargs(self, path):
'''
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
'''
pass
def _get_roots(self):
'''
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
'''
pass
def _head(self, client_kwargs):
'''
Returns object or bucket HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
'''
pass
def _list_locators(self):
'''
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
'''
pass
def _list_objects(self, client_kwargs, path, max_request_entries):
'''
Lists objects.
Args:
client_kwargs (dict): Client arguments.
path (str): Path relative to current locator.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict
'''
pass
def _make_dir(self, client_kwargs):
'''
Make a directory.
Args:
client_kwargs (dict): Client arguments.
'''
pass
def _remove(self, client_kwargs):
'''
Remove an object.
Args:
client_kwargs (dict): Client arguments.
'''
pass
| 16 | 12 | 15 | 2 | 6 | 8 | 2 | 1.41 | 1 | 3 | 1 | 0 | 11 | 0 | 11 | 76 | 196 | 37 | 66 | 20 | 50 | 93 | 52 | 18 | 40 | 3 | 6 | 2 | 19 |
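The container/blob split performed by get_client_kwargs above can be shown standalone; this sketch assumes a simplified split_locator that stands in for the helper inherited from the base system class:

# Sketch of how get_client_kwargs maps a path to client arguments.
def split_locator(path):
    """Split 'container/blob/name' into ('container', 'blob/name')."""
    locator, _, relpath = path.strip('/').partition('/')
    return locator, relpath

def get_client_kwargs(path):
    # Remove query string from URL, keep only the path part
    path = path.split('?', 1)[0]
    container_name, blob_name = split_locator(path)
    kwargs = dict(container_name=container_name)
    if blob_name:  # Blob, otherwise container only
        kwargs['blob_name'] = blob_name
    return kwargs

assert get_client_kwargs('mycontainer/dir/file.bin?sas=token') == {
    'container_name': 'mycontainer', 'blob_name': 'dir/file.bin'}
assert get_client_kwargs('mycontainer') == {'container_name': 'mycontainer'}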
2,961 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_file.py
|
pycosio.storage.azure_file.AzureFileBufferedIO
|
class AzureFileBufferedIO(_ObjectBufferedIORandomWriteBase):
"""Buffered binary Azure Files Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.file.fileservice.FileService" for more information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
content_length (int): Defines the size to preallocate on new file
creation. This is not mandatory, and the file will be resized as
needed, but preallocating improves performance when the file size
is known in advance.
"""
_RAW_CLASS = AzureFileRawIO
#: Maximal buffer_size value in bytes (Maximum upload range size)
MAXIMUM_BUFFER_SIZE = _FileService.MAX_RANGE_SIZE
|
class AzureFileBufferedIO(_ObjectBufferedIORandomWriteBase):
'''Buffered binary Azure Files Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.file.fileservice.FileService" for more information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
content_length (int): Defines the size to preallocate on new file
creation. This is not mandatory, and the file will be resized as
needed, but preallocating improves performance when the file size
is known in advance.
'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 6.67 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 51 | 25 | 2 | 3 | 3 | 2 | 20 | 3 | 3 | 2 | 0 | 7 | 0 | 0 |
2,962 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/exceptions.py
|
pycosio._core.exceptions.ObjectException
|
class ObjectException(Exception):
"""Pycosio base exception"""
|
class ObjectException(Exception):
'''Pycosio base exception'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 4 | 0 | 0 | 0 | 10 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
2,963 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/http.py
|
pycosio.storage.http.HTTPBufferedIO
|
class HTTPBufferedIO(_ObjectBufferedIOBase):
"""Buffered binary HTTP Object I/O
Args:
name (path-like object): URL to the file which will be opened.
mode (str): The mode can be 'r' for reading.
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
"""
_RAW_CLASS = HTTPRawIO
def _close_writable(self):
"""
Closes the object in write mode.
Performs any finalization operation required to
complete the object writing on the cloud.
"""
def _flush(self):
"""
Flush the write buffers of the stream if applicable.
In write mode, send the buffer content to the cloud object.
"""
|
class HTTPBufferedIO(_ObjectBufferedIOBase):
'''Buffered binary HTTP Object I/O
Args:
name (path-like object): URL to the file which will be opened.
mode (str): The mode can be 'r' for reading.
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
'''
def _close_writable(self):
'''
Closes the object in write mode.
Performs any finalization operation required to
complete the object writing on the cloud.
'''
pass
def _flush(self):
'''
Flush the write buffers of the stream if applicable.
In write mode, send the buffer content to the cloud object.
'''
pass
| 3 | 3 | 7 | 1 | 1 | 5 | 1 | 4.75 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 50 | 29 | 6 | 4 | 4 | 1 | 19 | 4 | 4 | 1 | 1 | 6 | 0 | 2 |
2,964 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_file.py
|
pycosio.storage.azure_file.AzureFileRawIO
|
class AzureFileRawIO(_AzureStorageRawIORangeWriteBase):
"""Binary Azure Files Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.file.fileservice.FileService" for more information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
content_length (int): Defines the size to preallocate on new file
creation. This is not mandatory, and the file will be resized as
needed, but preallocating improves performance when the file size
is known in advance.
"""
_SYSTEM_CLASS = _AzureFileSystem
#: Maximum size of one flush operation
MAX_FLUSH_SIZE = _FileService.MAX_RANGE_SIZE
@property
@_memoizedmethod
def _get_to_stream(self):
"""
Azure storage function that reads a range into a stream.
Returns:
function: Read function.
"""
return self._client.get_file_to_stream
@property
@_memoizedmethod
def _resize(self):
"""
Azure storage function that resizes an object.
Returns:
function: Resize function.
"""
return self._client.resize_file
@property
@_memoizedmethod
def _create_from_size(self):
"""
Azure storage function that creates an object.
Returns:
function: Create function.
"""
return self._client.create_file
def _update_range(self, data, **kwargs):
"""
Update range with data
Args:
data (bytes): data.
"""
self._client.update_range(data=data, **kwargs)
|
class AzureFileRawIO(_AzureStorageRawIORangeWriteBase):
'''Binary Azure Files Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.file.fileservice.FileService" for more information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
content_length (int): Defines the size to preallocate on new file
creation. This is not mandatory, and the file will be resized as
needed, but preallocating improves performance when the file size
is known in advance.
'''
@property
@_memoizedmethod
def _get_to_stream(self):
'''
Azure storage function that reads a range into a stream.
Returns:
function: Read function.
'''
pass
@property
@_memoizedmethod
def _resize(self):
'''
Azure storage function that resizes an object.
Returns:
function: Resize function.
'''
pass
@property
@_memoizedmethod
def _create_from_size(self):
'''
Azure storage function that creates an object.
Returns:
function: Create function.
'''
pass
def _update_range(self, data, **kwargs):
'''
Update range with data
Args:
data (bytes): data.
'''
pass
| 11 | 5 | 8 | 1 | 2 | 5 | 1 | 2.12 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 71 | 63 | 10 | 17 | 10 | 6 | 36 | 11 | 7 | 6 | 1 | 8 | 0 | 4 |
2,965 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_file.py
|
pycosio.storage.azure_file._AzureFileSystem
|
class _AzureFileSystem(_AzureBaseSystem, _FileSystemBase):
"""
Azure Files Storage system.
Args:
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.file.fileservice.FileService" for more information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
"""
def copy(self, src, dst, other_system=None):
"""
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio.storage.azure._AzureBaseSystem subclass):
The source storage system.
"""
with _handle_azure_exception():
self.client.copy_file(
copy_source=(other_system or self)._format_src_url(src, self),
**self.get_client_kwargs(dst))
copy_from_azure_blobs = copy # Allows copy from Azure Blobs Storage
def _get_client(self):
"""
Azure file service
Returns:
azure.storage.file.fileservice.FileService: Service
"""
return _FileService(**self._secured_storage_parameters())
def get_client_kwargs(self, path):
"""
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
"""
# Remove query string from URL
path = path.split('?', 1)[0]
share_name, relpath = self.split_locator(path)
kwargs = dict(share_name=share_name)
# Directory
if relpath and relpath[-1] == '/':
kwargs['directory_name'] = relpath.rstrip('/')
# File
elif relpath:
try:
kwargs['directory_name'], kwargs['file_name'] = relpath.rsplit(
'/', 1)
except ValueError:
kwargs['directory_name'] = ''
kwargs['file_name'] = relpath
# Else, Share only
return kwargs
def _get_roots(self):
"""
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
"""
# SMB
# - smb://<account>.file.core.windows.net/<share>/<file>
# Mounted share
# - //<account>.file.core.windows.net/<share>/<file>
# - \\<account>.file.core.windows.net\<share>\<file>
# URL:
# - http://<account>.file.core.windows.net/<share>/<file>
# - https://<account>.file.core.windows.net/<share>/<file>
# Note: "core.windows.net" may be replaced by another endpoint
return _re.compile(
r'(https?://|smb://|//|\\)%s\.file\.%s' %
self._get_endpoint('file')),
def _head(self, client_kwargs):
"""
Returns object or bucket HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
"""
with _handle_azure_exception():
# File
if 'file_name' in client_kwargs:
result = self.client.get_file_properties(**client_kwargs)
# Directory
elif 'directory_name' in client_kwargs:
result = self.client.get_directory_properties(**client_kwargs)
# Share
else:
result = self.client.get_share_properties(**client_kwargs)
return self._model_to_dict(result)
def _list_locators(self):
"""
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
"""
with _handle_azure_exception():
for share in self.client.list_shares():
yield share.name, self._model_to_dict(share)
def _list_objects(self, client_kwargs, max_request_entries):
"""
Lists objects.
Args:
client_kwargs (dict): Client arguments.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict,
directory bool
"""
client_kwargs = self._update_listing_client_kwargs(
client_kwargs, max_request_entries)
with _handle_azure_exception():
for obj in self.client.list_directories_and_files(**client_kwargs):
yield (obj.name, self._model_to_dict(obj),
isinstance(obj, _Directory))
def _make_dir(self, client_kwargs):
"""
Make a directory.
Args:
client_kwargs (dict): Client arguments.
"""
with _handle_azure_exception():
# Directory
if 'directory_name' in client_kwargs:
return self.client.create_directory(
share_name=client_kwargs['share_name'],
directory_name=client_kwargs['directory_name'])
# Share
return self.client.create_share(**client_kwargs)
def _remove(self, client_kwargs):
"""
Remove an object.
Args:
client_kwargs (dict): Client arguments.
"""
with _handle_azure_exception():
# File
if 'file_name' in client_kwargs:
return self.client.delete_file(
share_name=client_kwargs['share_name'],
directory_name=client_kwargs['directory_name'],
file_name=client_kwargs['file_name'])
# Directory
elif 'directory_name' in client_kwargs:
return self.client.delete_directory(
share_name=client_kwargs['share_name'],
directory_name=client_kwargs['directory_name'])
# Share
return self.client.delete_share(
share_name=client_kwargs['share_name'])
|
class _AzureFileSystem(_AzureBaseSystem, _FileSystemBase):
'''
Azure Files Storage system.
Args:
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.file.fileservice.FileService" for more information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
'''
def copy(self, src, dst, other_system=None):
'''
Copy object of the same storage.
Args:
src (str): Path or URL.
dst (str): Path or URL.
other_system (pycosio.storage.azure._AzureBaseSystem subclass):
The source storage system.
'''
pass
def _get_client(self):
'''
Azure file service
Returns:
azure.storage.file.fileservice.FileService: Service
'''
pass
def get_client_kwargs(self, path):
'''
Get base keyword arguments for client for a
specific path.
Args:
path (str): Absolute path or URL.
Returns:
dict: client args
'''
pass
def _get_roots(self):
'''
Return URL roots for this storage.
Returns:
tuple of str or re.Pattern: URL roots
'''
pass
def _head(self, client_kwargs):
'''
Returns object or bucket HTTP header.
Args:
client_kwargs (dict): Client arguments.
Returns:
dict: HTTP header.
'''
pass
def _list_locators(self):
'''
Lists locators.
Returns:
generator of tuple: locator name str, locator header dict
'''
pass
def _list_objects(self, client_kwargs, max_request_entries):
'''
Lists objects.
Args:
client_kwargs (dict): Client arguments.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
generator of tuple: object name str, object header dict,
directory bool
'''
pass
def _make_dir(self, client_kwargs):
'''
Make a directory.
Args:
client_kwargs (dict): Client arguments.
'''
pass
def _remove(self, client_kwargs):
'''
Remove an object.
Args:
client_kwargs (dict): Client arguments.
'''
pass
| 10 | 10 | 19 | 3 | 7 | 9 | 2 | 1.33 | 2 | 2 | 0 | 0 | 9 | 0 | 9 | 76 | 192 | 37 | 67 | 16 | 57 | 89 | 48 | 16 | 38 | 4 | 6 | 2 | 19 |
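The share/directory/file split in get_client_kwargs above follows a trailing-slash convention; a standalone sketch of just that logic:

# Sketch of the share/directory/file split: a trailing '/' marks a
# directory, otherwise the last path component is the file name.
def file_client_kwargs(share_name, relpath):
    kwargs = dict(share_name=share_name)
    # Directory
    if relpath and relpath[-1] == '/':
        kwargs['directory_name'] = relpath.rstrip('/')
    # File
    elif relpath:
        try:
            kwargs['directory_name'], kwargs['file_name'] = relpath.rsplit(
                '/', 1)
        except ValueError:
            # File at share root
            kwargs['directory_name'] = ''
            kwargs['file_name'] = relpath
    # Else: share only
    return kwargs

assert file_client_kwargs('share', 'dir/') == {
    'share_name': 'share', 'directory_name': 'dir'}
assert file_client_kwargs('share', 'dir/file.txt') == {
    'share_name': 'share', 'directory_name': 'dir', 'file_name': 'file.txt'}
assert file_client_kwargs('share', 'file.txt') == {
    'share_name': 'share', 'directory_name': '', 'file_name': 'file.txt'}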
2,966 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/exceptions.py
|
pycosio._core.exceptions.ObjectNotFoundError
|
class ObjectNotFoundError(ObjectException):
"""Reraised as "FileNotFoundError" by handle_os_exceptions"""
|
class ObjectNotFoundError(ObjectException):
'''Reraised as "FileNotFoundError" by handle_os_exceptions'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
2,967 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/exceptions.py
|
pycosio._core.exceptions.ObjectPermissionError
|
class ObjectPermissionError(ObjectException):
"""Reraised as "PermissionError" by handle_os_exceptions"""
|
class ObjectPermissionError(ObjectException):
'''Reraised as "PermissionError" by handle_os_exceptions'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
2,968 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/functions_os.py
|
pycosio._core.functions_os.DirEntry
|
class DirEntry:
"""
Object yielded by scandir() to expose the file path and other file
attributes of a directory entry.
Equivalent to "os.DirEntry".
Not intended to be instantiated directly.
"""
__slots__ = ('_cache', '_system', '_name', '_header', '_path',
'_bytes_path')
def __init__(self, scandir_path, system, name, header, bytes_path):
"""
Should only be instantiated by "scandir".
Args:
scandir_path (str): scandir path argument.
system (pycosio._core.io_system.SystemBase subclass):
Storage system.
name (str): Name of the object relative to "scandir_path".
header (dict): Object header
bytes_path (bool): True if path must be returned as bytes.
"""
self._cache = dict()
self._system = system
self._name = name
self._header = header
self._path = ''.join((
scandir_path if scandir_path[-1] == '/' else (scandir_path + '/'),
name))
self._bytes_path = bytes_path
@memoizedmethod
def __str__(self):
return "<DirEntry '%s'>" % self._name.rstrip('/')
__repr__ = __str__
@property
@memoizedmethod
def _client_kwargs(self):
"""
Get base keyword arguments for client
Returns:
dict: keyword arguments
"""
return self._system.get_client_kwargs(self._path)
@property
@memoizedmethod
def name(self):
"""
The entry’s base filename, relative to the scandir() path argument.
Returns:
str: name.
"""
name = self._name.rstrip('/')
if self._bytes_path:
name = fsencode(name)
return name
@property
@memoizedmethod
def path(self):
"""
The entry’s full path name:
equivalent to os.path.join(scandir_path, entry.name)
where scandir_path is the scandir() path argument.
The path is only absolute if the scandir() path argument was absolute.
Returns:
str: name.
"""
path = self._path.rstrip('/')
if self._bytes_path:
path = fsencode(path)
return path
@memoizedmethod
def inode(self):
"""
Return the inode number of the entry.
The result is cached on the os.DirEntry object.
Returns:
int: inode.
"""
return self.stat().st_ino
@memoizedmethod
def is_dir(self, follow_symlinks=True):
"""
Return True if this entry is a directory or a symbolic link pointing to
a directory; return False if the entry is or points to any other kind
of file, or if it doesn’t exist anymore.
The result is cached on the os.DirEntry object.
Args:
follow_symlinks (bool): Follow symlinks.
Not supported on cloud storage objects.
Returns:
bool: True if directory exists.
"""
try:
return (self._system.isdir(
path=self._path, client_kwargs=self._client_kwargs,
virtual_dir=False) or
# Some directories only exist virtually in the object path and
# don't have headers.
bool(S_ISDIR(self.stat().st_mode)))
except ObjectPermissionError:
# The directory was listed, but it was not possible to head it or
# access its content
return True
@memoizedmethod
def is_file(self, follow_symlinks=True):
"""
Return True if this entry is a file or a symbolic link pointing to a
file; return False if the entry is or points to a directory or other
non-file entry, or if it doesn’t exist anymore.
The result is cached on the os.DirEntry object.
Args:
follow_symlinks (bool): Follow symlinks.
Not supported on cloud storage objects.
Returns:
bool: True if directory exists.
"""
return self._system.isfile(
path=self._path, client_kwargs=self._client_kwargs)
@memoizedmethod
def is_symlink(self):
"""
Return True if this entry is a symbolic link
The result is cached on the os.DirEntry object.
Returns:
bool: True if symbolic link.
"""
return bool(S_ISLNK(self.stat().st_mode))
@memoizedmethod
def stat(self, follow_symlinks=True):
"""
Return a stat_result object for this entry.
The result is cached on the os.DirEntry object.
Args:
follow_symlinks (bool): Follow symlinks.
Not supported on cloud storage objects.
Returns:
os.stat_result: Stat result object
"""
return self._system.stat(
path=self._path, client_kwargs=self._client_kwargs,
header=self._header)
|
class DirEntry:
'''
Object yielded by scandir() to expose the file path and other file
attributes of a directory entry.
Equivalent to "os.DirEntry".
Not intended to be instantiated directly.
'''
def __init__(self, scandir_path, system, name, header, bytes_path):
'''
Should only be instantiated by "scandir".
Args:
scandir_path (str): scandir path argument.
system (pycosio._core.io_system.SystemBase subclass):
Storage system.
name (str): Name of the object relative to "scandir_path".
header (dict): Object header
bytes_path (bool): True if path must be returned as bytes.
'''
pass
@memoizedmethod
def __str__(self):
pass
@property
@memoizedmethod
def _client_kwargs(self):
'''
Get base keyword arguments for client
Returns:
dict: keyword arguments
'''
pass
@property
@memoizedmethod
def name(self):
'''
The entry’s base filename, relative to the scandir() path argument.
Returns:
str: name.
'''
pass
@property
@memoizedmethod
def path(self):
'''
The entry’s full path name:
equivalent to os.path.join(scandir_path, entry.name)
where scandir_path is the scandir() path argument.
The path is only absolute if the scandir() path argument was absolute.
Returns:
str: name.
'''
pass
@memoizedmethod
def inode(self):
'''
Return the inode number of the entry.
The result is cached on the os.DirEntry object.
Returns:
int: inode.
'''
pass
@memoizedmethod
def is_dir(self, follow_symlinks=True):
'''
Return True if this entry is a directory or a symbolic link pointing to
a directory; return False if the entry is or points to any other kind
of file, or if it doesn’t exist anymore.
The result is cached on the os.DirEntry object.
Args:
follow_symlinks (bool): Follow symlinks.
Not supported on cloud storage objects.
Returns:
bool: True if directory exists.
'''
pass
@memoizedmethod
def is_file(self, follow_symlinks=True):
'''
Return True if this entry is a file or a symbolic link pointing to a
file; return False if the entry is or points to a directory or other
non-file entry, or if it doesn’t exist anymore.
The result is cached on the os.DirEntry object.
Args:
follow_symlinks (bool): Follow symlinks.
Not supported on cloud storage objects.
Returns:
bool: True if directory exists.
'''
pass
@memoizedmethod
def is_symlink(self):
'''
Return True if this entry is a symbolic link
The result is cached on the os.DirEntry object.
Returns:
bool: True if symbolic link.
'''
pass
@memoizedmethod
def stat(self, follow_symlinks=True):
'''
Return a stat_result object for this entry.
The result is cached on the os.DirEntry object.
Args:
follow_symlinks (bool): Follow symlinks.
Not supported on cloud storage objects.
Returns:
os.stat_result: Stat result object
'''
pass
| 23 | 10 | 14 | 2 | 4 | 8 | 1 | 1.4 | 0 | 3 | 1 | 0 | 10 | 6 | 10 | 10 | 172 | 33 | 58 | 30 | 35 | 81 | 37 | 21 | 26 | 2 | 0 | 1 | 14 |
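DirEntry caches each result in the instance's _cache dict through the memoizedmethod decorator; the following is a minimal sketch of that caching pattern (an assumption about its shape, not the exact pycosio implementation; it caches by method name only and ignores arguments):

from functools import wraps

def memoizedmethod(method):
    """Cache a method's result in the instance's '_cache' dict."""
    name = method.__name__

    @wraps(method)
    def wrapper(self, *args, **kwargs):
        try:
            return self._cache[name]
        except KeyError:
            result = self._cache[name] = method(self, *args, **kwargs)
            return result
    return wrapper

class Example:
    def __init__(self):
        self._cache = {}
        self.calls = 0

    @memoizedmethod
    def value(self):
        self.calls += 1
        return 42

obj = Example()
assert obj.value() == obj.value() == 42
assert obj.calls == 1  # Second call was served from the cache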
2,969 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/io_base.py
|
pycosio._core.io_base.WorkerPoolBase
|
class WorkerPoolBase:
"""
Base class that handles a worker pool.
Args:
max_workers (int): Maximum number of workers.
"""
def __init__(self, max_workers=None):
self._workers_count = max_workers
@property
@memoizedmethod
def _workers(self):
"""Executor pool
Returns:
concurrent.futures.Executor: Executor pool"""
# Lazy instantiate workers pool on first call
return ThreadPoolExecutor(max_workers=self._workers_count)
def _generate_async(self, generator):
"""
Return the previous generator object after having run the first element
evaluation as a background task.
Args:
generator (iterable): A generator function.
Returns:
iterable: The generator function with first element evaluated
in background.
"""
first_value_future = self._workers.submit(next, generator)
def get_first_element(future=first_value_future):
"""
Get first element value from future.
Args:
future (concurrent.futures._base.Future): First value future.
Returns:
Evaluated value
"""
try:
yield future.result()
except StopIteration:
return
return chain(get_first_element(), generator)
|
class WorkerPoolBase:
'''
Base class that handles a worker pool.
Args:
max_workers (int): Maximum number of workers.
'''
def __init__(self, max_workers=None):
pass
@property
@memoizedmethod
def _workers(self):
'''Executor pool
Returns:
concurrent.futures.Executor: Executor pool'''
pass
def _generate_async(self, generator):
'''
Return the previous generator object after having run the first element
evaluation as a background task.
Args:
generator (iterable): A generator function.
Returns:
iterable: The generator function with first element evaluated
in background.
'''
pass
def get_first_element(future=first_value_future):
'''
Get first element value from future.
Args:
future (concurrent.futures._base.Future): First value future.
Returns:
Evaluated value
'''
pass
| 7 | 4 | 13 | 2 | 4 | 7 | 1 | 1.67 | 0 | 2 | 0 | 3 | 3 | 1 | 3 | 3 | 51 | 11 | 15 | 8 | 8 | 25 | 13 | 7 | 8 | 2 | 0 | 1 | 5 |
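The _generate_async pattern above can be reproduced standalone: evaluation of the generator's first element is submitted to the pool, and the result is chained back in front of the untouched remainder. A minimal sketch:

from concurrent.futures import ThreadPoolExecutor
from itertools import chain

executor = ThreadPoolExecutor(max_workers=2)

def generate_async(generator):
    # Start evaluating the first element in a background thread
    first_future = executor.submit(next, generator)

    def first_element():
        try:
            yield first_future.result()
        except StopIteration:
            # Generator was empty
            return

    return chain(first_element(), generator)

values = generate_async(iter(range(3)))
assert list(values) == [0, 1, 2]
executor.shutdown()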
2,970 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/io_random_write.py
|
pycosio._core.io_random_write.ObjectBufferedIORandomWriteBase
|
class ObjectBufferedIORandomWriteBase(ObjectBufferedIOBase):
"""
Buffered base class for binary cloud storage object I/O that supports
flushing parts of a file instead of requiring the full file to be
flushed at once.
"""
# Need to be flagged because it is not an abstract class
__DEFAULT_CLASS = False
def _flush(self):
"""
Flush the write buffers of the stream if applicable.
In write mode, send the buffer content to the cloud object.
"""
# Flush buffer to specified range
buffer = self._get_buffer()
start = self._buffer_size * (self._seek - 1)
end = start + len(buffer)
future = self._workers.submit(
self._flush_range, buffer=buffer, start=start, end=end)
self._write_futures.append(future)
future.add_done_callback(partial(self._update_size, end))
def _update_size(self, size, future):
"""
Keep track of the file size during writing.
If specified size value is greater than the current size, update the
current size using specified value.
Used as callback in default "_flush" implementation for files supporting
random write access.
Args:
size (int): Size value.
future (concurrent.futures._base.Future): future.
"""
with self._size_lock:
# Update value
if size > self._size and future.done():
# Size can be lower after seeking down in a file opened in 'a' mode.
self._size = size
def _flush_range(self, buffer, start, end):
"""
Flush a buffer to a range of the file.
Meant to be used asynchronously; used to provide parallel flushing of
file parts when applicable.
Args:
buffer (memoryview): Buffer content.
start (int): Start of buffer position to flush.
end (int): End of buffer position to flush.
"""
# On first call, Get file size if exists
with self._size_lock:
if not self._size_synched:
self._size_synched = True
try:
self._size = self.raw._size
except (ObjectNotFoundError, UnsupportedOperation):
self._size = 0
# It is not possible to flush a part if start > size:
# If that is the case, wait until previous parts have been flushed
# before flushing this one
while start > self._size:
sleep(self._FLUSH_WAIT)
# Flush buffer using RAW IO
self._raw_flush(buffer, start, end)
|
class ObjectBufferedIORandomWriteBase(ObjectBufferedIOBase):
'''
Buffered base class for binary cloud storage object I/O that supports
flushing parts of a file instead of requiring the full file to be
flushed at once.
'''
def _flush(self):
'''
Flush the write buffers of the stream if applicable.
In write mode, send the buffer content to the cloud object.
'''
pass
def _update_size(self, size, future):
'''
Keep track of the file size during writing.
If specified size value is greater than the current size, update the
current size using specified value.
Used as callback in default "_flush" implementation for files supporting
random write access.
Args:
size (int): Size value.
future (concurrent.futures._base.Future): future.
'''
pass
def _flush_range(self, buffer, start, end):
'''
Flush a buffer to a range of the file.
Meant to be used asynchronously; used to provide parallel flushing of
file parts when applicable.
Args:
buffer (memoryview): Buffer content.
start (int): Start of buffer position to flush.
end (int): End of buffer position to flush.
'''
pass
| 4 | 4 | 21 | 3 | 8 | 10 | 2 | 1.44 | 1 | 2 | 1 | 4 | 3 | 2 | 3 | 51 | 73 | 12 | 25 | 11 | 21 | 36 | 24 | 11 | 20 | 4 | 6 | 3 | 7 |
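The start/end arithmetic in _flush above maps the 1-based buffer counter (_seek) to a byte range of the object, which is what allows buffers to be flushed in parallel; shown standalone:

def flush_range(buffer_size, seek, buffer_len):
    """Return (start, end) byte positions of the buffer being flushed."""
    start = buffer_size * (seek - 1)
    return start, start + buffer_len

# Three 8 MiB buffers, the last one partially filled
MIB8 = 8 * 1024 * 1024
assert flush_range(MIB8, 1, MIB8) == (0, MIB8)
assert flush_range(MIB8, 2, MIB8) == (MIB8, 2 * MIB8)
assert flush_range(MIB8, 3, 1024) == (2 * MIB8, 2 * MIB8 + 1024)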
2,971 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure.py
|
pycosio.storage.azure._AzureBaseSystem
|
class _AzureBaseSystem(_SystemBase):
"""
Common base for Azure storage systems.
Args:
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
"""
__slots__ = ('_endpoint', '_endpoint_domain')
_MTIME_KEYS = ('last_modified',)
_SIZE_KEYS = ('content_length',)
def __init__(self, *args, **kwargs):
self._endpoint = None
self._endpoint_domain = None
_SystemBase.__init__(self, *args, **kwargs)
@staticmethod
def _get_time(header, keys, name):
"""
Get time from header
Args:
header (dict): Object header.
keys (tuple of str): Header keys.
name (str): Method name.
Returns:
float: The number of seconds since the epoch
"""
for key in keys:
try:
return header.pop(key).timestamp()
except KeyError:
continue
raise _UnsupportedOperation(name)
def _get_endpoint(self, sub_domain):
"""
Get endpoint information from storage parameters.
Update system with endpoint information and return information required
to define roots.
Args:
self (pycosio._core.io_system.SystemBase subclass): System.
sub_domain (str): Azure storage sub-domain.
Returns:
tuple of str: account_name, endpoint_suffix
"""
storage_parameters = self._storage_parameters or dict()
account_name = storage_parameters.get('account_name')
if not account_name:
raise ValueError('"account_name" is required for Azure storage')
suffix = storage_parameters.get(
'endpoint_suffix', 'core.windows.net')
self._endpoint = 'http%s://%s.%s.%s' % (
'' if self._unsecure else 's', account_name, sub_domain, suffix)
return account_name, suffix.replace('.', r'\.')
def _secured_storage_parameters(self):
"""
Updates storage parameters with unsecure mode.
Returns:
dict: Updated storage_parameters.
"""
parameters = self._storage_parameters or dict()
# Handles unsecure mode
if self._unsecure:
parameters = parameters.copy()
parameters['protocol'] = 'http'
return parameters
def _format_src_url(self, path, caller_system):
"""
Ensure the path is absolute and in the correct URL format for use with
the cross-account Azure storage copy function.
Args:
path (str): Path or URL.
caller_system (pycosio.storage.azure._AzureBaseSystem subclass):
System calling this method (Can be another Azure system).
Returns:
str: URL.
"""
path = '%s/%s' % (self._endpoint, self.relpath(path))
# If SAS token available, use it to give cross account copy access.
if caller_system is not self:
try:
path = '%s?%s' % (path, self._storage_parameters['sas_token'])
except KeyError:
pass
return path
@staticmethod
def _update_listing_client_kwargs(client_kwargs, max_request_entries):
"""
Updates client kwargs for listing functions.
Args:
client_kwargs (dict): Client arguments.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
dict: Updated client_kwargs
"""
client_kwargs = client_kwargs.copy()
if max_request_entries:
client_kwargs['num_results'] = max_request_entries
return client_kwargs
@staticmethod
def _model_to_dict(obj):
"""
Convert object model to dict.
Args:
obj: Object model.
Returns:
dict: Converted model.
"""
result = _properties_model_to_dict(obj.properties)
for attribute in ('metadata', 'snapshot'):
try:
value = getattr(obj, attribute)
except AttributeError:
continue
if value:
result[attribute] = value
return result
|
class _AzureBaseSystem(_SystemBase):
'''
Common base for Azure storage systems.
Args:
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
'''
def __init__(self, *args, **kwargs):
pass
@staticmethod
def _get_time(header, keys, name):
'''
Get time from header
Args:
header (dict): Object header.
keys (tuple of str): Header keys.
name (str): Method name.
Returns:
float: The number of seconds since the epoch
'''
pass
def _get_endpoint(self, sub_domain):
'''
Get endpoint information from storage parameters.
Update system with endpoint information and return information required
to define roots.
Args:
self (pycosio._core.io_system.SystemBase subclass): System.
sub_domain (str): Azure storage sub-domain.
Returns:
tuple of str: account_name, endpoint_suffix
'''
pass
def _secured_storage_parameters(self):
'''
Updates storage parameters with unsecure mode.
Returns:
dict: Updated storage_parameters.
'''
pass
def _format_src_url(self, path, caller_system):
'''
Ensure the path is absolute and in the correct URL format for use with
the cross-account Azure storage copy function.
Args:
path (str): Path or URL.
caller_system (pycosio.storage.azure._AzureBaseSystem subclass):
System calling this method (Can be another Azure system).
Returns:
str: URL.
'''
pass
@staticmethod
def _update_listing_client_kwargs(client_kwargs, max_request_entries):
'''
Updates client kwargs for listing functions.
Args:
client_kwargs (dict): Client arguments.
max_request_entries (int): If specified, maximum entries returned
by request.
Returns:
dict: Updated client_kwargs
'''
pass
@staticmethod
def _model_to_dict(obj):
'''
Convert object model to dict.
Args:
obj: Object model.
Returns:
dict: Converted model.
'''
pass
| 11 | 7 | 18 | 3 | 7 | 7 | 3 | 1.09 | 1 | 4 | 0 | 2 | 4 | 2 | 7 | 65 | 149 | 30 | 57 | 24 | 46 | 62 | 52 | 21 | 44 | 4 | 5 | 2 | 18 |
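The endpoint construction in _get_endpoint above, reduced to a standalone function for illustration (build_endpoint is a hypothetical name):

def build_endpoint(storage_parameters, sub_domain, unsecure=False):
    """Build the account endpoint URL from storage parameters."""
    account_name = storage_parameters.get('account_name')
    if not account_name:
        raise ValueError('"account_name" is required for Azure storage')
    suffix = storage_parameters.get('endpoint_suffix', 'core.windows.net')
    return 'http%s://%s.%s.%s' % (
        '' if unsecure else 's', account_name, sub_domain, suffix)

assert build_endpoint({'account_name': 'myaccount'}, 'blob') == (
    'https://myaccount.blob.core.windows.net')
assert build_endpoint({'account_name': 'myaccount'}, 'file',
                      unsecure=True) == (
    'http://myaccount.file.core.windows.net')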
2,972 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_append_blob.py
|
pycosio.storage.azure_blob._append_blob.AzureAppendBlobBufferedIO
|
class AzureAppendBlobBufferedIO(AzureBlobBufferedIO,
ObjectBufferedIORandomWriteBase):
"""Buffered binary Azure Append Blobs Storage Object I/O
This blob type is not seekable in write mode.
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
"""
__DEFAULT_CLASS = False
_RAW_CLASS = AzureAppendBlobRawIO
def __init__(self, *args, **kwargs):
ObjectBufferedIORandomWriteBase.__init__(self, *args, **kwargs)
if self._writable:
# Can't upload in parallel, but can still upload sequentially as
# a background task
self._workers_count = 1
def _flush(self):
"""
Flush the write buffer of the stream.
"""
self._write_futures.append(self._workers.submit(
self._client.append_block, block=self._get_buffer().tobytes(),
**self._client_kwargs))
|
class AzureAppendBlobBufferedIO(AzureBlobBufferedIO,
ObjectBufferedIORandomWriteBase):
'''Buffered binary Azure Append Blobs Storage Object I/O
This blob type is not seekable in write mode.
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
'''
def __init__(self, *args, **kwargs):
pass
def _flush(self):
'''
Flush the write buffer of the stream.
'''
pass
| 3 | 2 | 7 | 1 | 4 | 3 | 2 | 1.83 | 2 | 0 | 0 | 0 | 2 | 1 | 2 | 54 | 39 | 5 | 12 | 7 | 8 | 22 | 9 | 6 | 6 | 2 | 7 | 1 | 3 |
2,973 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_append_blob.py
|
pycosio.storage.azure_blob._append_blob.AzureAppendBlobRawIO
|
class AzureAppendBlobRawIO(AzureBlobRawIO, ObjectRawIORandomWriteBase):
"""Binary Azure Append Blobs Storage Object I/O
This blob type is not seekable in write mode.
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
"""
__DEFAULT_CLASS = False
#: Maximum size of one flush operation
MAX_FLUSH_SIZE = AppendBlobService.MAX_BLOCK_SIZE
def __init__(self, *args, **kwargs):
AzureBlobRawIO.__init__(self, *args, **kwargs)
if self._writable:
# Not seekable in append mode
self._seekable = False
def _create(self):
"""
Create the file if not exists.
"""
with _handle_azure_exception():
self._client.create_blob(**self._client_kwargs)
@property
@memoizedmethod
def _client(self):
"""
Returns client instance.
Returns:
client
"""
return self._system.client[_BLOB_TYPE]
def _flush(self, buffer, *_):
"""
Flush the write buffer of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
"""
buffer_size = len(buffer)
# If buffer too large, must flush by parts sequentially
if buffer_size > self.MAX_FLUSH_SIZE:
for part_start in range(0, buffer_size, self.MAX_FLUSH_SIZE):
# Split buffer and append
buffer_part = buffer[
part_start:part_start + self.MAX_FLUSH_SIZE]
with _handle_azure_exception():
self._client.append_block(
block=buffer_part.tobytes(), **self._client_kwargs)
# Small buffer, send it in one command.
elif buffer_size:
with _handle_azure_exception():
self._client.append_block(
block=buffer.tobytes(), **self._client_kwargs)
|
class AzureAppendBlobRawIO(AzureBlobRawIO, ObjectRawIORandomWriteBase):
'''Binary Azure Append Blobs Storage Object I/O
This blob type is not seekable in write mode.
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
'''
def __init__(self, *args, **kwargs):
pass
def _create(self):
'''
Create the file if not exists.
'''
pass
@property
@memoizedmethod
def _client(self):
'''
Returns client instance.
Returns:
client
'''
pass
def _flush(self, buffer, *_):
'''
Flush the write buffer of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
'''
pass
| 7 | 4 | 12 | 2 | 6 | 4 | 2 | 1.15 | 2 | 1 | 0 | 0 | 4 | 1 | 4 | 63 | 72 | 14 | 27 | 12 | 20 | 31 | 21 | 11 | 16 | 4 | 8 | 3 | 8 |
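The chunking loop in _flush above splits an oversized buffer into sequential parts no larger than MAX_FLUSH_SIZE; the same logic as a standalone sketch:

def split_flush(buffer, max_flush_size):
    """Yield successive buffer parts no larger than max_flush_size."""
    for part_start in range(0, len(buffer), max_flush_size):
        yield buffer[part_start:part_start + max_flush_size]

buffer = memoryview(bytes(10))
parts = [part.tobytes() for part in split_flush(buffer, 4)]
assert [len(part) for part in parts] == [4, 4, 2]
assert b''.join(parts) == bytes(10)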
2,974 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_base_blob.py
|
pycosio.storage.azure_blob._base_blob.AzureBlobBufferedIO
|
class AzureBlobBufferedIO(ObjectBufferedIOBase):
"""Buffered binary Azure Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
blob_type (str): Blob type to use on new file creation.
Possible values: BlockBlob (default), AppendBlob, PageBlob.
"""
_SYSTEM_CLASS = _AzureBlobSystem
__DEFAULT_CLASS = True
def __new__(cls, name, mode='r', buffer_size=None, max_buffers=0,
max_workers=None, **kwargs):
# If called from a subclass, instantiate this subclass directly
if cls is not AzureBlobBufferedIO:
return IOBase.__new__(cls)
# Get subclass
return IOBase.__new__(AZURE_BUFFERED[_new_blob(cls, name, kwargs)])
|
class AzureBlobBufferedIO(ObjectBufferedIOBase):
'''Buffered binary Azure Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
blob_type (str): Blob type to use on new file creation.
Possible values: BlockBlob (default), AppendBlob, PageBlob.
'''
def __new__(cls, name, mode='r', buffer_size=None, max_buffers=0,
max_workers=None, **kwargs):
pass
| 2 | 1 | 8 | 1 | 5 | 2 | 2 | 2.5 | 1 | 0 | 0 | 3 | 1 | 0 | 1 | 49 | 31 | 3 | 8 | 5 | 5 | 20 | 7 | 4 | 5 | 2 | 6 | 1 | 2 |
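The __new__ above dispatches instantiation to a concrete subclass selected by blob type; a simplified standalone sketch of the same factory pattern (class names here are illustrative, not the pycosio ones):

class BlobIO:
    REGISTRY = {}  # blob_type -> concrete subclass

    def __new__(cls, name, blob_type='BlockBlob'):
        if cls is not BlobIO:
            # Called on a subclass: instantiate it directly
            return object.__new__(cls)
        # Called on the base class: look up the concrete subclass
        return object.__new__(cls.REGISTRY[blob_type])

    def __init__(self, name, blob_type='BlockBlob'):
        self.name = name

class BlockBlobIO(BlobIO):
    pass

class PageBlobIO(BlobIO):
    pass

BlobIO.REGISTRY.update(BlockBlob=BlockBlobIO, PageBlob=PageBlobIO)

assert type(BlobIO('f')) is BlockBlobIO
assert type(BlobIO('f', blob_type='PageBlob')) is PageBlobIO
assert type(PageBlobIO('f')) is PageBlobIO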
2,975 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_base_blob.py
|
pycosio.storage.azure_blob._base_blob.AzureBlobRawIO
|
class AzureBlobRawIO(_AzureStorageRawIOBase):
"""Binary Azure Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
blob_type (str): Blob type to use on new file creation.
Possible values: BlockBlob (default), AppendBlob, PageBlob.
"""
_SYSTEM_CLASS = _AzureBlobSystem
__DEFAULT_CLASS = True
def __new__(cls, name, mode='r', **kwargs):
# If called from a subclass, instantiate this subclass directly
if cls is not AzureBlobRawIO:
return IOBase.__new__(cls)
# Get subclass
return IOBase.__new__(AZURE_RAW[_new_blob(cls, name, kwargs)])
@property
@memoizedmethod
def _get_to_stream(self):
"""
Azure storage function that reads a range into a stream.
Returns:
function: Read function.
"""
return self._client.get_blob_to_stream
|
class AzureBlobRawIO(_AzureStorageRawIOBase):
'''Binary Azure Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
blob_type (str): Blob type to use on new file creation.
Possible values: BlockBlob (default), AppendBlob, PageBlob.
'''
def __new__(cls, name, mode='r', **kwargs):
pass
@property
@memoizedmethod
def _get_to_stream(self):
'''
Azure storage function that reads a range into a stream.
Returns:
function: Read function.
'''
pass
| 5 | 2 | 8 | 1 | 3 | 4 | 2 | 1.91 | 1 | 0 | 0 | 3 | 2 | 0 | 2 | 54 | 37 | 5 | 11 | 6 | 6 | 21 | 9 | 5 | 6 | 2 | 7 | 1 | 3 |
2,976 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_block_blob.py
|
pycosio.storage.azure_blob._block_blob.AzureBlockBlobBufferedIO
|
class AzureBlockBlobBufferedIO(AzureBlobBufferedIO):
"""Buffered binary Azure Block Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
"""
__slots__ = ('_blocks',)
__DEFAULT_CLASS = False
_RAW_CLASS = AzureBlockBlobRawIO
def __init__(self, *args, **kwargs):
ObjectBufferedIOBase.__init__(self, *args, **kwargs)
if self._writable:
self._blocks = []
@staticmethod
def _get_random_block_id(length):
"""
Generate a random ID.
Args:
length (int): ID length.
Returns:
str: Random block ID.
"""
return ''.join(choice(_ascii_lowercase) for _ in range(length))
def _flush(self):
"""
Flush the write buffer of the stream.
"""
block_id = self._get_random_block_id(32)
# Upload block with workers
self._write_futures.append(self._workers.submit(
self._client.put_block, block=self._get_buffer().tobytes(),
block_id=block_id, **self._client_kwargs))
# Save block information
self._blocks.append(BlobBlock(id=block_id))
def _close_writable(self):
"""
Close the object in write mode.
"""
for future in self._write_futures:
future.result()
block_list = self._client.get_block_list(**self._client_kwargs)
self._client.put_block_list(
block_list=block_list.committed_blocks + self._blocks,
**self._client_kwargs)
|
class AzureBlockBlobBufferedIO(AzureBlobBufferedIO):
'''Buffered binary Azure Block Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w' for reading (default) or writing
buffer_size (int): The size of buffer.
max_buffers (int): The maximum number of buffers to preload in read mode
or awaiting flush in write mode. 0 for no limit.
max_workers (int): The maximum number of threads that can be used to
execute the given calls.
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection unsecure.
'''
def __init__(self, *args, **kwargs):
pass
@staticmethod
def _get_random_block_id(length):
'''
Generate a random ID.
Args:
length (int): ID length.
Returns:
str: Random block ID.
'''
pass
def _flush(self):
'''
Flush the write buffer of the stream.
'''
pass
def _close_writable(self):
'''
Close the object in write mode.
'''
pass
| 6 | 4 | 10 | 1 | 5 | 4 | 2 | 1.29 | 1 | 1 | 0 | 0 | 3 | 1 | 4 | 53 | 66 | 11 | 24 | 14 | 18 | 31 | 19 | 12 | 14 | 2 | 7 | 1 | 6 |
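The random block IDs used by _flush above can be generated with the standard library alone; a standalone sketch of _get_random_block_id:

from random import choice
from string import ascii_lowercase

def get_random_block_id(length):
    """Generate a random lowercase block ID of the given length."""
    return ''.join(choice(ascii_lowercase) for _ in range(length))

block_ids = [get_random_block_id(32) for _ in range(3)]
assert all(len(block_id) == 32 for block_id in block_ids)
assert all(char in ascii_lowercase for char in block_ids[0])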
2,977 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/_core/io_base.py
|
pycosio._core.io_base.ObjectIOBase
|
class ObjectIOBase(IOBase):
"""
Base class to handle cloud object.
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a', 'x'
for reading (default), writing or appending
"""
__slots__ = ('_name', '_mode', '_seek', '_seek_lock', '_cache', '_closed',
'_writable', '_readable', '_seekable')
def __init__(self, name, mode='r'):
IOBase.__init__(self)
self._name = fsdecode(name)
self._mode = mode
# Thread safe stream position
self._seek = 0
self._seek_lock = Lock()
# Cache for values
self._cache = {}
# Set to True once file is closed
self._closed = False
# Select supported features based on mode
self._writable = False
self._readable = False
self._seekable = True
if 'w' in mode or 'a' in mode or 'x' in mode:
self._writable = True
elif 'r' in mode:
self._readable = True
else:
raise ValueError('Invalid mode "%s"' % mode)
def __str__(self):
return "<%s.%s name='%s' mode='%s'>" % (
self.__class__.__module__, self.__class__.__name__,
self._name, self._mode)
__repr__ = __str__
@property
def mode(self):
"""
The mode.
Returns:
str: Mode.
"""
return self._mode
@property
def name(self):
"""
The file name.
Returns:
str: Name.
"""
return self._name
def readable(self):
"""
Return True if the stream can be read from.
If False, read() will raise OSError.
Returns:
bool: Supports reading.
"""
return self._readable
def seekable(self):
"""
Return True if the stream supports random access.
If False, seek(), tell() and truncate() will raise OSError.
Returns:
bool: Supports random access.
"""
return self._seekable
def tell(self):
"""Return the current stream position.
Returns:
int: Stream position."""
if not self._seekable:
raise UnsupportedOperation('tell')
with self._seek_lock:
return self._seek
def writable(self):
"""
Return True if the stream supports writing.
If False, write() and truncate() will raise OSError.
Returns:
bool: Supports writing.
"""
return self._writable
|
class ObjectIOBase(IOBase):
'''
Base class to handle cloud object.
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a', 'x'
for reading (default), writing or appending
'''
def __init__(self, name, mode='r'):
pass
def __str__(self):
pass
@property
def mode(self):
'''
The mode.
Returns:
str: Mode.
'''
pass
@property
def name(self):
'''
The file name.
Returns:
str: Name.
'''
pass
def readable(self):
'''
Return True if the stream can be read from.
If False, read() will raise OSError.
Returns:
bool: Supports reading.
'''
pass
def seekable(self):
'''
Return True if the stream supports random access.
If False, seek(), tell() and truncate() will raise OSError.
Returns:
bool: Supports random access.
'''
pass
def tell(self):
'''Return the current stream position.
Returns:
int: Stream position.'''
pass
def writable(self):
'''
Return True if the stream supports writing.
If False, write() and truncate() will raise OSError.
Returns:
bool: Supports writing.
'''
pass
| 11 | 7 | 11 | 2 | 5 | 4 | 1 | 1 | 1 | 1 | 0 | 2 | 8 | 9 | 8 | 28 | 109 | 25 | 42 | 22 | 31 | 42 | 35 | 20 | 26 | 3 | 4 | 1 | 11 |
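The constructor derives the readable/writable flags purely from the mode string. A minimal sketch of that flag parsing pulled out into a standalone helper (parse_mode is hypothetical, not part of pycosio):

# Mirrors the if/elif/else in __init__: any of 'w', 'a', 'x' means
# writable, otherwise 'r' means readable, otherwise the mode is invalid.
def parse_mode(mode):
    writable = 'w' in mode or 'a' in mode or 'x' in mode
    readable = 'r' in mode and not writable
    if not (writable or readable):
        raise ValueError('Invalid mode "%s"' % mode)
    return readable, writable

print(parse_mode('r'))  # (True, False)
print(parse_mode('a'))  # (False, True)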
2,978 |
Accelize/pycosio
|
Accelize_pycosio/pycosio/storage/azure_blob/_page_blob.py
|
pycosio.storage.azure_blob._page_blob.AzurePageBlobRawIO
|
class AzurePageBlobRawIO(AzureBlobRawIO, _AzureStorageRawIORangeWriteBase):
"""Binary Azure Page Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
content_length (int): Defines the size to preallocate on new file
creation. This is not mandatory, and the file will be resized as
needed, but preallocation improves performance when the file size is
known in advance. Any value is rounded to be page aligned. Defaults to 0.
ignore_padding (bool): If True, strip null chars padding from end of
read data and ignore padding when seeking from end
(whence=os.SEEK_END). Default to True.
"""
__slots__ = ('_ignore_padding',)
__DEFAULT_CLASS = False
#: Maximum size of one flush operation
MAX_FLUSH_SIZE = PageBlobService.MAX_PAGE_SIZE
def __init__(self, *args, **kwargs):
self._ignore_padding = kwargs.get('ignore_padding', True)
_AzureStorageRawIORangeWriteBase.__init__(self, *args, **kwargs)
@property
@memoizedmethod
def _client(self):
"""
Returns client instance.
Returns:
client
"""
return self._system.client[_BLOB_TYPE]
@property
@memoizedmethod
def _resize(self):
"""
Azure storage function that resizes an object.
Returns:
function: Resize function.
"""
return self._client.resize_blob
def _init_append(self):
"""
Initializes the file in 'a' mode.
"""
# Ensure content length is page aligned
if self._content_length % 512:
self._content_length += 512 - self._content_length % 512
_AzureStorageRawIORangeWriteBase._init_append(self)
# If ignore padding, seek to real end of blob
if self._ignore_padding:
self._seek = self._seek_end_ignore_padding()
def _create(self):
"""
Create the file if not exists.
"""
# Ensure content length is page aligned
if self._content_length % 512:
self._content_length += 512 - self._content_length % 512
_AzureStorageRawIORangeWriteBase._create(self)
@property
@memoizedmethod
def _create_from_size(self):
"""
Azure storage function that creates an object.
Returns:
function: Create function.
"""
return self._client.create_blob
def _update_range(self, data, **kwargs):
"""
Update range with data
Args:
data (bytes): data.
"""
self._client.update_page(page=data, **kwargs)
def _read_range(self, start, end=0, null_strip=None):
"""
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: data read
"""
data = AzureBlobRawIO._read_range(self, start, end)
if (null_strip is None and self._ignore_padding) or null_strip:
# Remove trailing Null chars (Empty page end)
return data.rstrip(b'\0')
return data
def _readall(self):
"""
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
"""
data = AzureBlobRawIO._readall(self)
if self._ignore_padding:
# Remove trailing Null chars (Empty page end)
return data.rstrip(b'\0')
return data
def seek(self, offset, whence=SEEK_SET):
"""
Change the stream position to the given byte offset.
Args:
offset: Offset is interpreted relative to the position indicated by
whence.
whence: The default value for whence is SEEK_SET.
Values for whence are:
SEEK_SET or 0 – start of the stream (the default);
offset should be zero or positive
SEEK_CUR or 1 – current stream position;
offset may be negative
SEEK_END or 2 – end of the stream;
offset is usually negative
Returns:
int: The new absolute position.
"""
if self._ignore_padding and whence == SEEK_END:
# If seek on last page, remove padding
offset = self._seek_end_ignore_padding(offset)
whence = SEEK_SET
return ObjectRawIORandomWriteBase.seek(self, offset, whence)
def _seek_end_ignore_padding(self, offset=0):
"""
Compute seek position if seeking from end ignoring null padding.
Args:
offset (int): relative position to seek.
Returns:
int: New seek value.
"""
# Read last pages
page_end = self._size
page_seek = page_end + min(offset, 0)
page_start = page_seek - (page_seek % 512 or 512)
last_pages = self._read_range(page_start, page_end, null_strip=True)
# Move seek to last not null byte
return page_start + len(last_pages) + offset
def _flush(self, buffer, start, end):
"""
Flush the write buffer of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
start (int): Start of buffer position to flush.
Supported only with page blobs.
end (int): End of buffer position to flush.
Supported only with page blobs.
"""
buffer_size = len(buffer)
if buffer_size:
# Buffer must be aligned on pages
end_page_diff = end % 512
start_page_diff = start % 512
if end_page_diff or start_page_diff:
# Create a new aligned buffer
end_page_diff = 512 - end_page_diff
end += end_page_diff
start -= start_page_diff
unaligned_buffer = buffer
buffer_size = end - start
buffer = memoryview(bytearray(buffer_size))
# If exists, Get aligned range from current file
if self._exists() == 1 and start < self._size:
buffer[:] = memoryview(self._read_range(
start, end, null_strip=False))
# Update with current buffer
buffer[start_page_diff:-end_page_diff] = unaligned_buffer
_AzureStorageRawIORangeWriteBase._flush(self, buffer, start, end)
|
class AzurePageBlobRawIO(AzureBlobRawIO, _AzureStorageRawIORangeWriteBase):
'''Binary Azure Page Blobs Storage Object I/O
Args:
name (path-like object): URL or path to the file which will be opened.
mode (str): The mode can be 'r', 'w', 'a'
for reading (default), writing or appending
storage_parameters (dict): Azure service keyword arguments.
This is generally Azure credentials and configuration. See
"azure.storage.blob.baseblobservice.BaseBlobService" for more
information.
unsecure (bool): If True, disables TLS/SSL to improve
transfer performance, but makes the connection insecure.
content_length (int): Defines the size to preallocate on new file
creation. This is not mandatory, and the file will be resized as
needed, but preallocation improves performance when the file size is
known in advance. Any value is rounded to be page aligned. Defaults to 0.
ignore_padding (bool): If True, strip null chars padding from end of
read data and ignore padding when seeking from end
(whence=os.SEEK_END). Default to True.
'''
def __init__(self, *args, **kwargs):
pass
@property
@memoizedmethod
def _client(self):
'''
Returns client instance.
Returns:
client
'''
pass
@property
@memoizedmethod
def _resize(self):
'''
Azure storage function that resizes an object.
Returns:
function: Resize function.
'''
pass
def _init_append(self):
'''
Initializes the file in 'a' mode.
'''
pass
def _create(self):
'''
Create the file if not exists.
'''
pass
@property
@memoizedmethod
def _create_from_size(self):
'''
Azure storage function that creates an object.
Returns:
function: Create function.
'''
pass
def _update_range(self, data, **kwargs):
'''
Update range with data
Args:
data (bytes): data.
'''
pass
def _read_range(self, start, end=0, null_strip=None):
'''
Read a range of bytes in stream.
Args:
start (int): Start stream position.
end (int): End stream position.
0 To not specify end.
Returns:
bytes: data read
'''
pass
def _readall(self):
'''
Read and return all the bytes from the stream until EOF.
Returns:
bytes: Object content
'''
pass
def seek(self, offset, whence=SEEK_SET):
'''
Change the stream position to the given byte offset.
Args:
offset: Offset is interpreted relative to the position indicated by
whence.
whence: The default value for whence is SEEK_SET.
Values for whence are:
SEEK_SET or 0 – start of the stream (the default);
offset should be zero or positive
SEEK_CUR or 1 – current stream position;
offset may be negative
SEEK_END or 2 – end of the stream;
offset is usually negative
Returns:
int: The new absolute position.
'''
pass
def _seek_end_ignore_padding(self, offset=0):
'''
Compute seek position if seeking from end ignoring null padding.
Args:
offset (int): relative position to seek.
Returns:
int: New seek value.
'''
pass
def _flush(self, buffer, start, end):
'''
Flush the write buffer of the stream if applicable.
Args:
buffer (memoryview): Buffer content.
start (int): Start of buffer position to flush.
Supported only with page blobs.
end (int): End of buffer position to flush.
Supported only with page blobs.
'''
pass
| 19 | 12 | 14 | 2 | 5 | 7 | 2 | 1.51 | 2 | 2 | 0 | 0 | 12 | 2 | 12 | 81 | 212 | 39 | 69 | 31 | 50 | 104 | 62 | 28 | 49 | 4 | 8 | 3 | 21 |
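Most of the complexity in _flush comes from 512-byte page alignment: the flushed range is padded down at the start and up at the end so every update_page call is page aligned, with the unaligned data copied into the middle of the padded buffer. A slightly simplified sketch of that arithmetic (align_range is a hypothetical helper):

PAGE = 512

# Pad start down and end up to the nearest page boundaries.
def align_range(start, end):
    start_aligned = start - start % PAGE
    end_aligned = end + (PAGE - end % PAGE) % PAGE
    return start_aligned, end_aligned

assert align_range(0, 512) == (0, 512)      # already aligned: unchanged
assert align_range(100, 700) == (0, 1024)   # padded out on both sides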
2,979 |
ActionAgile/trellostats
|
ActionAgile_trellostats/trellostats/models.py
|
trellostats.models.Snapshot.Meta
|
class Meta:
database = db_proxy
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 0 | 0 | 0 |
2,980 |
ActionAgile/trellostats
|
ActionAgile_trellostats/trellostats/models.py
|
trellostats.models.Snapshot
|
class Snapshot(Model):
board_id = CharField()
done_id = CharField()
when = DateTimeField(default=datetime.now)
cycle_time = FloatField()
class Meta:
database = db_proxy
def __repr__(self):
return "<Snapshot:{}:{}>".format(self.board_id, self.cycle_time)
|
class Snapshot(Model):
class Meta:
def __repr__(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 11 | 2 | 9 | 8 | 6 | 0 | 9 | 8 | 6 | 1 | 1 | 0 | 1 |
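The Meta.database = db_proxy indirection lets the real database be chosen at runtime rather than import time. Assuming db_proxy is a peewee Proxy, as the pattern suggests, the wiring looks roughly like this:

from peewee import Proxy, SqliteDatabase

db_proxy = Proxy()
db_proxy.initialize(SqliteDatabase(':memory:'))  # bind the real DB late

Models declared against the proxy stay import-safe until initialize() runs.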
2,981 |
ActionAgile/trellostats
|
ActionAgile_trellostats/trellostats/trellostats.py
|
trellostats.trellostats.TrelloStats
|
class TrelloStats(object):
"""
Main class that does the API thingummy.
We want to do it directly, as we'll be making lots of calls
around card history, so we need to be able to juice them up
with some gevent razzmatazz.
"""
def __init__(self, trellis_context):
self.app_key = trellis_context.get('app_key')
self.app_token = trellis_context.get('app_token')
self.board_id = trellis_context.get('board_id')
def _do_get(self, url):
try:
return requests.get(url).json()
except ValueError:
raise TrelloStatsException("Invalid options - check your board id.")
except ConnectionError:
raise TrelloStatsException("Cannot connect to Trello API.")
def get_token(self):
webbrowser.open(TOKEN_URL.format(self.app_key))
def get_lists(self):
url = BOARD_URL.format(self.board_id, self.app_key,
self.app_token)
return self._do_get(url)
def get_list_id_from_name(self, name):
try:
lists = self.get_lists()
if lists:
return [li.get('id') for li in lists if li.get('name') == name][0]
except IndexError:
pass
def get_list_data(self, list_id):
url = LIST_URL.format(list_id, self.app_key, self.app_token)
return self._do_get(url)
def _get_history_for_cards(self, cards):
urls = [ACTION_URL.format(card.get('id'), self.app_key,
self.app_token)
for card in cards]
rs = (grequests.get(u) for u in urls)
return grequests.map(rs)
def _get_cycle_time(self, card_history, units='days'):
dates = (x.get('date') for x in card_history.json())
date_objects = sorted([parse(date) for date in dates])
return getattr((date_objects[-1] - date_objects[0]), units)
def cycle_time(self, cards):
try:
card_histories = self._get_history_for_cards(cards.get('cards'))
cycle_time = np.mean([self._get_cycle_time(card_history)
for card_history in card_histories])
return cycle_time
except AttributeError:
raise TrelloStatsException("Can't get history of None.\
Have you put in the correct title\
of the Done column?")
def __repr__(self):
return "<TrelloStats: {}>".format(self.app_token)
|
class TrelloStats(object):
'''
Main class that does the API thingummy.
We want to do it directly, as we'll be making lots of calls
around card history, so we need to be able to juice them up
with some gevent razzmatazz.
'''
def __init__(self, trellis_context):
pass
def _do_get(self, url):
pass
def get_token(self):
pass
def get_lists(self):
pass
def get_list_id_from_name(self, name):
pass
def get_list_data(self, list_id):
pass
def _get_history_for_cards(self, cards):
pass
def _get_cycle_time(self, card_history, units='days'):
pass
def cycle_time(self, cards):
pass
def __repr__(self):
pass
| 11 | 1 | 5 | 0 | 5 | 0 | 2 | 0.12 | 1 | 6 | 1 | 0 | 10 | 3 | 10 | 10 | 67 | 11 | 50 | 23 | 39 | 6 | 44 | 23 | 33 | 3 | 1 | 2 | 15 |
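_get_cycle_time reduces a card's action history to the span between its earliest and latest timestamps. The same arithmetic on a fake history, assuming only python-dateutil, which the class already relies on for parse:

from dateutil.parser import parse

# Cycle time = latest action minus earliest action.
history = [{'date': '2024-01-01T09:00:00Z'},
           {'date': '2024-01-04T21:00:00Z'}]
dates = sorted(parse(item['date']) for item in history)
print(getattr(dates[-1] - dates[0], 'days'))  # 3, as with units='days'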
2,982 |
ActionAgile/trellostats
|
ActionAgile_trellostats/trellostats/trellostats.py
|
trellostats.trellostats.TrelloStatsException
|
class TrelloStatsException(Exception):
pass
|
class TrelloStatsException(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
2,983 |
ActiveState/simplealchemy
|
ActiveState_simplealchemy/simplealchemy.py
|
simplealchemy.SimpleDatabase
|
class SimpleDatabase(object):
metadata = None # to be set up in derived classes
class DoesNotExist(IOError):
def __init__(self, path):
super(DoesNotExist, self).__init__(
'database file %s does not exist' % path)
def __init__(self, path, touch=False):
"""
touch - create database, if it does not exist
"""
self.path = path
sqlite_uri = 'sqlite:///%s' % self.path
self.engine = create_engine(sqlite_uri, echo=False)
self.create_session = sessionmaker(
bind=self.engine,
autocommit=False,
# See the comment by Michael Bayer
# http://groups.google.com/group/sqlalchemy/browse_thread/thread/7c1eb642435adde7
# expire_on_commit=False
)
self.create_scoped_session = scoped_session(self.create_session)
if not exists(self.path):
if touch:
assert exists(dirname(self.path)), 'missing: ' + dirname(self.path)
self.metadata.create_all(self.engine)
else:
raise self.DoesNotExist(path)
def reset(self):
"""Reset the database
Drop all tables and recreate them
"""
self.metadata.drop_all(self.engine)
self.metadata.create_all(self.engine)
def close(self):
self.engine.dispose()
@contextmanager
def transaction(self, session=None):
"""Start a new transaction based on the passed session object. If session
is not passed, then create one and make sure of closing it finally.
"""
local_session = None
if session is None:
local_session = session = self.create_scoped_session()
try:
yield session
finally:
# Since ``local_session`` was created locally, close it here itself
if local_session is not None:
# but wait!
# http://groups.google.com/group/sqlalchemy/browse_thread/thread/7c1eb642435adde7
# To workaround this issue with sqlalchemy, we can either:
# 1) pass the session object explicitly
# 2) do not close the session at all (bad idea - could lead to memory leaks)
#
# Until pypm implements atomic transactions in client.installer,
# we retain this hack (i.e., we choose (2) for now)
pass # local_session.close()
def __str__(self):
return '{0.__class__.__name__}<{0.path}>'.format(self)
|
class SimpleDatabase(object):
class DoesNotExist(IOError):
def __init__(self, path):
pass
def __init__(self, path, touch=False):
'''
touch - create database, if it does not exist
'''
pass
def reset(self):
'''Reset the database
Drop all tables and recreate them
'''
pass
def close(self):
pass
@contextmanager
def transaction(self, session=None):
'''Start a new transaction based on the passed session object. If a
session is not passed, create one and make sure it is closed at the end.
'''
pass
def __str__(self):
pass
| 9 | 3 | 10 | 1 | 6 | 4 | 2 | 0.61 | 1 | 1 | 1 | 0 | 5 | 4 | 5 | 5 | 68 | 9 | 38 | 16 | 29 | 23 | 31 | 15 | 23 | 3 | 1 | 2 | 10 |
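The transaction() helper only takes responsibility for sessions it created itself, and, per the comment, currently skips even that close to dodge the SQLAlchemy expire-on-commit issue. A standalone sketch of the ownership logic, with FakeSession as a stand-in for the scoped SQLAlchemy session:

from contextlib import contextmanager

class FakeSession:
    def close(self):
        print('closed')

@contextmanager
def transaction(session=None, factory=FakeSession):
    local = None
    if session is None:
        local = session = factory()  # created locally, so ours to clean up
    try:
        yield session
    finally:
        if local is not None:
            pass  # would be local.close(), skipped as in the original

with transaction() as s:
    print(type(s).__name__)  # FakeSession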
2,984 |
ActiveState/simplealchemy
|
ActiveState_simplealchemy/simplealchemy.py
|
simplealchemy.SimpleDatabase.DoesNotExist
|
class DoesNotExist(IOError):
def __init__(self, path):
super(DoesNotExist, self).__init__(
'database file %s does not exist' % path)
|
class DoesNotExist(IOError):
def __init__(self, path):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 4 | 0 | 4 | 2 | 2 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
2,985 |
ActiveState/simplealchemy
|
ActiveState_simplealchemy/simplealchemy.py
|
simplealchemy._get_best_column_type
|
class _get_best_column_type():
"""Return the best column type for the given name."""
mapping = dict(
name = String,
version = String,
keywords = String,
home_page = String,
license = String,
author = String,
author_email = String,
maintainer = String,
maintainer_email = String,
osarch = String,
pyver = String,
pkg_version = String,
relpath = String,
tags = String,
original_source = String,
patched_source = String,
summary = Text,
description = Text,
python3 = Boolean,
metadata_hash = String,
install_requires = Pickle2Type,
files_list = Pickle2Type,
)
def __call__(self, name):
try:
return self.mapping[name]
except KeyError:
raise KeyError(
'missing key. add type for "{0}" in self.mapping'.format(
name))
|
class _get_best_column_type():
'''Return the best column type for the given name.'''
def __call__(self, name):
pass
| 2 | 1 | 7 | 0 | 7 | 0 | 2 | 0.03 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 37 | 4 | 32 | 3 | 30 | 1 | 7 | 3 | 5 | 2 | 0 | 1 | 2 |
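Calling the class-as-callable maps a column name to a SQLAlchemy type and fails loudly on unknown names. A generic miniature of the same callable-mapping pattern (BestType is hypothetical and uses builtin types so the example stays self-contained):

class BestType:
    mapping = {'name': str, 'count': int}

    def __call__(self, key):
        try:
            return self.mapping[key]
        except KeyError:
            raise KeyError('missing key. add type for "%s" in self.mapping' % key)

best_type = BestType()
print(best_type('count'))  # <class 'int'>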
2,986 |
ActiveState/simplealchemy
|
ActiveState_simplealchemy/simplealchemy.py
|
simplealchemy.SimpleObject
|
class SimpleObject(object):
"""Object with a collection of fields.
The following features are supported:
1) Automatically initialize the fields in __init__
2) Inherit and extend with additional fields
3) Ability to convert from other object types (with extra/less fields)
4) Interoperate with sqlalchemy.orm (i.e., plain `self.foo=value` works)
"""
# Public fields in this object
FIELDS = []
def __init__(self, **kwargs):
"""Initialize the object with FIELDS whose values are in ``kwargs``"""
self.__assert_field_mapping(kwargs)
for field in self.FIELDS:
setattr(self, field, kwargs[field])
@classmethod
def create_from(cls, another, **kwargs):
"""Create from another object of different type.
Another object must be from a derived class of SimpleObject (which
contains FIELDS)
"""
reused_fields = {}
for field, value in another.get_fields():
if field in cls.FIELDS:
reused_fields[field] = value
reused_fields.update(kwargs)
return cls(**reused_fields)
def get_fields(self):
"""Return fields as a list of (name,value)"""
for field in self.FIELDS:
yield field, getattr(self, field)
def to_dict(self):
return dict(self.get_fields())
def to_json(self):
return json.dumps(self.to_dict())
@classmethod
def from_json(cls, json_string):
values = json.loads(json_string)
return cls(**_remove_unicode_keys(values))
def __assert_field_mapping(self, mapping):
"""Assert that mapping.keys() == FIELDS.
The programmer is not supposed to pass extra or missing fields
"""
passed_keys = set(mapping.keys())
class_fields = set(self.FIELDS)
if passed_keys != class_fields:
raise ValueError('\n'.join([
"{0} got different fields from expected".format(
self.__class__),
" got : {0}".format(list(sorted(passed_keys))),
" expected: {0}".format(list(sorted(class_fields)))]))
|
class SimpleObject(object):
'''Object with a collection of fields.
The following features are supported:
1) Automatically initialize the fields in __init__
2) Inherit and extend with additional fields
3) Ability to convert from other object types (with extra/less fields)
4) Interoperate with sqlalchemy.orm (i.e., plain `self.foo=value` works)
'''
def __init__(self, **kwargs):
'''Initialize the object with FIELDS whose values are in ``kwargs``'''
pass
@classmethod
def create_from(cls, another, **kwargs):
'''Create from another object of different type.
Another object must be from a derived class of SimpleObject (which
contains FIELDS)
'''
pass
def get_fields(self):
'''Return fields as a list of (name,value)'''
pass
def to_dict(self):
pass
def to_json(self):
pass
@classmethod
def from_json(cls, json_string):
pass
def __assert_field_mapping(self, mapping):
'''Assert that mapping.keys() == FIELDS.
The programmer is not supposed to pass extra or missing fields
'''
pass
| 10 | 5 | 6 | 0 | 4 | 1 | 2 | 0.5 | 1 | 4 | 0 | 0 | 5 | 0 | 7 | 7 | 64 | 13 | 34 | 18 | 24 | 17 | 28 | 16 | 20 | 3 | 1 | 2 | 12 |
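In use, a subclass just declares FIELDS and inherits the dict/JSON plumbing. A hypothetical Package subclass, assuming the simplealchemy module above is importable:

from simplealchemy import SimpleObject  # assumes the module is on the path

class Package(SimpleObject):
    FIELDS = ['name', 'version']

pkg = Package(name='foo', version='1.0')
print(pkg.to_json())                      # {"name": "foo", "version": "1.0"}
clone = Package.from_json(pkg.to_json())  # round-trip via FIELDS
print(clone.to_dict() == pkg.to_dict())   # True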
2,987 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/collection.py
|
assertpy.collection.CollectionMixin
|
class CollectionMixin(object):
"""Collection assertions mixin."""
def is_iterable(self):
"""Asserts that val is iterable collection."""
if not isinstance(self.val, Iterable):
self._err('Expected iterable, but was not.')
return self
def is_not_iterable(self):
"""Asserts that val is not iterable collection."""
if isinstance(self.val, Iterable):
self._err('Expected not iterable, but was.')
return self
def is_subset_of(self, *supersets):
"""Asserts that val is iterable and a subset of the given superset or flattened superset if multiple supersets are given."""
if not isinstance(self.val, Iterable):
raise TypeError('val is not iterable')
if len(supersets) == 0:
raise ValueError('one or more superset args must be given')
missing = []
if hasattr(self.val, 'keys') and callable(getattr(self.val, 'keys')) and hasattr(self.val, '__getitem__'):
# flatten superset dicts
superdict = {}
for l,j in enumerate(supersets):
self._check_dict_like(j, check_values=False, name='arg #%d' % (l+1))
for k in j.keys():
superdict.update({k: j[k]})
for i in self.val.keys():
if i not in superdict:
missing.append({i: self.val[i]}) # bad key
elif self.val[i] != superdict[i]:
missing.append({i: self.val[i]}) # bad val
if missing:
self._err('Expected <%s> to be subset of %s, but %s %s missing.' % (self.val, self._fmt_items(superdict), self._fmt_items(missing), 'was' if len(missing) == 1 else 'were'))
else:
# flatten supersets
superset = set()
for j in supersets:
try:
for k in j:
superset.add(k)
except Exception:
superset.add(j)
for i in self.val:
if i not in superset:
missing.append(i)
if missing:
self._err('Expected <%s> to be subset of %s, but %s %s missing.' % (self.val, self._fmt_items(superset), self._fmt_items(missing), 'was' if len(missing) == 1 else 'were'))
return self
|
class CollectionMixin(object):
'''Collection assertions mixin.'''
def is_iterable(self):
'''Asserts that val is iterable collection.'''
pass
def is_not_iterable(self):
'''Asserts that val is not iterable collection.'''
pass
def is_subset_of(self, *supersets):
'''Asserts that val is iterable and a subset of the given superset or flattened superset if multiple supersets are given.'''
pass
| 4 | 4 | 17 | 1 | 14 | 3 | 7 | 0.21 | 1 | 5 | 0 | 1 | 3 | 0 | 3 | 3 | 55 | 7 | 42 | 10 | 38 | 9 | 40 | 10 | 36 | 18 | 1 | 4 | 22 |
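is_subset_of has two branches: dict-likes compare key/value pairs against a merged superdict, while plain iterables flatten all superset args into one membership set. Both behaviors, assuming assertpy is installed:

from assertpy import assert_that

assert_that({'a': 1}).is_subset_of({'a': 1, 'b': 2})  # key and value must match
assert_that([1, 2]).is_subset_of([1], [2, 3])         # supersets are flattened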
2,988 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/date.py
|
assertpy.date.DateMixin
|
class DateMixin(object):
"""Date and time assertions mixin."""
### datetime assertions ###
def is_before(self, other):
"""Asserts that val is a date and is before other date."""
if type(self.val) is not datetime.datetime:
raise TypeError('val must be datetime, but was type <%s>' % type(self.val).__name__)
if type(other) is not datetime.datetime:
raise TypeError('given arg must be datetime, but was type <%s>' % type(other).__name__)
if self.val >= other:
self._err('Expected <%s> to be before <%s>, but was not.' % (self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S')))
return self
def is_after(self, other):
"""Asserts that val is a date and is after other date."""
if type(self.val) is not datetime.datetime:
raise TypeError('val must be datetime, but was type <%s>' % type(self.val).__name__)
if type(other) is not datetime.datetime:
raise TypeError('given arg must be datetime, but was type <%s>' % type(other).__name__)
if self.val <= other:
self._err('Expected <%s> to be after <%s>, but was not.' % (self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S')))
return self
def is_equal_to_ignoring_milliseconds(self, other):
if type(self.val) is not datetime.datetime:
raise TypeError('val must be datetime, but was type <%s>' % type(self.val).__name__)
if type(other) is not datetime.datetime:
raise TypeError('given arg must be datetime, but was type <%s>' % type(other).__name__)
if self.val.date() != other.date() or self.val.hour != other.hour or self.val.minute != other.minute or self.val.second != other.second:
self._err('Expected <%s> to be equal to <%s>, but was not.' % (self.val.strftime('%Y-%m-%d %H:%M:%S'), other.strftime('%Y-%m-%d %H:%M:%S')))
return self
def is_equal_to_ignoring_seconds(self, other):
if type(self.val) is not datetime.datetime:
raise TypeError('val must be datetime, but was type <%s>' % type(self.val).__name__)
if type(other) is not datetime.datetime:
raise TypeError('given arg must be datetime, but was type <%s>' % type(other).__name__)
if self.val.date() != other.date() or self.val.hour != other.hour or self.val.minute != other.minute:
self._err('Expected <%s> to be equal to <%s>, but was not.' % (self.val.strftime('%Y-%m-%d %H:%M'), other.strftime('%Y-%m-%d %H:%M')))
return self
def is_equal_to_ignoring_time(self, other):
if type(self.val) is not datetime.datetime:
raise TypeError('val must be datetime, but was type <%s>' % type(self.val).__name__)
if type(other) is not datetime.datetime:
raise TypeError('given arg must be datetime, but was type <%s>' % type(other).__name__)
if self.val.date() != other.date():
self._err('Expected <%s> to be equal to <%s>, but was not.' % (self.val.strftime('%Y-%m-%d'), other.strftime('%Y-%m-%d')))
return self
|
class DateMixin(object):
'''Date and time assertions mixin.'''
def is_before(self, other):
'''Asserts that val is a date and is before other date.'''
pass
def is_after(self, other):
'''Asserts that val is a date and is after other date.'''
pass
def is_equal_to_ignoring_milliseconds(self, other):
pass
def is_equal_to_ignoring_seconds(self, other):
pass
def is_equal_to_ignoring_time(self, other):
pass
| 6 | 3 | 8 | 0 | 8 | 0 | 4 | 0.1 | 1 | 3 | 0 | 1 | 5 | 0 | 5 | 5 | 50 | 5 | 41 | 6 | 35 | 4 | 41 | 6 | 35 | 4 | 1 | 1 | 20 |
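Each is_equal_to_ignoring_* method simply drops finer-grained components from the comparison. With fixed datetimes so the result is deterministic, assuming assertpy is installed:

import datetime
from assertpy import assert_that

a = datetime.datetime(2020, 1, 2, 3, 4, 5)
b = datetime.datetime(2020, 1, 2, 9, 30, 0)
assert_that(a).is_before(b)
assert_that(a).is_equal_to_ignoring_time(b)  # only the date part is compared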
2,989 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/contains.py
|
assertpy.contains.ContainsMixin
|
class ContainsMixin(object):
"""Containment assertions mixin."""
def contains(self, *items):
"""Asserts that val contains the given item or items."""
if len(items) == 0:
raise ValueError('one or more args must be given')
elif len(items) == 1:
if items[0] not in self.val:
if self._check_dict_like(self.val, return_as_bool=True):
self._err('Expected <%s> to contain key <%s>, but did not.' % (self.val, items[0]))
else:
self._err('Expected <%s> to contain item <%s>, but did not.' % (self.val, items[0]))
else:
missing = []
for i in items:
if i not in self.val:
missing.append(i)
if missing:
if self._check_dict_like(self.val, return_as_bool=True):
self._err('Expected <%s> to contain keys %s, but did not contain key%s %s.' % (self.val, self._fmt_items(items), '' if len(missing) == 0 else 's', self._fmt_items(missing)))
else:
self._err('Expected <%s> to contain items %s, but did not contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(missing)))
return self
def does_not_contain(self, *items):
"""Asserts that val does not contain the given item or items."""
if len(items) == 0:
raise ValueError('one or more args must be given')
elif len(items) == 1:
if items[0] in self.val:
self._err('Expected <%s> to not contain item <%s>, but did.' % (self.val, items[0]))
else:
found = []
for i in items:
if i in self.val:
found.append(i)
if found:
self._err('Expected <%s> to not contain items %s, but did contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(found)))
return self
def contains_only(self, *items):
"""Asserts that val contains only the given item or items."""
if len(items) == 0:
raise ValueError('one or more args must be given')
else:
extra = []
for i in self.val:
if i not in items:
extra.append(i)
if extra:
self._err('Expected <%s> to contain only %s, but did contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(extra)))
missing = []
for i in items:
if i not in self.val:
missing.append(i)
if missing:
self._err('Expected <%s> to contain only %s, but did not contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(missing)))
return self
def contains_sequence(self, *items):
"""Asserts that val contains the given sequence of items in order."""
if len(items) == 0:
raise ValueError('one or more args must be given')
else:
try:
for i in xrange(len(self.val) - len(items) + 1):
for j in xrange(len(items)):
if self.val[i+j] != items[j]:
break
else:
return self
except TypeError:
raise TypeError('val is not iterable')
self._err('Expected <%s> to contain sequence %s, but did not.' % (self.val, self._fmt_items(items)))
def contains_duplicates(self):
"""Asserts that val is iterable and contains duplicate items."""
try:
if len(self.val) != len(set(self.val)):
return self
except TypeError:
raise TypeError('val is not iterable')
self._err('Expected <%s> to contain duplicates, but did not.' % self.val)
def does_not_contain_duplicates(self):
"""Asserts that val is iterable and does not contain any duplicate items."""
try:
if len(self.val) == len(set(self.val)):
return self
except TypeError:
raise TypeError('val is not iterable')
self._err('Expected <%s> to not contain duplicates, but did.' % self.val)
def is_empty(self):
"""Asserts that val is empty."""
if len(self.val) != 0:
if isinstance(self.val, str_types):
self._err('Expected <%s> to be empty string, but was not.' % self.val)
else:
self._err('Expected <%s> to be empty, but was not.' % self.val)
return self
def is_not_empty(self):
"""Asserts that val is not empty."""
if len(self.val) == 0:
if isinstance(self.val, str_types):
self._err('Expected not empty string, but was empty.')
else:
self._err('Expected not empty, but was empty.')
return self
def is_in(self, *items):
"""Asserts that val is equal to one of the given items."""
if len(items) == 0:
raise ValueError('one or more args must be given')
else:
for i in items:
if self.val == i:
return self
self._err('Expected <%s> to be in %s, but was not.' % (self.val, self._fmt_items(items)))
def is_not_in(self, *items):
"""Asserts that val is not equal to one of the given items."""
if len(items) == 0:
raise ValueError('one or more args must be given')
else:
for i in items:
if self.val == i:
self._err('Expected <%s> to not be in %s, but was.' % (self.val, self._fmt_items(items)))
return self
|
class ContainsMixin(object):
'''Containment assertions mixin.'''
def contains(self, *items):
'''Asserts that val contains the given item or items.'''
pass
def does_not_contain(self, *items):
'''Asserts that val does not contain the given item or items.'''
pass
def contains_only(self, *items):
'''Asserts that val contains only the given item or items.'''
pass
def contains_sequence(self, *items):
'''Asserts that val contains the given sequence of items in order.'''
pass
def contains_duplicates(self):
'''Asserts that val is iterable and contains duplicate items.'''
pass
def does_not_contain_duplicates(self):
'''Asserts that val is iterable and does not contain any duplicate items.'''
pass
def is_empty(self):
'''Asserts that val is empty.'''
pass
def is_not_empty(self):
'''Asserts that val is not empty.'''
pass
def is_in(self, *items):
'''Asserts that val is equal to one of the given items.'''
pass
def is_not_in(self, *items):
'''Asserts that val is not equal to one of the given items.'''
pass
| 11 | 11 | 12 | 0 | 11 | 1 | 5 | 0.1 | 1 | 3 | 0 | 1 | 10 | 1 | 10 | 10 | 132 | 11 | 110 | 23 | 99 | 11 | 98 | 22 | 87 | 10 | 1 | 5 | 50 |
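contains_sequence is a brute-force window scan that leans on Python's for/else: the else fires only when the inner loop finishes without a break, meaning every item matched. The same scan extracted into a plain function:

# Slide a window of len(items) over val; return True on the first full match.
def contains_sequence(val, items):
    for i in range(len(val) - len(items) + 1):
        for j in range(len(items)):
            if val[i + j] != items[j]:
                break
        else:
            return True  # inner loop completed: window matched
    return False

print(contains_sequence([1, 2, 3, 4], [2, 3]))  # True
print(contains_sequence([1, 2, 3, 4], [3, 2]))  # False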
2,990 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/base.py
|
assertpy.base.BaseMixin
|
class BaseMixin(object):
"""Base mixin."""
def described_as(self, description):
"""Describes the assertion. On failure, the description is included in the error message."""
self.description = str(description)
return self
def is_equal_to(self, other, **kwargs):
"""Asserts that val is equal to other."""
if self._check_dict_like(self.val, check_values=False, return_as_bool=True) and \
self._check_dict_like(other, check_values=False, return_as_bool=True):
if self._dict_not_equal(self.val, other, ignore=kwargs.get('ignore'), include=kwargs.get('include')):
self._dict_err(self.val, other, ignore=kwargs.get('ignore'), include=kwargs.get('include'))
else:
if self.val != other:
self._err('Expected <%s> to be equal to <%s>, but was not.' % (self.val, other))
return self
def is_not_equal_to(self, other):
"""Asserts that val is not equal to other."""
if self.val == other:
self._err('Expected <%s> to be not equal to <%s>, but was.' % (self.val, other))
return self
def is_same_as(self, other):
"""Asserts that the val is identical to other, via 'is' compare."""
if self.val is not other:
self._err('Expected <%s> to be identical to <%s>, but was not.' % (self.val, other))
return self
def is_not_same_as(self, other):
"""Asserts that the val is not identical to other, via 'is' compare."""
if self.val is other:
self._err('Expected <%s> to be not identical to <%s>, but was.' % (self.val, other))
return self
def is_true(self):
"""Asserts that val is true."""
if not self.val:
self._err('Expected <True>, but was not.')
return self
def is_false(self):
"""Asserts that val is false."""
if self.val:
self._err('Expected <False>, but was not.')
return self
def is_none(self):
"""Asserts that val is none."""
if self.val is not None:
self._err('Expected <%s> to be <None>, but was not.' % self.val)
return self
def is_not_none(self):
"""Asserts that val is not none."""
if self.val is None:
self._err('Expected not <None>, but was.')
return self
def _type(self, val):
if hasattr(val, '__name__'):
return val.__name__
elif hasattr(val, '__class__'):
return val.__class__.__name__
return 'unknown'
def is_type_of(self, some_type):
"""Asserts that val is of the given type."""
if type(some_type) is not type and not issubclass(type(some_type), type):
raise TypeError('given arg must be a type')
if type(self.val) is not some_type:
t = self._type(self.val)
self._err('Expected <%s:%s> to be of type <%s>, but was not.' % (self.val, t, some_type.__name__))
return self
def is_instance_of(self, some_class):
"""Asserts that val is an instance of the given class."""
try:
if not isinstance(self.val, some_class):
t = self._type(self.val)
self._err('Expected <%s:%s> to be instance of class <%s>, but was not.' % (self.val, t, some_class.__name__))
except TypeError:
raise TypeError('given arg must be a class')
return self
def is_length(self, length):
"""Asserts that val is the given length."""
if type(length) is not int:
raise TypeError('given arg must be an int')
if length < 0:
raise ValueError('given arg must be a positive int')
if len(self.val) != length:
self._err('Expected <%s> to be of length <%d>, but was <%d>.' % (self.val, length, len(self.val)))
return self
|
class BaseMixin(object):
'''Base mixin.'''
def described_as(self, description):
'''Describes the assertion. On failure, the description is included in the error message.'''
pass
def is_equal_to(self, other, **kwargs):
'''Asserts that val is equal to other.'''
pass
def is_not_equal_to(self, other):
'''Asserts that val is not equal to other.'''
pass
def is_same_as(self, other):
'''Asserts that the val is identical to other, via 'is' compare.'''
pass
def is_not_same_as(self, other):
'''Asserts that the val is not identical to other, via 'is' compare.'''
pass
def is_true(self):
'''Asserts that val is true.'''
pass
def is_false(self):
'''Asserts that val is false.'''
pass
def is_none(self):
'''Asserts that val is none.'''
pass
def is_not_none(self):
'''Asserts that val is not none.'''
pass
def _type(self, val):
pass
def is_type_of(self, some_type):
'''Asserts that val is of the given type.'''
pass
def is_instance_of(self, some_class):
'''Asserts that val is an instance of the given class.'''
pass
def is_length(self, length):
'''Asserts that val is the given length.'''
pass
| 14 | 13 | 6 | 0 | 5 | 1 | 2 | 0.19 | 1 | 5 | 0 | 1 | 13 | 2 | 13 | 13 | 96 | 13 | 70 | 18 | 56 | 13 | 67 | 17 | 53 | 4 | 1 | 2 | 32 |
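described_as just stores a string that _err later prepends in square brackets to the failure message. A quick demonstration, assuming assertpy is installed:

from assertpy import assert_that

try:
    assert_that(1 + 2).described_as('adding stuff').is_equal_to(2)
except AssertionError as e:
    print(e)  # [adding stuff] Expected <3> to be equal to <2>, but was not.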
2,991 |
ActivisionGameScience/assertpy
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ActivisionGameScience_assertpy/assertpy/snapshot.py
|
assertpy.snapshot.SnapshotMixin.snapshot._Encoder
|
class _Encoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, set):
return {'__type__': 'set', '__data__': list(o)}
elif isinstance(o, complex):
return {'__type__': 'complex', '__data__': [o.real, o.imag]}
elif isinstance(o, datetime.datetime):
return {'__type__': 'datetime', '__data__': o.strftime('%Y-%m-%d %H:%M:%S')}
elif '__dict__' in dir(o) and type(o) is not type:
return {
'__type__': 'instance',
'__class__': o.__class__.__name__,
'__module__': o.__class__.__module__,
'__data__': o.__dict__
}
return json.JSONEncoder.default(self, o)
|
class _Encoder(json.JSONEncoder):
def default(self, o):
pass
| 2 | 0 | 15 | 0 | 15 | 0 | 5 | 0 | 1 | 5 | 0 | 0 | 1 | 0 | 1 | 5 | 16 | 0 | 16 | 2 | 14 | 0 | 8 | 2 | 6 | 5 | 2 | 1 | 5 |
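The encoder tags every non-JSON type with a __type__/__data__ pair so the matching decoder can reverse it. The same idea restricted to sets, as a self-contained JSONEncoder subclass:

import json

class SetEncoder(json.JSONEncoder):
    # Same tagging convention as _Encoder above, sets only.
    def default(self, o):
        if isinstance(o, set):
            return {'__type__': 'set', '__data__': sorted(o)}
        return json.JSONEncoder.default(self, o)

print(json.dumps({'vals': {3, 1, 2}}, cls=SetEncoder))
# {"vals": {"__type__": "set", "__data__": [1, 2, 3]}}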
2,992 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/assertpy.py
|
assertpy.assertpy.AssertionBuilder
|
class AssertionBuilder(DynamicMixin, ExceptionMixin, SnapshotMixin, ExtractingMixin,
FileMixin, DateMixin, DictMixin, CollectionMixin, StringMixin, NumericMixin,
ContainsMixin, HelpersMixin, BaseMixin, object):
"""Assertion builder."""
def __init__(self, val, description='', kind=None, expected=None, logger=None):
"""Construct the assertion builder."""
self.val = val
self.description = description
self.kind = kind
self.expected = expected
self.logger = logger if logger else _default_logger
def _builder(self, val, description='', kind=None, expected=None, logger=None):
"""Helper to build a new Builder. Only used when we don't want to chain."""
return builder(val, description, kind, expected, logger)
def _err(self, msg):
"""Helper to raise an AssertionError, and optionally prepend custom description."""
out = '%s%s' % ('[%s] ' % self.description if len(self.description) > 0 else '', msg)
if self.kind == 'warn':
self.logger.warning(out)
return self
elif self.kind == 'soft':
global _soft_err
_soft_err.append(out)
return self
else:
raise AssertionError(out)
|
class AssertionBuilder(DynamicMixin, ExceptionMixin, SnapshotMixin, ExtractingMixin,
FileMixin, DateMixin, DictMixin, CollectionMixin, StringMixin, NumericMixin,
ContainsMixin, HelpersMixin, BaseMixin, object):
'''Assertion builder.'''
def __init__(self, val, description='', kind=None, expected=None, logger=None):
'''Construct the assertion builder.'''
pass
def _builder(self, val, description='', kind=None, expected=None, logger=None):
'''Helper to build a new Builder. Only used when we don't want to chain.'''
pass
def _err(self, msg):
'''Helper to raise an AssertionError, and optionally prepend custom description.'''
pass
| 4 | 4 | 7 | 0 | 6 | 1 | 2 | 0.18 | 14 | 1 | 0 | 0 | 3 | 5 | 3 | 91 | 29 | 3 | 22 | 13 | 15 | 4 | 18 | 11 | 13 | 4 | 2 | 1 | 7 |
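The kind field turns one builder into three behaviors in _err: 'warn' logs the message, 'soft' appends it to the global error list, and the default raises AssertionError. The warn flavor in action, assuming assertpy is installed:

from assertpy import assert_warn

assert_warn('foo').is_length(4)  # failure is logged, not raised
print('execution continues')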
2,993 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/tests/test_snapshots.py
|
test_snapshots.Foo
|
class Foo(object):
def __init__(self, x=0):
self.x = x
self.y = 1
def __eq__(self, other):
if isinstance(self, other.__class__):
return self.__dict__ == other.__dict__
return NotImplemented
|
class Foo(object):
def __init__(self, x=0):
pass
def __eq__(self, other):
pass
| 3 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 0 | 0 | 1 | 2 | 2 | 2 | 2 | 9 | 1 | 8 | 5 | 5 | 0 | 8 | 5 | 5 | 2 | 1 | 1 | 3 |
2,994 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/tests/test_snapshots.py
|
test_snapshots.Bar
|
class Bar(Foo):
def __eq__(self, other):
return NotImplemented
|
class Bar(Foo):
def __eq__(self, other):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 3 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
2,995 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/snapshot.py
|
assertpy.snapshot.SnapshotMixin.snapshot._Decoder
|
class _Decoder(json.JSONDecoder):
def __init__(self):
json.JSONDecoder.__init__(self, object_hook=self.object_hook)
def object_hook(self, d):
if '__type__' in d and '__data__' in d:
if d['__type__'] == 'set':
return set(d['__data__'])
elif d['__type__'] == 'complex':
return complex(d['__data__'][0], d['__data__'][1])
elif d['__type__'] == 'datetime':
return datetime.datetime.strptime(d['__data__'], '%Y-%m-%d %H:%M:%S')
elif d['__type__'] == 'instance':
mod = __import__(
d['__module__'], fromlist=[d['__class__']])
klass = getattr(mod, d['__class__'])
inst = klass.__new__(klass)
inst.__dict__ = d['__data__']
return inst
return d
|
class _Decoder(json.JSONDecoder):
def __init__(self):
pass
def object_hook(self, d):
pass
| 3 | 0 | 9 | 0 | 9 | 0 | 4 | 0 | 1 | 3 | 0 | 0 | 2 | 0 | 2 | 5 | 19 | 1 | 18 | 6 | 15 | 0 | 15 | 6 | 12 | 6 | 2 | 2 | 7 |
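_Decoder undoes the tagging through object_hook, which json calls for every decoded dict. The set branch in isolation:

import json

def object_hook(d):
    # Mirror of the set branch in _Decoder.object_hook.
    if d.get('__type__') == 'set':
        return set(d['__data__'])
    return d

print(json.loads('{"__type__": "set", "__data__": [1, 2]}',
                 object_hook=object_hook))  # {1, 2}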
2,996 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/tests/test_readme.py
|
test_readme.TestReadme
|
class TestReadme(object):
@classmethod
def setupClass(cls):
print('\nTEST test_readme.py : v%d.%d.%d' % (sys.version_info[0], sys.version_info[1], sys.version_info[2]))
def setup(self):
with open('foo.txt', 'w') as fp:
fp.write('foobar')
def teardown(self):
os.remove('foo.txt')
def test_something(self):
assert_that(1 + 2).is_equal_to(3)
assert_that('foobar').is_length(6).starts_with('foo').ends_with('bar')
assert_that(['a', 'b', 'c']).contains('a').does_not_contain('x')
def test_strings(self):
assert_that('').is_not_none()
assert_that('').is_empty()
assert_that('').is_false()
assert_that('').is_type_of(str)
assert_that('').is_instance_of(str)
assert_that('foo').is_length(3)
assert_that('foo').is_not_empty()
assert_that('foo').is_true()
assert_that('foo').is_alpha()
assert_that('123').is_digit()
assert_that('foo').is_lower()
assert_that('FOO').is_upper()
assert_that('foo').is_iterable()
assert_that('foo').is_equal_to('foo')
assert_that('foo').is_not_equal_to('bar')
assert_that('foo').is_equal_to_ignoring_case('FOO')
if sys.version_info[0] == 3:
assert_that('foo').is_unicode()
else:
assert_that(u'foo').is_unicode()
assert_that('foo').contains('f')
assert_that('foo').contains('f','oo')
assert_that('foo').contains_ignoring_case('F','oO')
assert_that('foo').does_not_contain('x')
assert_that('foo').contains_only('f','o')
assert_that('foo').contains_sequence('o','o')
assert_that('foo').contains_duplicates()
assert_that('fox').does_not_contain_duplicates()
assert_that('foo').is_in('foo','bar','baz')
assert_that('foo').is_not_in('boo','bar','baz')
assert_that('foo').is_subset_of('abcdefghijklmnopqrstuvwxyz')
assert_that('foo').starts_with('f')
assert_that('foo').ends_with('oo')
assert_that('foo').matches(r'\w')
assert_that('123-456-7890').matches(r'\d{3}-\d{3}-\d{4}')
assert_that('foo').does_not_match(r'\d+')
# partial matches, these all pass
assert_that('foo').matches(r'\w')
assert_that('foo').matches(r'oo')
assert_that('foo').matches(r'\w{2}')
# match the entire string with an anchored regex pattern, passes
assert_that('foo').matches(r'^\w{3}$')
# fails
try:
assert_that('foo').matches(r'^\w{2}$')
fail('should have raised error')
except AssertionError:
pass
def test_ints(self):
assert_that(0).is_not_none()
assert_that(0).is_false()
assert_that(0).is_type_of(int)
assert_that(0).is_instance_of(int)
assert_that(0).is_zero()
assert_that(1).is_not_zero()
assert_that(1).is_positive()
assert_that(-1).is_negative()
assert_that(123).is_equal_to(123)
assert_that(123).is_not_equal_to(456)
assert_that(123).is_greater_than(100)
assert_that(123).is_greater_than_or_equal_to(123)
assert_that(123).is_less_than(200)
assert_that(123).is_less_than_or_equal_to(200)
assert_that(123).is_between(100, 200)
assert_that(123).is_close_to(100, 25)
assert_that(1).is_in(0,1,2,3)
assert_that(1).is_not_in(-1,-2,-3)
def test_floats(self):
assert_that(0.0).is_not_none()
assert_that(0.0).is_false()
assert_that(0.0).is_type_of(float)
assert_that(0.0).is_instance_of(float)
assert_that(123.4).is_equal_to(123.4)
assert_that(123.4).is_not_equal_to(456.7)
assert_that(123.4).is_greater_than(100.1)
assert_that(123.4).is_greater_than_or_equal_to(123.4)
assert_that(123.4).is_less_than(200.2)
assert_that(123.4).is_less_than_or_equal_to(123.4)
assert_that(123.4).is_between(100.1, 200.2)
assert_that(123.4).is_close_to(123, 0.5)
assert_that(float('NaN')).is_nan()
assert_that(123.4).is_not_nan()
assert_that(float('Inf')).is_inf()
assert_that(123.4).is_not_inf()
def test_lists(self):
assert_that([]).is_not_none()
assert_that([]).is_empty()
assert_that([]).is_false()
assert_that([]).is_type_of(list)
assert_that([]).is_instance_of(list)
assert_that([]).is_iterable()
assert_that(['a','b']).is_length(2)
assert_that(['a','b']).is_not_empty()
assert_that(['a','b']).is_equal_to(['a','b'])
assert_that(['a','b']).is_not_equal_to(['b','a'])
assert_that(['a','b']).contains('a')
assert_that(['a','b']).contains('b','a')
assert_that(['a','b']).does_not_contain('x','y')
assert_that(['a','b']).contains_only('a','b')
assert_that(['a','a']).contains_only('a')
assert_that(['a','b','c']).contains_sequence('b','c')
assert_that(['a','b']).is_subset_of(['a','b','c'])
assert_that(['a','x','x']).contains_duplicates()
assert_that(['a','b','c']).does_not_contain_duplicates()
assert_that(['a','b','c']).starts_with('a')
assert_that(['a','b','c']).ends_with('c')
def test_tuples(self):
assert_that(()).is_not_none()
assert_that(()).is_empty()
assert_that(()).is_false()
assert_that(()).is_type_of(tuple)
assert_that(()).is_instance_of(tuple)
assert_that(()).is_iterable()
assert_that((1,2,3)).is_length(3)
assert_that((1,2,3)).is_not_empty()
assert_that((1,2,3)).is_equal_to((1,2,3))
assert_that((1,2,3)).is_not_equal_to((1,2,4))
assert_that((1,2,3)).contains(1)
assert_that((1,2,3)).contains(3,2,1)
assert_that((1,2,3)).does_not_contain(4,5,6)
assert_that((1,2,3)).contains_only(1,2,3)
assert_that((1,1,1)).contains_only(1)
assert_that((1,2,3)).contains_sequence(2,3)
assert_that((1,2,3)).is_subset_of((1,2,3,4))
assert_that((1,2,2)).contains_duplicates()
assert_that((1,2,3)).does_not_contain_duplicates()
assert_that((1,2,3)).starts_with(1)
assert_that((1,2,3)).ends_with(3)
def test_dicts(self):
assert_that({}).is_not_none()
assert_that({}).is_empty()
assert_that({}).is_false()
assert_that({}).is_type_of(dict)
assert_that({}).is_instance_of(dict)
assert_that({'a':1,'b':2}).is_length(2)
assert_that({'a':1,'b':2}).is_not_empty()
assert_that({'a':1,'b':2}).is_equal_to({'a':1,'b':2})
assert_that({'a':1,'b':2}).is_equal_to({'b':2,'a':1})
assert_that({'a':1,'b':2}).is_not_equal_to({'a':1,'b':3})
assert_that({'a':1,'b':2}).contains('a')
assert_that({'a':1,'b':2}).contains('b','a')
assert_that({'a':1,'b':2}).does_not_contain('x')
assert_that({'a':1,'b':2}).does_not_contain('x','y')
assert_that({'a':1,'b':2}).contains_only('a','b')
assert_that({'a':1,'b':2}).is_subset_of({'a':1,'b':2,'c':3})
# contains_key() is just an alias for contains()
assert_that({'a':1,'b':2}).contains_key('a')
assert_that({'a':1,'b':2}).contains_key('b','a')
# does_not_contain_key() is just an alias for does_not_contain()
assert_that({'a':1,'b':2}).does_not_contain_key('x')
assert_that({'a':1,'b':2}).does_not_contain_key('x','y')
assert_that({'a':1,'b':2}).contains_value(1)
assert_that({'a':1,'b':2}).contains_value(2,1)
assert_that({'a':1,'b':2}).does_not_contain_value(3)
assert_that({'a':1,'b':2}).does_not_contain_value(3,4)
assert_that({'a':1,'b':2}).contains_entry({'a':1})
assert_that({'a':1,'b':2}).contains_entry({'a':1},{'b':2})
assert_that({'a':1,'b':2}).does_not_contain_entry({'a':2})
assert_that({'a':1,'b':2}).does_not_contain_entry({'a':2},{'b':1})
# lists of dicts can be flattened on key
fred = {'first_name': 'Fred', 'last_name': 'Smith'}
bob = {'first_name': 'Bob', 'last_name': 'Barr'}
people = [fred, bob]
assert_that(people).extracting('first_name').is_equal_to(['Fred','Bob'])
assert_that(people).extracting('first_name').contains('Fred','Bob')
def test_dict_compare(self):
# ignore
assert_that({'a':1,'b':2}).is_equal_to({'a':1}, ignore='b')
assert_that({'a':1,'b':2,'c':3}).is_equal_to({'a':1}, ignore=['b','c'])
assert_that({'a':1,'b':{'c':2,'d':3}}).is_equal_to({'a':1,'b':{'c':2}}, ignore=('b','d'))
# include
assert_that({'a':1,'b':2}).is_equal_to({'a':1}, include='a')
assert_that({'a':1,'b':2,'c':3}).is_equal_to({'a':1,'b':2}, include=['a','b'])
assert_that({'a':1,'b':{'c':2,'d':3}}).is_equal_to({'b':{'d':3}}, include=('b','d'))
# both
assert_that({'a':1,'b':{'c':2,'d':3,'e':4,'f':5}}).is_equal_to(
{'b':{'d':3,'f':5}},
ignore=[('b','c'),('b','e')],
include='b'
)
def test_sets(self):
assert_that(set([])).is_not_none()
assert_that(set([])).is_empty()
assert_that(set([])).is_false()
assert_that(set([])).is_type_of(set)
assert_that(set([])).is_instance_of(set)
assert_that(set(['a','b'])).is_length(2)
assert_that(set(['a','b'])).is_not_empty()
assert_that(set(['a','b'])).is_equal_to(set(['a','b']))
assert_that(set(['a','b'])).is_equal_to(set(['b','a']))
assert_that(set(['a','b'])).is_not_equal_to(set(['a','x']))
assert_that(set(['a','b'])).contains('a')
assert_that(set(['a','b'])).contains('b','a')
assert_that(set(['a','b'])).does_not_contain('x','y')
assert_that(set(['a','b'])).contains_only('a','b')
assert_that(set(['a','b'])).is_subset_of(set(['a','b','c']))
assert_that(set(['a','b'])).is_subset_of(set(['a']), set(['b']))
def test_booleans(self):
assert_that(True).is_true()
assert_that(False).is_false()
assert_that(True).is_type_of(bool)
def test_dates(self):
today = datetime.datetime.today()
yesterday = today - datetime.timedelta(days=1)
assert_that(yesterday).is_before(today)
assert_that(today).is_after(yesterday)
today_0us = today - datetime.timedelta(microseconds=today.microsecond)
today_0s = today - datetime.timedelta(seconds=today.second)
today_0h = today - datetime.timedelta(hours=today.hour)
assert_that(today).is_equal_to_ignoring_milliseconds(today_0us)
assert_that(today).is_equal_to_ignoring_seconds(today_0s)
assert_that(today).is_equal_to_ignoring_time(today_0h)
assert_that(today).is_equal_to(today)
middle = today - datetime.timedelta(hours=12)
hours_24 = datetime.timedelta(hours=24)
assert_that(today).is_greater_than(yesterday)
assert_that(yesterday).is_less_than(today)
assert_that(middle).is_between(yesterday, today)
#note that the tolerance must be a datetime.timedelta object
assert_that(yesterday).is_close_to(today, hours_24)
# 1980-01-02 03:04:05.000006
x = datetime.datetime(1980, 1, 2, 3, 4, 5, 6)
assert_that(x).has_year(1980)
assert_that(x).has_month(1)
assert_that(x).has_day(2)
assert_that(x).has_hour(3)
assert_that(x).has_minute(4)
assert_that(x).has_second(5)
assert_that(x).has_microsecond(6)
def test_files(self):
assert_that('foo.txt').exists()
assert_that('missing.txt').does_not_exist()
assert_that('foo.txt').is_file()
#assert_that('mydir').exists()
assert_that('missing_dir').does_not_exist()
#assert_that('mydir').is_directory()
assert_that('foo.txt').is_named('foo.txt')
#assert_that('foo.txt').is_child_of('mydir')
contents = contents_of('foo.txt', 'ascii')
assert_that(contents).starts_with('foo').ends_with('bar').contains('oob')
def test_objects(self):
fred = Person('Fred','Smith')
assert_that(fred).is_not_none()
assert_that(fred).is_true()
assert_that(fred).is_type_of(Person)
assert_that(fred).is_instance_of(object)
assert_that(fred).is_same_as(fred)
assert_that(fred.first_name).is_equal_to('Fred')
assert_that(fred.name).is_equal_to('Fred Smith')
assert_that(fred.say_hello()).is_equal_to('Hello, Fred!')
fred = Person('Fred','Smith')
bob = Person('Bob','Barr')
people = [fred, bob]
assert_that(people).extracting('first_name').is_equal_to(['Fred','Bob'])
assert_that(people).extracting('first_name').contains('Fred','Bob')
assert_that(people).extracting('first_name').does_not_contain('Charlie')
fred = Person('Fred','Smith')
joe = Developer('Joe','Coder')
people = [fred, joe]
assert_that(people).extracting('first_name').contains('Fred','Joe')
assert_that(people).extracting('first_name', 'last_name').contains(('Fred','Smith'), ('Joe','Coder'))
assert_that(people).extracting('name').contains('Fred Smith', 'Joe Coder')
assert_that(people).extracting('say_hello').contains('Hello, Fred!', 'Joe writes code.')
def test_dyn(self):
fred = Person('Fred','Smith')
assert_that(fred.first_name).is_equal_to('Fred')
assert_that(fred.name).is_equal_to('Fred Smith')
assert_that(fred.say_hello()).is_equal_to('Hello, Fred!')
assert_that(fred).has_first_name('Fred')
assert_that(fred).has_name('Fred Smith')
assert_that(fred).has_say_hello('Hello, Fred!')
def test_failure(self):
try:
some_func('foo')
fail('should have raised error')
except RuntimeError as e:
assert_that(str(e)).is_equal_to('some err')
def test_expected_exceptions(self):
assert_that(some_func).raises(RuntimeError).when_called_with('foo')
assert_that(some_func).raises(RuntimeError).when_called_with('foo')\
.is_length(8).starts_with('some').is_equal_to('some err')
def test_custom_error_message(self):
try:
assert_that(1+2).is_equal_to(2)
fail('should have raised error')
except AssertionError as e:
assert_that(str(e)).is_equal_to('Expected <3> to be equal to <2>, but was not.')
try:
assert_that(1+2).described_as('adding stuff').is_equal_to(2)
fail('should have raised error')
except AssertionError as e:
assert_that(str(e)).is_equal_to('[adding stuff] Expected <3> to be equal to <2>, but was not.')
def test_assert_warn(self):
assert_warn('foo').is_length(4)
assert_warn('foo').is_empty()
assert_warn('foo').is_false()
assert_warn('foo').is_digit()
assert_warn('123').is_alpha()
assert_warn('foo').is_upper()
assert_warn('FOO').is_lower()
assert_warn('foo').is_equal_to('bar')
assert_warn('foo').is_not_equal_to('foo')
assert_warn('foo').is_equal_to_ignoring_case('BAR')
def test_soft_assertions(self):
try:
with soft_assertions():
assert_that('foo').is_length(4)
assert_that('foo').is_empty()
assert_that('foo').is_false()
assert_that('foo').is_digit()
assert_that('123').is_alpha()
assert_that('foo').is_upper()
assert_that('FOO').is_lower()
assert_that('foo').is_equal_to('bar')
assert_that('foo').is_not_equal_to('foo')
assert_that('foo').is_equal_to_ignoring_case('BAR')
fail('should have raised error')
except AssertionError as e:
assert_that(str(e)).contains('1. Expected <foo> to be of length <4>, but was <3>.')
assert_that(str(e)).contains('2. Expected <foo> to be empty string, but was not.')
assert_that(str(e)).contains('3. Expected <False>, but was not.')
assert_that(str(e)).contains('4. Expected <foo> to contain only digits, but did not.')
assert_that(str(e)).contains('5. Expected <123> to contain only alphabetic chars, but did not.')
assert_that(str(e)).contains('6. Expected <foo> to contain only uppercase chars, but did not.')
assert_that(str(e)).contains('7. Expected <FOO> to contain only lowercase chars, but did not.')
assert_that(str(e)).contains('8. Expected <foo> to be equal to <bar>, but was not.')
assert_that(str(e)).contains('9. Expected <foo> to be not equal to <foo>, but was.')
assert_that(str(e)).contains('10. Expected <foo> to be case-insensitive equal to <BAR>, but was not.')
def test_chaining(self):
fred = Person('Fred','Smith')
joe = Person('Joe','Jones')
people = [fred, joe]
assert_that('foo').is_length(3).starts_with('f').ends_with('oo')
assert_that([1,2,3]).is_type_of(list).contains(1,2).does_not_contain(4,5)
assert_that(fred).has_first_name('Fred').has_last_name('Smith').has_shoe_size(12)
assert_that(people).is_length(2).extracting('first_name').contains('Fred','Joe')
|
class TestReadme(object):
@classmethod
def setupClass(cls):
pass
def teardown(self):
pass
def test_something(self):
pass
def test_strings(self):
pass
def test_ints(self):
pass
def test_floats(self):
pass
def test_lists(self):
pass
def test_tuples(self):
pass
def test_dicts(self):
pass
def test_dict_compare(self):
pass
def test_sets(self):
pass
def test_booleans(self):
pass
def test_dates(self):
pass
def test_files(self):
pass
def test_objects(self):
pass
def test_dyn(self):
pass
def test_failure(self):
pass
def test_expected_exceptions(self):
pass
def test_custom_error_message(self):
pass
def test_assert_warn(self):
pass
def test_soft_assertions(self):
pass
def test_chaining(self):
pass
| 25 | 0 | 18 | 3 | 15 | 1 | 1 | 0.04 | 1 | 14 | 2 | 0 | 22 | 0 | 23 | 23 | 438 | 88 | 336 | 49 | 311 | 14 | 329 | 44 | 305 | 3 | 1 | 2 | 29 |
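For orientation, here is a minimal standalone sketch of the assertpy idioms the tests above exercise — extracting(), dynamic has_<attr>() assertions, and soft_assertions(). It assumes assertpy is installed; this Person class is a pared-down stand-in for the one defined in the next entry, not the original.

from assertpy import assert_that, soft_assertions

class Person(object):
    def __init__(self, first_name, last_name):
        self.first_name = first_name
        self.last_name = last_name

    @property
    def name(self):
        return '%s %s' % (self.first_name, self.last_name)

people = [Person('Fred', 'Smith'), Person('Joe', 'Jones')]

# extracting() pulls the named attribute (or property) out of each item
assert_that(people).extracting('first_name').contains('Fred', 'Joe')

# dynamic assertions: has_<attr>(expected) is synthesized from the attribute name
assert_that(people[0]).has_first_name('Fred').has_name('Fred Smith')

# soft_assertions() collects failures and reports them all when the block exits
with soft_assertions():
    assert_that('foo').is_length(3)
    assert_that('foo').starts_with('f')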
2,997 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/tests/test_readme.py
|
test_readme.Person
|
class Person(object):
def __init__(self, first_name, last_name, shoe_size = 12):
self.first_name = first_name
self.last_name = last_name
self.shoe_size = shoe_size
@property
def name(self):
return '%s %s' % (self.first_name, self.last_name)
def say_hello(self):
return 'Hello, %s!' % self.first_name
|
class Person(object):
def __init__(self, first_name, last_name, shoe_size = 12):
pass
@property
def name(self):
pass
def say_hello(self):
pass
| 5 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 3 | 3 | 3 | 3 | 12 | 2 | 10 | 8 | 5 | 0 | 9 | 7 | 5 | 1 | 1 | 0 | 3 |
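A quick usage note on the Person class above (values illustrative): name is computed on every access via the @property, so it always reflects the current first_name and last_name.

fred = Person('Fred', 'Smith')             # shoe_size defaults to 12
assert fred.name == 'Fred Smith'           # computed by the name property
assert fred.say_hello() == 'Hello, Fred!'
fred.first_name = 'Frederick'
assert fred.name == 'Frederick Smith'      # property recomputes on each access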
2,998 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/assertpy.py
|
assertpy.assertpy.WarningLoggingAdapter
|
class WarningLoggingAdapter(logging.LoggerAdapter):
"""Logging adapter to unwind the stack to get the correct callee filename and line number."""
def process(self, msg, kwargs):
def _unwind(frame, fn='assert_warn'):
if frame and fn in frame.f_code.co_names:
return frame
return _unwind(frame.f_back, fn)
frame = _unwind(inspect.currentframe())
lineno = frame.f_lineno
filename = os.path.basename(frame.f_code.co_filename)
return '[%s:%d]: %s' % (filename, lineno, msg), kwargs
|
class WarningLoggingAdapter(logging.LoggerAdapter):
'''Logging adapter to unwind the stack to get the correct callee filename and line number.'''
def process(self, msg, kwargs):
pass
def _unwind(frame, fn='assert_warn'):
pass
| 3 | 1 | 7 | 1 | 7 | 0 | 2 | 0.1 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 20 | 12 | 1 | 10 | 6 | 7 | 1 | 10 | 6 | 7 | 2 | 2 | 1 | 3 |
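The adapter above works because logging.LoggerAdapter routes every message through process() before it reaches the underlying logger, so overriding process() lets you rewrite the message with caller information. Below is a minimal self-contained sketch of the same frame-unwinding pattern; the names CallerLoggingAdapter and warn_here are illustrative, not part of assertpy.

import inspect
import logging
import os

class CallerLoggingAdapter(logging.LoggerAdapter):
    """Prefixes each message with the file and line of the warn_here() caller."""
    def process(self, msg, kwargs):
        # Walk outward until we hit a frame whose code references 'warn_here';
        # that is the user frame that triggered the warning (same idea as above).
        frame = inspect.currentframe()
        while frame and 'warn_here' not in frame.f_code.co_names:
            frame = frame.f_back
        if frame is None:  # caller not found; fall back to the raw message
            return msg, kwargs
        filename = os.path.basename(frame.f_code.co_filename)
        return '[%s:%d]: %s' % (filename, frame.f_lineno, msg), kwargs

logging.basicConfig(level=logging.WARNING)
_log = CallerLoggingAdapter(logging.getLogger('warn'), {})

def warn_here(msg):
    _log.warning(msg)

def user_code():
    warn_here('value looked wrong')  # logged as [<this file>:<line>]: value looked wrong

user_code()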
2,999 |
ActivisionGameScience/assertpy
|
ActivisionGameScience_assertpy/assertpy/dict.py
|
assertpy.dict.DictMixin
|
class DictMixin(object):
"""Dict assertions mixin."""
def contains_key(self, *keys):
"""Asserts the val is a dict and contains the given key or keys. Alias for contains()."""
self._check_dict_like(self.val, check_values=False, check_getitem=False)
return self.contains(*keys)
def does_not_contain_key(self, *keys):
"""Asserts the val is a dict and does not contain the given key or keys. Alias for does_not_contain()."""
self._check_dict_like(self.val, check_values=False, check_getitem=False)
return self.does_not_contain(*keys)
def contains_value(self, *values):
"""Asserts that val is a dict and contains the given value or values."""
self._check_dict_like(self.val, check_getitem=False)
if len(values) == 0:
raise ValueError('one or more value args must be given')
missing = []
for v in values:
if v not in self.val.values():
missing.append(v)
if missing:
self._err('Expected <%s> to contain values %s, but did not contain %s.' % (self.val, self._fmt_items(values), self._fmt_items(missing)))
return self
def does_not_contain_value(self, *values):
"""Asserts that val is a dict and does not contain the given value or values."""
self._check_dict_like(self.val, check_getitem=False)
if len(values) == 0:
raise ValueError('one or more value args must be given')
else:
found = []
for v in values:
if v in self.val.values():
found.append(v)
if found:
self._err('Expected <%s> to not contain values %s, but did contain %s.' % (self.val, self._fmt_items(values), self._fmt_items(found)))
return self
def contains_entry(self, *args, **kwargs):
"""Asserts that val is a dict and contains the given entry or entries."""
self._check_dict_like(self.val, check_values=False)
entries = list(args) + [{k:v} for k,v in kwargs.items()]
if len(entries) == 0:
raise ValueError('one or more entry args must be given')
missing = []
for e in entries:
if type(e) is not dict:
raise TypeError('given entry arg must be a dict')
if len(e) != 1:
raise ValueError('given entry args must contain exactly one key-value pair')
k = next(iter(e))
if k not in self.val:
missing.append(e) # bad key
elif self.val[k] != e[k]:
missing.append(e) # bad val
if missing:
self._err('Expected <%s> to contain entries %s, but did not contain %s.' % (self.val, self._fmt_items(entries), self._fmt_items(missing)))
return self
def does_not_contain_entry(self, *args, **kwargs):
"""Asserts that val is a dict and does not contain the given entry or entries."""
self._check_dict_like(self.val, check_values=False)
entries = list(args) + [{k:v} for k,v in kwargs.items()]
if len(entries) == 0:
raise ValueError('one or more entry args must be given')
found = []
for e in entries:
if type(e) is not dict:
raise TypeError('given entry arg must be a dict')
if len(e) != 1:
raise ValueError('given entry args must contain exactly one key-value pair')
k = next(iter(e))
if k in self.val and e[k] == self.val[k]:
found.append(e)
if found:
self._err('Expected <%s> to not contain entries %s, but did contain %s.' % (self.val, self._fmt_items(entries), self._fmt_items(found)))
return self
|
class DictMixin(object):
'''Dict assertions mixin.'''
def contains_key(self, *keys):
'''Asserts the val is a dict and contains the given key or keys. Alias for contains().'''
pass
def does_not_contain_key(self, *keys):
'''Asserts the val is a dict and does not contain the given key or keys. Alias for does_not_contain().'''
pass
def contains_value(self, *values):
'''Asserts that val is a dict and contains the given value or values.'''
pass
def does_not_contain_value(self, *values):
'''Asserts that val is a dict and does not contain the given value or values.'''
pass
def contains_entry(self, *args, **kwargs):
'''Asserts that val is a dict and contains the given entry or entries.'''
pass
def does_not_contain_entry(self, *args, **kwargs):
'''Asserts that val is a dict and does not contain the given entry or entries.'''
pass
| 7 | 7 | 12 | 0 | 11 | 1 | 5 | 0.14 | 1 | 5 | 0 | 1 | 6 | 1 | 6 | 6 | 79 | 6 | 66 | 18 | 59 | 9 | 64 | 17 | 57 | 8 | 1 | 3 | 27 |
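A brief usage sketch of the dict assertions defined above (assumes assertpy is installed). Note that each positional arg to contains_entry()/does_not_contain_entry() must be a dict holding exactly one key-value pair; the kwargs form builds those one-pair dicts for you.

from assertpy import assert_that

fruit = {'a': 'apple', 'b': 'banana'}

assert_that(fruit).contains_key('a', 'b')                   # checks keys only
assert_that(fruit).contains_value('apple', 'banana')        # checks values only
assert_that(fruit).contains_entry({'a': 'apple'})           # exact key/value pair
assert_that(fruit).contains_entry(b='banana')               # kwargs form, same check
assert_that(fruit).does_not_contain_entry({'a': 'banana'})  # right key, wrong value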