Dataset schema: column name, type, and observed min/max (for string columns, min/max string length).

| Column | Type | Min | Max |
| --- | --- | --- | --- |
| id | int64 | 0 | 843k |
| repository_name | string (length) | 7 | 55 |
| file_path | string (length) | 9 | 332 |
| class_name | string (length) | 3 | 290 |
| human_written_code | string (length) | 12 | 4.36M |
| class_skeleton | string (length) | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |
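Each record pairs a class implementation (human_written_code) with its stubbed-out class_skeleton and a set of class-level static metrics. As a minimal sketch of how rows with this schema could be loaded and queried, assuming the data is distributed as a Hugging Face dataset (the identifier below is a placeholder, not the real dataset name):

```python
# Minimal sketch: load rows matching the schema above and filter on a metric.
# Assumption: "org/class-level-dataset" is a hypothetical identifier; replace it
# with the real dataset path, or point load_dataset at local data files.
from datasets import load_dataset

ds = load_dataset("org/class-level-dataset", split="train")

row = ds[0]
print(row["repository_name"], row["class_name"])
print(row["class_skeleton"])                      # class with method bodies replaced by `pass`
print(row["CountLineCode"], row["SumCyclomatic"])  # per-class size/complexity metrics

# Example query: keep only classes with a non-trivial amount of logic.
non_trivial = ds.filter(lambda r: r["SumCyclomatic"] >= 5 and r["CountDeclMethod"] >= 3)
print(len(non_trivial), "classes with SumCyclomatic >= 5 and at least 3 declared methods")
```

The sample rows below follow the schema order: id, repository_name, file_path, class_name, human_written_code, class_skeleton, and then the metric columns.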
145,548
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/frontend/test_test_job.py
test.frontend.test_test_job.TestJobViewTest
class TestJobViewTest(TestCase): def setUp(self): self.client = Client() self.group = core_models.Group.objects.create(slug='mygroup') self.project = self.group.projects.create(slug='myproject') self.build = self.project.builds.create(version='1') self.backend = models.Backend.objects.create( url='http://example.com', username='foobar', token='mypassword', ) def test_basics(self): testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, ) self.assertIsNone(testjob.job_id) @patch('squad.ci.backend.null.Backend.job_url', return_value=None) def test_testjob_page(self, backend_job_url): job_id = 1234 testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, job_id=job_id ) response = self.client.get('/testjob/%s' % testjob.id) self.assertEqual(200, response.status_code) @patch("squad.ci.models.Backend.get_implementation") def test_testjob_redirect(self, implementation): return_url = "http://example.com/job/1234" class BackendImpl: def job_url(self, job_id): return return_url implementation.return_value = BackendImpl() testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, job_id=1234 ) response = self.client.get('/testjob/%s' % testjob.id) self.backend.get_implementation.assert_called() self.assertEqual(302, response.status_code) self.assertEqual(return_url, response.url) def test_testjob_non_existing(self): response = self.client.get('/testjob/9999') self.assertEqual(404, response.status_code) def test_testjob_non_integer(self): response = self.client.get('/testjob/9999%20abcd') self.assertEqual(404, response.status_code)
class TestJobViewTest(TestCase): def setUp(self): pass def test_basics(self): pass @patch('squad.ci.backend.null.Backend.job_url', return_value=None) def test_testjob_page(self, backend_job_url): pass @patch("squad.ci.models.Backend.get_implementation") def test_testjob_redirect(self, implementation): pass class BackendImpl: def job_url(self, job_id): pass def test_testjob_non_existing(self): pass def test_testjob_non_integer(self): pass
metrics: total_program_units=11, total_doc_str=0, AvgCountLine=8, AvgCountLineBlank=1, AvgCountLineCode=8, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=3, CountClassDerived=0, CountDeclInstanceMethod=6, CountDeclInstanceVariable=5, CountDeclMethod=6, CountDeclMethodAll=6, CountLine=65, CountLineBlank=10, CountLineCode=55, CountLineCodeDecl=25, CountLineCodeExe=44, CountLineComment=0, CountStmt=32, CountStmtDecl=23, CountStmtExe=23, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=7
145,549
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/frontend/test_test_job.py
test.frontend.test_test_job.TestJobViewTest.test_testjob_redirect.BackendImpl
class BackendImpl: def job_url(self, job_id): return return_url
class BackendImpl: def job_url(self, job_id): pass
metrics: total_program_units=2, total_doc_str=0, AvgCountLine=2, AvgCountLineBlank=0, AvgCountLineCode=2, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=1, CountClassCoupledModified=1, CountClassDerived=0, CountDeclInstanceMethod=1, CountDeclInstanceVariable=0, CountDeclMethod=1, CountDeclMethodAll=1, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=2, CountLineCodeExe=1, CountLineComment=0, CountStmt=3, CountStmtDecl=2, CountStmtExe=1, MaxCyclomatic=1, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=1
145,550
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/integration/test_build_notification_from_ci.py
test.integration.test_build_notification_from_ci.BuildNotificationFromCI
class BuildNotificationFromCI(TestCase): @patch('squad.core.tasks.maybe_notify_project_status') @patch('squad.ci.backend.null.Backend.job_url', return_value="http://example.com/123") @patch('squad.ci.backend.null.Backend.fetch') def test_fetch_triggers_notification(self, fetch, job_url, notify): fetch.return_value = (job_status, completed, metadata, tests, metrics, logs) group = Group.objects.create(slug='mygroup') project = group.projects.create(slug='myproject') project.subscriptions.create(email='foo@example.com') build = project.builds.create(version='1') backend = Backend.objects.create( url='http://example.com', username='foobar', token='mypassword', ) testjob = backend.test_jobs.create( target=project, target_build=build, job_id='123', environment='myenv', ) backend.fetch(testjob.id) status = build.status status.refresh_from_db() notify.delay.assert_called_with(status.id) self.assertTrue(status.finished)
class BuildNotificationFromCI(TestCase): @patch('squad.core.tasks.maybe_notify_project_status') @patch('squad.ci.backend.null.Backend.job_url', return_value="http://example.com/123") @patch('squad.ci.backend.null.Backend.fetch') def test_fetch_triggers_notification(self, fetch, job_url, notify): pass
metrics: total_program_units=5, total_doc_str=0, AvgCountLine=24, AvgCountLineBlank=2, AvgCountLineCode=22, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=1, CountClassDerived=0, CountDeclInstanceMethod=1, CountDeclInstanceVariable=0, CountDeclMethod=1, CountDeclMethodAll=1, CountLine=29, CountLineBlank=3, CountLineCode=26, CountLineCodeDecl=9, CountLineCodeExe=21, CountLineComment=0, CountStmt=14, CountStmtDecl=8, CountStmtExe=12, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=1
145,551
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/plugins/test_gerrit.py
test.plugins.test_gerrit.FakeRequests.auth
class auth(): class HTTPBasicAuth(): def __init__(self, user, password): self.user = user self.password = password
class auth(): class HTTPBasicAuth(): def __init__(self, user, password): pass
metrics: total_program_units=3, total_doc_str=0, AvgCountLine=3, AvgCountLineBlank=0, AvgCountLineCode=3, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=5, CountLineBlank=0, CountLineCode=5, CountLineCodeDecl=5, CountLineCodeExe=2, CountLineComment=0, CountStmt=5, CountStmtDecl=5, CountStmtExe=2, MaxCyclomatic=1, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=1
145,552
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/plugins/test_gerrit.py
test.plugins.test_gerrit.FakeRequests.auth.HTTPBasicAuth
class HTTPBasicAuth(): def __init__(self, user, password): self.user = user self.password = password
class HTTPBasicAuth(): def __init__(self, user, password): pass
metrics: total_program_units=2, total_doc_str=0, AvgCountLine=3, AvgCountLineBlank=0, AvgCountLineCode=3, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=1, CountDeclInstanceVariable=2, CountDeclMethod=1, CountDeclMethodAll=1, CountLine=4, CountLineBlank=0, CountLineCode=4, CountLineCodeDecl=4, CountLineCodeExe=2, CountLineComment=0, CountStmt=4, CountStmtDecl=4, CountStmtExe=2, MaxCyclomatic=1, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=1
145,553
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/plugins/test_gerrit.py
test.plugins.test_gerrit.FakeSubprocess.CalledProcessError
class CalledProcessError(BaseException): def __str__(self): return 'Could not establish connection to host'
class CalledProcessError(BaseException): def __str__(self): pass
metrics: total_program_units=2, total_doc_str=0, AvgCountLine=2, AvgCountLineBlank=0, AvgCountLineCode=2, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=1, CountDeclInstanceVariable=0, CountDeclMethod=1, CountDeclMethodAll=10, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=2, CountLineCodeExe=1, CountLineComment=0, CountStmt=3, CountStmtDecl=2, CountStmtExe=1, MaxCyclomatic=1, MaxInheritanceTree=2, MaxNesting=0, SumCyclomatic=1
145,554
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/plugins/test_gerrit.py
test.plugins.test_gerrit.GerritPluginTest
class GerritPluginTest(TestCase): def setUp(self): group = Group.objects.create(slug='mygroup') self.project = group.projects.create(slug='myproject') self.http_patch_source = PatchSource.objects.create( name='http-gerrit', url='https://the.host', username='theuser', password='1234', implementation='gerrit', token='' ) self.ssh_patch_source = PatchSource.objects.create( name='ssh-gerrit', url='ssh://the.host', username='theuser', password='', implementation='gerrit', token='' ) self.build1 = self.project.builds.create( version='1', patch_source=self.http_patch_source, patch_id='1,1') self.build2 = self.project.builds.create( version='2', patch_source=self.ssh_patch_source, patch_id='1,1') self.build3 = self.project.builds.create( version='3', patch_source=self.ssh_patch_source, patch_id=':') self.build4 = self.project.builds.create( version='4', patch_source=self.http_patch_source, patch_id='1/1') def test_basic_validation(self): validation_error = False try: self.http_patch_source.full_clean() self.ssh_patch_source.full_clean() except ValidationError: validation_error = True self.assertFalse(validation_error) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http(self): plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_created(self.build1)) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build_created(self): plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_created(self.build1)) self.assertIn('Build created', FakeRequests.given_json()['message']) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_get_url(self): self.build1.patch_source.get_implementation() gerrit_url = self.build1.patch_source.get_url(self.build1) self.assertEqual(gerrit_url, "https://the.host/c/foo/bar/+/1/1") @patch('squad.plugins.gerrit.requests', FakeRequests) def test_get_url_ssh(self): self.build2.patch_source.get_implementation() gerrit_url = self.build2.patch_source.get_url(self.build2) self.assertEqual(gerrit_url, None) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build_finished(self): plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build1)) self.assertIn('Build finished', FakeRequests.given_json()['message']) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build4_finished(self): plugin = self.build4.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build4)) self.assertIn('Build finished', FakeRequests.given_json()['message']) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build_finished_with_failures(self): self.build1.status.tests_fail = 1 self.build1.status.save() plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build1)) self.assertIn('Build finished', FakeRequests.given_json()['message']) self.assertIn('Some tests failed (1)', FakeRequests.given_json()['message']) @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh(self): plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_created(self.build2)) @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_failed_login(self): self.build2.patch_source.username = 'wronguser' plugin = self.build2.patch_source.get_implementation() 
self.assertFalse(plugin.notify_patch_build_created(self.build2)) @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_notify_patch_build_created(self): plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_created(self.build2)) self.assertIn('Build created', FakeSubprocess.given_cmd()) @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_notify_patch_build_finished(self): plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build2)) self.assertIn('Build finished', FakeSubprocess.given_cmd()) @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_notify_patch_build_finished_with_failures(self): self.build2.status.tests_fail = 1 self.build2.status.save() plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build2)) self.assertIn('Build finished', FakeSubprocess.given_cmd()) self.assertIn('Some tests failed (1)', FakeSubprocess.given_cmd()) def test_malformed_patch_id(self): plugin = self.build3.patch_source.get_implementation() self.assertFalse(plugin.notify_patch_build_created(self.build3)) @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_default_labels(self): self.build2.status.tests_fail = 1 self.build2.status.save() plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build2)) self.assertIn('--label code-review=-1', FakeSubprocess.given_cmd()) self.build2.status.tests_fail = 0 self.build2.status.save() plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build2)) self.assertNotIn('--label code-review=+1', FakeSubprocess.given_cmd()) @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_custom_labels(self): self.project.project_settings = plugins_settings self.project.save() self.build2.status.tests_fail = 1 self.build2.status.save() plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build2)) self.assertIn('--label custom-code-review=-1', FakeSubprocess.given_cmd()) self.assertIn('--label other-label=-2', FakeSubprocess.given_cmd()) self.assertNotIn('--label my-custom-label=+1', FakeSubprocess.given_cmd()) self.build2.status.tests_fail = 0 self.build2.status.save() plugin = self.build2.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build2)) self.assertIn('--label my-custom-label=+1', FakeSubprocess.given_cmd()) self.assertNotIn('--label custom-code-review=-1', FakeSubprocess.given_cmd()) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_rest_default_labels(self): self.build1.status.tests_fail = 1 self.build1.status.save() plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build1)) labels = FakeRequests.given_json()['labels'] self.assertEqual('-1', labels.get('Code-Review')) self.build1.status.tests_fail = 0 self.build1.status.save() plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build1)) labels = FakeRequests.given_json()['labels'] self.assertEqual(None, labels.get('Code-Review')) @patch('squad.plugins.gerrit.requests', FakeRequests) def test_rest_custom_labels(self): self.project.project_settings = plugins_settings self.project.save() self.build1.status.tests_fail = 1 self.build1.status.save() 
plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build1)) labels = FakeRequests.given_json()['labels'] self.assertEqual('-1', labels.get('Custom-Code-Review')) self.assertEqual('-2', labels.get('Other-Label')) self.assertEqual(None, labels.get('My-Custom-Label')) self.build1.status.tests_fail = 0 self.build1.status.save() plugin = self.build1.patch_source.get_implementation() self.assertTrue(plugin.notify_patch_build_finished(self.build1)) labels = FakeRequests.given_json()['labels'] self.assertEqual('+1', labels.get('My-Custom-Label')) self.assertEqual(None, labels.get('Custom-Code-Review'))
class GerritPluginTest(TestCase): def setUp(self): pass def test_basic_validation(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build_created(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_get_url(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_get_url_ssh(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build_finished(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build4_finished(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_http_notify_patch_build_finished_with_failures(self): pass @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh(self): pass @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_failed_login(self): pass @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_notify_patch_build_created(self): pass @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_notify_patch_build_finished(self): pass @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_notify_patch_build_finished_with_failures(self): pass def test_malformed_patch_id(self): pass @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_default_labels(self): pass @patch('squad.plugins.gerrit.subprocess', FakeSubprocess) def test_ssh_custom_labels(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_rest_default_labels(self): pass @patch('squad.plugins.gerrit.requests', FakeRequests) def test_rest_custom_labels(self): pass
metrics: total_program_units=36, total_doc_str=0, AvgCountLine=8, AvgCountLineBlank=0, AvgCountLineCode=8, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=3, CountClassDerived=0, CountDeclInstanceMethod=19, CountDeclInstanceVariable=7, CountDeclMethod=19, CountDeclMethodAll=19, CountLine=187, CountLineBlank=26, CountLineCode=161, CountLineCodeDecl=64, CountLineCodeExe=125, CountLineComment=0, CountStmt=131, CountStmtDecl=48, CountStmtExe=111, MaxCyclomatic=2, MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=20
145,555
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/plugins/test_github.py
test.plugins.test_github.GithubPluginTest
class GithubPluginTest(TestCase): def setUp(self): group = Group.objects.create(slug='mygroup') self.project = group.projects.create( slug='myproject', enabled_plugins_list='example') self.patch_source = PatchSource.objects.create( name='github', url='https://api.github.com', username='example', token='123456789', ) self.build = self.project.builds.create( version='1', patch_source=self.patch_source, patch_id='foo/bar/deadbeef') self.patch_source_wrong = PatchSource.objects.create( name='github_wrong', url='https://api.github.com/', username='example', token='123456789', ) self.build_wrong = self.project.builds.create( version='2', patch_source=self.patch_source_wrong, patch_id='foo/bar/deadbeef') self.github = Plugin() @patch('squad.plugins.github.requests') def test_github_post(self, requests): Plugin.__github_post__( self.build_wrong, '/test/{owner}/{repository}/{commit}', {"a": "b"}) requests.post.assert_called_with( 'https://api.github.com/test/foo/bar/deadbeef', headers={'Authorization': 'token 123456789'}, json={"a": "b"}, ) @patch('squad.plugins.github.requests') def test_github_post_wrong_url(self, requests): Plugin.__github_post__( self.build, '/test/{owner}/{repository}/{commit}', {"a": "b"}) requests.post.assert_called_with( 'https://api.github.com/test/foo/bar/deadbeef', headers={'Authorization': 'token 123456789'}, json={"a": "b"}, ) @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_created(self, __github_post__): self.github.notify_patch_build_created(self.build) __github_post__.assert_called_with( self.build, "/repos/{owner}/{repository}/statuses/{commit}", ANY) @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_finished(self, __github_post__): self.github.notify_patch_build_finished(self.build) __github_post__.assert_called_with( self.build, "/repos/{owner}/{repository}/statuses/{commit}", ANY) @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_finished_no_failures(self, __github_post__): self.build.status.tests_pass = 1 self.build.status.save() state, _ = self.github.__get_finished_state__(self.build) self.assertEqual("success", state) @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_finished_with_failures(self, __github_post__): self.build.status.tests_fail = 1 self.build.status.save() state, _ = self.github.__get_finished_state__(self.build) self.assertEqual("failure", state) def test_github_get_url(self): expected_url = "https://api.github.com/repos/foo/bar/commits/deadbeef" actual_url = self.github.get_url(self.build) self.assertEqual(expected_url, actual_url)
class GithubPluginTest(TestCase): def setUp(self): pass @patch('squad.plugins.github.requests') def test_github_post(self, requests): pass @patch('squad.plugins.github.requests') def test_github_post_wrong_url(self, requests): pass @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_created(self, __github_post__): pass @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_finished(self, __github_post__): pass @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_finished_no_failures(self, __github_post__): pass @patch('squad.plugins.github.Plugin.__github_post__') def test_notify_patch_build_finished_with_failures(self, __github_post__): pass def test_github_get_url(self): pass
metrics: total_program_units=15, total_doc_str=0, AvgCountLine=7, AvgCountLineBlank=0, AvgCountLineCode=7, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=2, CountClassCoupledModified=2, CountClassDerived=0, CountDeclInstanceMethod=8, CountDeclInstanceVariable=6, CountDeclMethod=8, CountDeclMethodAll=8, CountLine=68, CountLineBlank=9, CountLineCode=59, CountLineCodeDecl=26, CountLineCodeExe=44, CountLineComment=0, CountStmt=35, CountStmtDecl=20, CountStmtExe=26, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=8
145,556
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_tasks.py
test.ci.test_tasks.PollTest
class PollTest(TestCase): @patch("squad.ci.models.Backend.poll") def test_poll_no_backends(self, poll_method): poll.apply() poll_method.assert_not_called() @patch("squad.ci.models.Backend.poll") def test_poll_all_backends(self, poll_method): models.Backend.objects.create() poll.apply() poll_method.assert_called_once() @patch("squad.ci.models.Backend.poll") def test_poll_one_backend(self, poll_method): b1 = models.Backend.objects.create(name='b1') models.Backend.objects.create(name='b2') poll.apply(args=[b1.id]) poll_method.assert_called_once() @patch("squad.ci.tasks.fetch") def test_poll_calls_fetch_on_all_test_jobs(self, fetch_method): group = core_models.Group.objects.create(slug='testgroup') project = group.projects.create(slug='testproject') backend = models.Backend.objects.create(name='b1') testjob = backend.test_jobs.create(target=project, submitted=True) poll.apply() fetch_method.apply_async.assert_called_with( args=(testjob.id,), task_id=task_id(testjob))
class PollTest(TestCase): @patch("squad.ci.models.Backend.poll") def test_poll_no_backends(self, poll_method): pass @patch("squad.ci.models.Backend.poll") def test_poll_all_backends(self, poll_method): pass @patch("squad.ci.models.Backend.poll") def test_poll_one_backend(self, poll_method): pass @patch("squad.ci.tasks.fetch") def test_poll_calls_fetch_on_all_test_jobs(self, fetch_method): pass
metrics: total_program_units=9, total_doc_str=0, AvgCountLine=5, AvgCountLineBlank=0, AvgCountLineCode=5, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=1, CountClassDerived=0, CountDeclInstanceMethod=4, CountDeclInstanceVariable=0, CountDeclMethod=4, CountDeclMethodAll=4, CountLine=28, CountLineBlank=4, CountLineCode=24, CountLineCodeDecl=14, CountLineCodeExe=15, CountLineComment=0, CountStmt=20, CountStmtDecl=10, CountStmtExe=15, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=4
145,557
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_tasks.py
test.ci.test_tasks.FetchTestRaceConditionWaitAllJobsToBeFetched
class FetchTestRaceConditionWaitAllJobsToBeFetched(TransactionTestCase): """ If another testjob for this build is finished, it'll trigger UpdateProjectStatus which will invoke Build.finished and will see that all testjobs for this build are finished. Except that if this current test job is still running plugins, like VTS/CTS which take long time, the build will be considered to be finished and finishing events will be triggered such as email reports and callbacks """ def setUp(self): group = core_models.Group.objects.create(slug='test') project = group.projects.create(slug='test') self.build = project.builds.create(version='test-build') backend = models.Backend.objects.create() self.testjob1 = models.TestJob.objects.create( backend=backend, target=project, target_build=self.build, job_id='job-1', ) self.testjob2 = models.TestJob.objects.create( backend=backend, target=project, target_build=self.build, job_id='job-2', ) def mock_backend_fetch(test_job): status = '' completed = True metadata = {} tests = {} metrics = {} logs = '' return status, completed, metadata, tests, metrics, logs def mock_receive_testrun(target, update_project_status): global __sleeping__ # Let's present job1 takes a bit longer if __sleeping__ is False: time.sleep(2) __sleeping__ = True return ReceiveTestRun(target, update_project_status=update_project_status) @tag('skip_sqlite') @patch('squad.ci.backend.null.Backend.job_url') @patch('squad.ci.models.ReceiveTestRun', side_effect=mock_receive_testrun) @patch('squad.ci.backend.null.Backend.fetch', side_effect=mock_backend_fetch) def test_race_condition_on_fetch(self, fetch_method, mock_receive, mock_url): mock_url.return_value = "job-url" def thread(testjob_id): fetch(testjob_id) connection.close() parallel_task_1 = threading.Thread( target=thread, args=(self.testjob1.id,)) parallel_task_2 = threading.Thread( target=thread, args=(self.testjob2.id,)) parallel_task_1.start() parallel_task_2.start() time.sleep(1) self.testjob1.refresh_from_db() self.testjob2.refresh_from_db() finished, _ = self.build.finished self.assertFalse(finished) parallel_task_1.join() parallel_task_2.join() finished, _ = self.build.finished self.assertTrue(finished)
class FetchTestRaceConditionWaitAllJobsToBeFetched(TransactionTestCase): ''' If another testjob for this build is finished, it'll trigger UpdateProjectStatus which will invoke Build.finished and will see that all testjobs for this build are finished. Except that if this current test job is still running plugins, like VTS/CTS which take long time, the build will be considered to be finished and finishing events will be triggered such as email reports and callbacks ''' def setUp(self): pass def mock_backend_fetch(test_job): pass def mock_receive_testrun(target, update_project_status): pass @tag('skip_sqlite') @patch('squad.ci.backend.null.Backend.job_url') @patch('squad.ci.models.ReceiveTestRun', side_effect=mock_receive_testrun) @patch('squad.ci.backend.null.Backend.fetch', side_effect=mock_backend_fetch) def test_race_condition_on_fetch(self, fetch_method, mock_receive, mock_url): pass def thread(testjob_id): pass
metrics: total_program_units=10, total_doc_str=1, AvgCountLine=12, AvgCountLineBlank=2, AvgCountLineCode=10, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0.15, CountClassBase=1, CountClassCoupled=4, CountClassCoupledModified=3, CountClassDerived=0, CountDeclInstanceMethod=4, CountDeclInstanceVariable=3, CountDeclMethod=4, CountDeclMethodAll=4, CountLine=74, CountLineBlank=12, CountLineCode=54, CountLineCodeDecl=23, CountLineCodeExe=43, CountLineComment=8, CountStmt=40, CountStmtDecl=22, CountStmtExe=33, MaxCyclomatic=2, MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=6
145,558
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_tasks.py
test.ci.test_tasks.FetchTestRaceCondition
class FetchTestRaceCondition(TransactionTestCase): def setUp(self): group = core_models.Group.objects.create(slug='test') project = group.projects.create(slug='test') build = project.builds.create(version='test-build') backend = models.Backend.objects.create() self.testjob = models.TestJob.objects.create( backend=backend, target=project, target_build=build, job_id='test', ) def mock_backend_fetch(test_job): time.sleep(0.5) status = '' completed = True metadata = {} tests = {} metrics = {} logs = '' return status, completed, metadata, tests, metrics, logs @tag('skip_sqlite') @patch('squad.ci.backend.null.Backend.job_url', return_value='http://url') @patch('squad.ci.backend.null.Backend.fetch', side_effect=mock_backend_fetch) def test_race_condition_on_fetch(self, fetch_method, job_url_method): def thread(testjob_id): fetch(testjob_id) connection.close() parallel_task_1 = threading.Thread( target=thread, args=(self.testjob.id,)) parallel_task_2 = threading.Thread( target=thread, args=(self.testjob.id,)) parallel_task_1.start() parallel_task_2.start() parallel_task_1.join() parallel_task_2.join() self.assertEqual(1, fetch_method.call_count)
class FetchTestRaceCondition(TransactionTestCase): def setUp(self): pass def mock_backend_fetch(test_job): pass @tag('skip_sqlite') @patch('squad.ci.backend.null.Backend.job_url', return_value='http://url') @patch('squad.ci.backend.null.Backend.fetch', side_effect=mock_backend_fetch) def test_race_condition_on_fetch(self, fetch_method, job_url_method): pass def thread(testjob_id): pass
metrics: total_program_units=8, total_doc_str=0, AvgCountLine=10, AvgCountLineBlank=1, AvgCountLineCode=9, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=2, CountClassDerived=0, CountDeclInstanceMethod=3, CountDeclInstanceVariable=1, CountDeclMethod=3, CountDeclMethodAll=3, CountLine=42, CountLineBlank=7, CountLineCode=35, CountLineCodeDecl=19, CountLineCodeExe=27, CountLineComment=0, CountStmt=27, CountStmtDecl=18, CountStmtExe=22, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=4
145,559
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_tasks.py
test.ci.test_tasks.FetchTest
class FetchTest(TestCase): def setUp(self): group = core_models.Group.objects.create(slug='test') project = group.projects.create(slug='test') build = project.builds.create(version='test') backend = models.Backend.objects.create() self.test_job = models.TestJob.objects.create( backend=backend, target=project, target_build=build, job_id='test', ) def mock_backend_fetch(test_job): status = '' completed = True metadata = {} tests = {} metrics = {} logs = '' test_job.failure = None test_job.save() return status, completed, metadata, tests, metrics, logs @patch('squad.ci.models.Backend.fetch') def test_fetch(self, fetch_method): fetch.apply(args=[self.test_job.id]) fetch_method.assert_called_with(self.test_job.id) @patch('squad.ci.backend.null.Backend.fetch') def test_exception_when_fetching(self, fetch_method): fetch_method.side_effect = FetchIssue("ERROR") fetch.apply(args=[self.test_job.id]) self.test_job.refresh_from_db() self.assertEqual("ERROR", self.test_job.failure) self.assertTrue(self.test_job.fetched) @patch('squad.ci.backend.null.Backend.fetch') def test_temporary_exception_when_fetching(self, fetch_method): fetch_method.side_effect = TemporaryFetchIssue("ERROR") fetch.apply(args=[self.test_job.id]) self.test_job.refresh_from_db() self.assertEqual("ERROR", self.test_job.failure) self.assertFalse(self.test_job.fetched) @patch('squad.ci.backend.null.Backend.fetch') @patch('squad.ci.backend.null.Backend.job_url') def test_clear_exception_after_successful_fetch(self, job_url, fetch_method): fetch_method.side_effect = TemporaryFetchIssue("ERROR") fetch.apply(args=[self.test_job.id]) self.test_job.refresh_from_db() self.assertEqual("ERROR", self.test_job.failure) self.assertFalse(self.test_job.fetched) fetch_method.side_effect = FetchTest.mock_backend_fetch job_url.side_effect = lambda a: 'test' fetch.apply(args=[self.test_job.id]) self.test_job.refresh_from_db() fetch_method.assert_called_with(self.test_job) self.assertIsNone(self.test_job.failure) self.assertTrue(self.test_job.fetched) @patch('squad.ci.backend.null.Backend.fetch') def test_counts_attempts_with_temporary_exceptions(self, fetch_method): attemps = self.test_job.fetch_attempts fetch_method.side_effect = TemporaryFetchIssue("ERROR") fetch.apply(args=[self.test_job.id]) self.test_job.refresh_from_db() self.assertEqual(attemps + 1, self.test_job.fetch_attempts) @patch('squad.ci.models.Backend.fetch') def test_fetch_no_job_id(self, fetch_method): testjob = models.TestJob.objects.create( backend=self.test_job.backend, target=self.test_job.target, target_build=self.test_job.target_build, ) fetch.apply(args=[testjob.id]) fetch_method.assert_not_called() @patch('squad.ci.models.Backend.fetch') def test_fetch_deleted_job(self, fetch_method): fetch.apply(args=[99999999999]) fetch_method.assert_not_called()
class FetchTest(TestCase): def setUp(self): pass def mock_backend_fetch(test_job): pass @patch('squad.ci.models.Backend.fetch') def test_fetch(self, fetch_method): pass @patch('squad.ci.backend.null.Backend.fetch') def test_exception_when_fetching(self, fetch_method): pass @patch('squad.ci.backend.null.Backend.fetch') def test_temporary_exception_when_fetching(self, fetch_method): pass @patch('squad.ci.backend.null.Backend.fetch') @patch('squad.ci.backend.null.Backend.job_url') def test_clear_exception_after_successful_fetch(self, job_url, fetch_method): pass @patch('squad.ci.backend.null.Backend.fetch') def test_counts_attempts_with_temporary_exceptions(self, fetch_method): pass @patch('squad.ci.models.Backend.fetch') def test_fetch_no_job_id(self, fetch_method): pass @patch('squad.ci.models.Backend.fetch') def test_fetch_deleted_job(self, fetch_method): pass
metrics: total_program_units=18, total_doc_str=0, AvgCountLine=8, AvgCountLineBlank=1, AvgCountLineCode=7, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=4, CountClassCoupledModified=4, CountClassDerived=0, CountDeclInstanceMethod=9, CountDeclInstanceVariable=1, CountDeclMethod=9, CountDeclMethodAll=9, CountLine=89, CountLineBlank=14, CountLineCode=75, CountLineCodeDecl=30, CountLineCodeExe=57, CountLineComment=0, CountStmt=58, CountStmtDecl=23, CountStmtExe=48, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=9
145,560
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/core/models.py
squad.core.models.Test.Confidence
class Confidence(object): def __init__(self, threshold, tests): self.threshold = threshold self.tests = tests @property def count(self): return len(self.tests) @property def passes(self): return sum(1 for t in self.tests if t.result) @property def score(self): if not self.count: return 0 return 100 * (self.passes / self.count)
class Confidence(object): def __init__(self, threshold, tests): pass @property def count(self): pass @property def passes(self): pass @property def score(self): pass
metrics: total_program_units=8, total_doc_str=0, AvgCountLine=3, AvgCountLineBlank=0, AvgCountLineCode=3, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=4, CountDeclInstanceVariable=2, CountDeclMethod=4, CountDeclMethodAll=4, CountLine=19, CountLineBlank=4, CountLineCode=15, CountLineCodeDecl=11, CountLineCodeExe=7, CountLineComment=0, CountStmt=12, CountStmtDecl=7, CountStmtExe=7, MaxCyclomatic=2, MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=5
145,561
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/core/models.py
squad.core.models.Test.History
class History(object): def __init__(self, since, count, last_different): self.since = since self.count = count self.last_different = last_different
class History(object): def __init__(self, since, count, last_different): pass
metrics: total_program_units=2, total_doc_str=0, AvgCountLine=4, AvgCountLineBlank=0, AvgCountLineCode=4, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=1, CountDeclInstanceVariable=3, CountDeclMethod=1, CountDeclMethodAll=1, CountLine=5, CountLineBlank=0, CountLineCode=5, CountLineCodeDecl=5, CountLineCodeExe=3, CountLineComment=0, CountStmt=5, CountStmtDecl=5, CountStmtExe=3, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=1
145,562
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/core/models.py
squad.core.models.Test.Meta
class Meta: ordering = ['metadata__name']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=2, CountLineCodeExe=1, CountLineComment=0, CountStmt=2, CountStmtDecl=2, CountStmtExe=1, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,563
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/core/models.py
squad.core.models.TestRun.Meta
class Meta: unique_together = ('build', 'job_id')
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=2, CountLineCodeExe=1, CountLineComment=0, CountStmt=2, CountStmtDecl=2, CountStmtExe=1, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,564
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/core/models.py
squad.core.models.UserNamespace.Meta
class Meta: proxy = True
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=2, CountLineCodeExe=1, CountLineComment=0, CountStmt=2, CountStmtDecl=2, CountStmtExe=1, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,565
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/build_settings.py
squad.frontend.build_settings.BuildSettingsForm.Meta
class Meta: model = Build fields = ['keep_data']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,566
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/ci.py
squad.frontend.ci.TestjobFilter.Meta
class Meta: model = TestJob fields = ['name', 'job_status', 'submitted', 'fetched', 'job_id', 'environment']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,567
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/group_settings.py
squad.frontend.group_settings.GroupForm.Meta
class Meta: model = Group fields = ['name', 'description']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=1, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,568
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/group_settings.py
squad.frontend.group_settings.GroupFormAdvanced.Meta
class Meta: model = Group fields = ['settings']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,569
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/group_settings.py
squad.frontend.group_settings.GroupMemberForm.Meta
class Meta: model = GroupMember fields = ['group', 'user', 'access']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,570
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/group_settings.py
squad.frontend.group_settings.NewGroupForm.Meta
class Meta(GroupForm.Meta): fields = ['slug'] + GroupForm.Meta.fields
class Meta(GroupForm.Meta): pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=2, CountLineCodeExe=1, CountLineComment=0, CountStmt=2, CountStmtDecl=2, CountStmtExe=1, MaxCyclomatic=0, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=0
145,571
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/group_settings.py
squad.frontend.group_settings.NewProjectForm.Meta
class Meta: model = Project fields = ['group', 'slug', 'name', 'is_public', 'description']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,572
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/project_settings.py
squad.frontend.project_settings.EnvironmentForm.Meta
class Meta: model = Environment fields = ['slug', 'expected_test_runs']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,573
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/core/models.py
squad.core.models.SuiteMetadata.Meta
class Meta: unique_together = ('kind', 'suite', 'name') verbose_name_plural = 'Suite metadata'
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,574
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/project_settings.py
squad.frontend.project_settings.ProjectForm.Meta
class Meta: model = Project fields = ['name', 'is_public', 'is_archived', 'description', 'enabled_plugins_list', 'wait_before_notification', 'notification_timeout', 'force_finishing_builds_on_timeout', 'data_retention_days', 'important_metadata_keys']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=6, CountLineBlank=0, CountLineCode=6, CountLineCodeDecl=3, CountLineCodeExe=5, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,575
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/project_settings.py
squad.frontend.project_settings.ProjectFormBuildConfidence.Meta
class Meta: model = Project fields = ['build_confidence_count', 'build_confidence_threshold']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,576
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/user_settings.py
squad.frontend.user_settings.ProfileForm.Meta
class Meta: model = User fields = ['first_name', 'last_name', 'email']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,577
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/user_settings.py
squad.frontend.user_settings.UserPreferencesForm.Meta
class Meta: model = UserPreferences fields = ['display_failures_only']
class Meta: pass
metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0, AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0, CountClassBase=0, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0, CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2, CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0, MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=0
145,578
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/views.py
squad.frontend.views.TestResultTable.Cell
class Cell(object): def __init__(self): self.has_failures = False self.has_known_failures = False self.statuses = [] @property def has_data(self): return len(self.statuses) > 0
class Cell(object): def __init__(self): pass @property def has_data(self): pass
metrics: total_program_units=4, total_doc_str=0, AvgCountLine=3, AvgCountLineBlank=0, AvgCountLineCode=3, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0, CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0, CountDeclInstanceMethod=2, CountDeclInstanceVariable=3, CountDeclMethod=2, CountDeclMethodAll=2, CountLine=10, CountLineBlank=2, CountLineCode=8, CountLineCodeDecl=7, CountLineCodeExe=4, CountLineComment=0, CountStmt=7, CountStmtDecl=6, CountStmtExe=4, MaxCyclomatic=1, MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=2
145,579
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/api/test_ci.py
test.api.test_ci.CiApiTest
class CiApiTest(TestCase): def setUp(self): self.group = core_models.Group.objects.create(slug='mygroup') self.usergroup = core_models.UserNamespace.objects.create( slug='~project-member-user') self.project = self.group.projects.create(slug='myproject') self.userproject = self.usergroup.projects.create(slug='userproject') self.project_admin_user = User.objects.create(username='project-admin') self.group.add_admin(self.project_admin_user) self.project_privileged_user = User.objects.create( username='project-user') self.group.add_user(self.project_privileged_user, 'privileged') self.project_submitter_level_user = User.objects.create( username='project-user-submitter-level') self.group.add_user(self.project_submitter_level_user, 'submitter') self.project_member_user = User.objects.create( username='project-member-user') self.group.add_user(self.project_member_user, 'member') self.usergroup.add_user(self.project_member_user, 'privileged') self.build = self.project.builds.create(version='1') self.userbuild = self.userproject.builds.create(version='1') Token.objects.create(user=self.project_privileged_user, key='thekey') Token.objects.create( user=self.project_submitter_level_user, key='thesubmitterkey') Token.objects.create(user=self.project_member_user, key='memberkey') Token.objects.create(user=self.project_admin_user, key='adminkey') self.backend = models.Backend.objects.create( name='lava', implementation_type='fake') self.tuxsuite = models.Backend.objects.create( name='tuxsuite', implementation_type='tuxsuite') self.client = APIClient('thekey') self.submitter_client = APIClient('thesubmitterkey') self.restclient = RestAPIClient('thekey') self.memberclient = APIClient('memberkey') self.adminclient = APIClient('adminkey') def test_auth(self): self.client.token = 'invalid-token' r = self.client.post('/api/submitjob/mygroup/myproject/1/myenv') self.assertEqual(403, r.status_code) self.assertEqual( 'User needs permission to submit test jobs.', r.json()['detail']) r = self.client.post('/api/watchjob/mygroup/myproject/1/myenv') self.assertEqual(403, r.status_code) self.assertEqual( 'User needs permission to submit test jobs.', r.json()['detail']) def test_group_project_not_found(self): r = self.client.post( '/api/submitjob/nonexistentgroup/myproject/1/myenv') self.assertEqual(404, r.status_code) self.assertEqual('No Group matches the given query.', r.json()['detail']) r = self.client.post( '/api/submitjob/mygroup/nonexistentproject/1/myenv') self.assertEqual(404, r.status_code) self.assertEqual('No Project matches the given query.', r.json()['detail']) def test_creates_test_run(self): args = { 'backend': 'lava', 'definition': 'foo: 1', } r = self.submitter_client.post( '/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(403, r.status_code) r = self.client.post('/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(201, r.status_code) testjob_queryset = models.TestJob.objects.filter( target=self.project, environment='myenv', target_build=self.build, backend=self.backend, definition='foo: 1', ) self.assertEqual( 1, testjob_queryset.count() ) logentry_queryset = LogEntry.objects.filter( user_id=self.project_privileged_user.pk, object_id=testjob_queryset.last().pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.last().action_flag ) def test_submitjob_private_group(self): args = { 'backend': 'lava', 'definition': 'foo: 1', } r = self.memberclient.post( '/api/submitjob/~project-member-user/userproject/1/myenv', args) 
self.assertEqual(201, r.status_code) testjob_queryset = models.TestJob.objects.filter( target=self.userproject, environment='myenv', target_build=self.userbuild, backend=self.backend, definition='foo: 1', ) self.assertEqual( 1, testjob_queryset.count() ) logentry_queryset = LogEntry.objects.filter( user_id=self.project_member_user.pk, object_id=testjob_queryset.last().pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.last().action_flag ) def test_invalid_backend_test_run(self): args = { 'backend': 'lava.foo', 'definition': 'foo: 1', } r = self.client.post('/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(400, r.status_code) def test_missing_definition_test_run(self): args = { 'backend': 'lava' } r = self.client.post('/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(400, r.status_code) @patch('squad.ci.backend.fake.Backend.check_job_definition', return_value='bad definition') def test_malformed_definition(self, check_job_definition): args = { 'backend': 'lava', 'definition': 'unmatched double quotes: "a""' } r = self.client.post('/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(400, r.status_code) def test_disabled_environment(self): args = { 'backend': 'lava', 'definition': 'foo: 1', } r = self.client.post('/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(201, r.status_code) testjob_queryset = models.TestJob.objects.filter( target=self.project, environment='myenv', target_build=self.build, backend=self.backend, definition='foo: 1', ) self.assertEqual(1, testjob_queryset.count()) disabled_env = self.project.environments.create( slug='disabled-env', expected_test_runs=-1) r = self.client.post( '/api/submitjob/mygroup/myproject/1/%s' % disabled_env.slug, args) self.assertEqual(400, r.status_code) self.assertEqual(r.content.decode( ), "environment '%s' is disabled and squad will not accept new submissions to it" % disabled_env.slug) def test_accepts_definition_as_file_upload(self): args = { 'backend': 'lava', 'definition': open(twoline_job_definition_file) } r = self.client.post('/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(201, r.status_code) testjob = models.TestJob.objects.filter( target=self.project, environment='myenv', target_build=self.build, backend=self.backend, definition='bar: something\nfoo: 1', ).get() # when parsing back to yaml, it weirdly adds an extra linebreak at the end self.assertEqual('bar: something\nfoo: 1\n', testjob.show_definition) @patch("squad.ci.tasks.submit.delay") def test_schedules_submission(self, submit): args = { 'backend': 'lava', 'definition': 'foo: 1', } r = self.submitter_client.post( '/api/submitjob/mygroup/myproject/1/myenv', args) self.assertEqual(403, r.status_code) self.client.post('/api/submitjob/mygroup/myproject/1/myenv', args) job_id = models.TestJob.objects.last().id submit.assert_called_with(job_id) @patch("squad.ci.tasks.fetch.apply_async") def test_auth_on_watch_testjob(self, fetch): testjob_id = 1234 args = { 'backend': 'lava', 'testjob_id': testjob_id, } self.client.token = 'invalid-token' r = self.client.post('/api/watchjob/mygroup/myproject/1/myenv', args) self.assertEqual(403, r.status_code) @patch("squad.ci.tasks.fetch.delay") def test_watch_testjob(self, fetch): testjob_id = 1234 args = { 'backend': 'lava', 'testjob_id': testjob_id, } r = self.client.post('/api/watchjob/mygroup/myproject/1/myenv', args) self.assertEqual(201, r.status_code) testjob_queryset = models.TestJob.objects.filter( 
target=self.project, environment='myenv', target_build=self.build, backend=self.backend, submitted=True, job_id=testjob_id ) self.assertEqual( 1, testjob_queryset.count() ) testjob = testjob_queryset.first() self.assertTrue(len(testjob.definition) > 0) fetch.assert_called_with(testjob_queryset.first().id) logentry_queryset = LogEntry.objects.filter( user_id=self.project_privileged_user.pk, object_id=testjob_queryset.last().pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.last().action_flag ) @patch("squad.ci.tasks.fetch.delay") def test_watch_testjob_do_not_fetch_rightaway(self, fetch): testjob_id = 1234 args = { 'backend': 'lava', 'testjob_id': testjob_id, } r = self.client.post( '/api/watchjob/mygroup/myproject/1/myenv?delay_fetch', args) self.assertEqual(201, r.status_code) testjob_queryset = models.TestJob.objects.filter( target=self.project, environment='myenv', target_build=self.build, backend=self.backend, submitted=True, job_id=testjob_id ) self.assertEqual( 1, testjob_queryset.count() ) fetch.assert_not_called() logentry_queryset = LogEntry.objects.filter( user_id=self.project_privileged_user.pk, object_id=testjob_queryset.last().pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.last().action_flag ) @patch("squad.ci.tasks.fetch.apply_async") def test_watch_testjob_private_group(self, fetch): testjob_id = 1234 args = { 'backend': 'lava', 'testjob_id': testjob_id, } r = self.memberclient.post( '/api/watchjob/~project-member-user/userproject/1/myenv', args) self.assertEqual(201, r.status_code) testjob_queryset = models.TestJob.objects.filter( target=self.userproject, environment='myenv', target_build=self.userbuild, backend=self.backend, submitted=True, job_id=testjob_id ) self.assertEqual( 1, testjob_queryset.count() ) logentry_queryset = LogEntry.objects.filter( user_id=self.project_member_user.pk, object_id=testjob_queryset.last().pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.last().action_flag ) @patch("squad.ci.tasks.fetch.apply_async") def test_watch_testjob_mising_id(self, fetch): args = { 'backend': 'lava' } r = self.client.post('/api/watchjob/mygroup/myproject/1/myenv', args) self.assertEqual(400, r.status_code) @patch("squad.ci.tasks.fetch.apply_async") def test_watch_testjob_malformed_id(self, fetch): args = { 'backend': 'lava' } r = self.client.post('/api/watchjob/mygroup/myproject/1/myenv', args) self.assertEqual(400, r.status_code) def test_fetch_testjob_not_supported_by_backend(self): # Backend does not exist r = self.client.post( '/api/fetchjob/mygroup/myproject/1/myenv/does-not-exist') self.assertEqual(400, r.status_code) # Backend does not support callbacks r = self.client.post('/api/fetchjob/mygroup/myproject/1/myenv/lava') self.assertEqual(400, r.status_code) def test_fetch_testjob_group_project_does_not_exist(self): r = self.client.post( '/api/fetchjob/doesnotexist/myproject/1/myenv/lava') self.assertEqual(400, r.status_code) r = self.client.post('/api/fetchjob/mygroup/doesnotexist/1/myenv/lava') self.assertEqual(400, r.status_code) @patch("squad.ci.tasks.fetch.delay") @patch("squad.ci.backend.tuxsuite.Backend.process_callback") @patch("squad.ci.backend.tuxsuite.Backend.validate_callback") def test_fetch_testjob_invalid(self, mock_validate_callback, mock_process_callback, mock_fetch): mock_validate_callback.side_effect = Exception("bad") r = self.client.post( '/api/fetchjob/mygroup/myproject/1/myenv/tuxsuite') 
self.assertEqual(400, r.status_code) mock_validate_callback.assert_called() mock_validate_callback.reset_mock() mock_validate_callback.side_effect = None r = self.client.post( '/api/fetchjob/mygroup/myproject/1/myenv/tuxsuite') self.assertEqual(400, r.status_code) mock_validate_callback.assert_called() mock_validate_callback.reset_mock() mock_process_callback.side_effect = Exception("bad") r = self.client.post('/api/fetchjob/mygroup/myproject/1/myenv/tuxsuite', {"a": 1}, content_type="application/json") self.assertEqual(400, r.status_code) mock_validate_callback.assert_called() mock_process_callback.assert_called() mock_fetch.assert_not_called() @patch("squad.ci.tasks.fetch.delay") @patch("squad.ci.backend.tuxsuite.Backend.process_callback") @patch("squad.ci.backend.tuxsuite.Backend.validate_callback") def test_fetch_testjob(self, mock_validate_callback, mock_process_callback, mock_fetch): testjob = models.TestJob(id=123) mock_process_callback.return_value = testjob r = self.client.post('/api/fetchjob/mygroup/myproject/1/myenv/tuxsuite', {"a": 1}, content_type="application/json") self.assertEqual(201, r.status_code) mock_validate_callback.assert_called() mock_process_callback.assert_called() mock_fetch.assert_called_with(testjob.id) @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter(self, get_implementation): impl = MagicMock() impl.resubmit = MagicMock() get_implementation.return_value = impl t = self.backend.test_jobs.create( target=self.project, can_resubmit=True, job_id="12345", ) r = self.client.post('/api/resubmit/%s' % t.pk) self.assertEqual(201, r.status_code) impl.resubmit.assert_called() t.refresh_from_db() self.assertEqual(False, t.can_resubmit) @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter_cant_resubmit(self, get_implementation): impl = MagicMock() impl.resubmit = MagicMock() get_implementation.return_value = impl t = self.backend.test_jobs.create( target=self.project, can_resubmit=False ) r = self.client.post('/api/resubmit/%s' % t.pk) self.assertEqual(403, r.status_code) impl.resubmit.assert_not_called() t.refresh_from_db() self.assertEqual(False, t.can_resubmit) @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter_token_auth(self, get_implementation): impl = MagicMock() impl.resubmit = MagicMock() get_implementation.return_value = impl t = self.backend.test_jobs.create( target=self.project, can_resubmit=True, job_id="12345", ) r = self.restclient.post('/api/resubmit/%s' % t.pk) self.assertEqual(201, r.status_code) impl.resubmit.assert_called() t.refresh_from_db() self.assertEqual(False, t.can_resubmit) @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter_auth_token_cant_resubmit(self, get_implementation): impl = MagicMock() impl.resubmit = MagicMock() get_implementation.return_value = impl t = self.backend.test_jobs.create( target=self.project, can_resubmit=False ) r = self.restclient.post('/api/resubmit/%s' % t.pk) self.assertEqual(403, r.status_code) impl.resubmit.assert_not_called() t.refresh_from_db() self.assertEqual(False, t.can_resubmit) @patch('squad.ci.models.Backend.get_implementation') def test_force_resubmit_submitter_token_auth(self, get_implementation): impl = MagicMock() impl.resubmit = MagicMock() get_implementation.return_value = impl t = self.backend.test_jobs.create( target=self.project, can_resubmit=True, job_id="12345", ) r = self.restclient.post('/api/resubmit/%s' % t.pk) self.assertEqual(201, r.status_code) 
impl.resubmit.assert_called() t.refresh_from_db() self.assertEqual(False, t.can_resubmit) @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_admin(self, get_implementation): impl = MagicMock() impl.resubmit = MagicMock() get_implementation.return_value = impl t = self.backend.test_jobs.create( target=self.project, can_resubmit=True, job_id="12345", ) r = self.adminclient.post('/api/resubmit/%s' % t.pk) self.assertEqual(201, r.status_code) impl.resubmit.assert_called() t.refresh_from_db() self.assertEqual(False, t.can_resubmit) def test_disallowed_resubmit(self): t = self.backend.test_jobs.create( target=self.project, can_resubmit=True ) r = self.memberclient.post('/api/resubmit/%s' % t.pk) self.assertEqual(401, r.status_code) def test_resubmit_invalid_id(self): staff_user_password = "secret" staff_user = User.objects.create_superuser( username="staffuser", email="staff@example.com", password=staff_user_password, is_staff=True) staff_user.save() client = Client() client.login(username=staff_user.username, password=staff_user_password) r = client.post('/api/resubmit/999') self.assertEqual(404, r.status_code) @patch('squad.ci.models.TestJob.resubmit', side_effect=SubmissionIssue('BOOM')) def test_resubmit_error(self, resubmit): t = self.backend.test_jobs.create( target=self.project, can_resubmit=True ) r = self.adminclient.post('/api/resubmit/%s' % t.pk) self.assertEqual(500, r.status_code) self.assertEqual('BOOM', r.content.decode())
class CiApiTest(TestCase): def setUp(self): pass def test_auth(self): pass def test_group_project_not_found(self): pass def test_creates_test_run(self): pass def test_submitjob_private_group(self): pass def test_invalid_backend_test_run(self): pass def test_missing_definition_test_run(self): pass @patch('squad.ci.backend.fake.Backend.check_job_definition', return_value='bad definition') def test_malformed_definition(self, check_job_definition): pass def test_disabled_environment(self): pass def test_accepts_definition_as_file_upload(self): pass @patch("squad.ci.tasks.submit.delay") def test_schedules_submission(self, submit): pass @patch("squad.ci.tasks.fetch.apply_async") def test_auth_on_watch_testjob(self, fetch): pass @patch("squad.ci.tasks.fetch.delay") def test_watch_testjob(self, fetch): pass @patch("squad.ci.tasks.fetch.delay") def test_watch_testjob_do_not_fetch_rightaway(self, fetch): pass @patch("squad.ci.tasks.fetch.apply_async") def test_watch_testjob_private_group(self, fetch): pass @patch("squad.ci.tasks.fetch.apply_async") def test_watch_testjob_mising_id(self, fetch): pass @patch("squad.ci.tasks.fetch.apply_async") def test_watch_testjob_malformed_id(self, fetch): pass def test_fetch_testjob_not_supported_by_backend(self): pass def test_fetch_testjob_group_project_does_not_exist(self): pass @patch("squad.ci.tasks.fetch.delay") @patch("squad.ci.backend.tuxsuite.Backend.process_callback") @patch("squad.ci.backend.tuxsuite.Backend.validate_callback") def test_fetch_testjob_invalid(self, mock_validate_callback, mock_process_callback, mock_fetch): pass @patch("squad.ci.tasks.fetch.delay") @patch("squad.ci.backend.tuxsuite.Backend.process_callback") @patch("squad.ci.backend.tuxsuite.Backend.validate_callback") def test_fetch_testjob_not_supported_by_backend(self): pass @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter(self, get_implementation): pass @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter_cant_resubmit(self, get_implementation): pass @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter_token_auth(self, get_implementation): pass @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_submitter_auth_token_cant_resubmit(self, get_implementation): pass @patch('squad.ci.models.Backend.get_implementation') def test_force_resubmit_submitter_token_auth(self, get_implementation): pass @patch('squad.ci.models.Backend.get_implementation') def test_resubmit_admin(self, get_implementation): pass def test_disallowed_resubmit(self): pass def test_resubmit_invalid_id(self): pass @patch('squad.ci.models.TestJob.resubmit', side_effect=SubmissionIssue('BOOM')) def test_resubmit_error(self, resubmit): pass
52
0
15
1
14
0
1
0.01
1
7
5
0
30
17
30
30
510
52
455
145
403
3
258
128
227
1
1
0
30
145,580
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/api/test_rest.py
test.api.test_rest.RestApiTest
class RestApiTest(APITestCase): def setUp(self): self.group = models.Group.objects.create(slug='mygroup') self.group2 = models.Group.objects.create(slug='mygroup2') self.project = self.group.projects.create(slug='myproject') self.project2 = self.group.projects.create(slug='myproject2') self.project3 = self.group2.projects.create(slug='myproject2') t = timezone.make_aware(datetime.datetime(2018, 10, 1, 1, 0, 0)) self.build = self.project.builds.create(version='1', datetime=t) t2 = timezone.make_aware(datetime.datetime(2018, 10, 2, 1, 0, 0)) self.build2 = self.project.builds.create(version='2', datetime=t2) t3 = timezone.make_aware(datetime.datetime(2018, 10, 3, 1, 0, 0)) self.build3 = self.project.builds.create(version='3', datetime=t3) t4 = timezone.make_aware(datetime.datetime(2018, 10, 4, 1, 0, 0)) self.build4 = self.project.builds.create(version='v4', datetime=t4) t5 = timezone.make_aware(datetime.datetime(2018, 10, 5, 1, 0, 0)) self.build5 = self.project.builds.create(version='5', datetime=t5) t6 = timezone.make_aware(datetime.datetime(2018, 10, 6, 1, 0, 0)) self.build6 = self.project.builds.create(version='v6', datetime=t6) self.build7 = self.project3.builds.create(version='1', datetime=t2) self.environment = self.project.environments.create( slug='myenv', expected_test_runs=1) self.environment2 = self.project3.environments.create( slug='myenv', expected_test_runs=1) self.environment_a = self.project.environments.create(slug='env-a') self.environment_a2 = self.project3.environments.create(slug='env-a') self.testrun = self.build.test_runs.create( environment=self.environment, metadata_file='{"key1": "val1"}') self.testrun2 = self.build2.test_runs.create( environment=self.environment) self.testrun3 = self.build3.test_runs.create( environment=self.environment) self.testrun4 = self.build4.test_runs.create( environment=self.environment, completed=True) self.testrun6 = self.build6.test_runs.create( environment=self.environment, completed=True) self.testrun_a = self.build.test_runs.create( environment=self.environment_a, metadata_file='{"key2": "val2"}') self.testrun2_a = self.build2.test_runs.create( environment=self.environment_a, build=self.build2) self.testrun3_a = self.build3.test_runs.create( environment=self.environment_a, build=self.build3) self.testrun7 = self.build7.test_runs.create( environment=self.environment2) self.testrun7_a = self.build7.test_runs.create( environment=self.environment_a2) self.backend = ci_models.Backend.objects.create(name='foobar') self.fake_backend = ci_models.Backend.objects.create( name='foobarfake', implementation_type='fake') self.patchsource = models.PatchSource.objects.create( name='baz_source', username='u', url='http://example.com', token='secret') self.knownissue = models.KnownIssue.objects.create( title='knownissue_foo', test_name='test/bar', active=True) self.knownissue.environments.add(self.environment) self.knownissue2 = models.KnownIssue.objects.create( title='knownissue_bar', test_name='test/foo', active=True) self.knownissue2.environments.add(self.environment_a) self.testuser = models.User.objects.create( username='test_user', email="test@example.com", is_superuser=False) self.testjob = ci_models.TestJob.objects.create( definition="foo: bar", backend=self.backend, target=self.project, target_build=self.build, environment='myenv', testrun=self.testrun ) self.testjob2 = ci_models.TestJob.objects.create( definition="foo: bar", backend=self.backend, target=self.project, target_build=self.build2, environment='myenv', testrun=self.testrun2, ) 
self.testjob3 = ci_models.TestJob.objects.create( definition="foo: bar", backend=self.backend, target=self.project, target_build=self.build3, environment='myenv', testrun=self.testrun3 ) self.testjob5 = ci_models.TestJob.objects.create( definition="foo: bar", backend=self.fake_backend, target=self.project, target_build=self.build5, environment='myenv', job_id='1234', submitted=True ) self.testjob6 = ci_models.TestJob.objects.create( definition="foo: bar", backend=self.fake_backend, target=self.project, target_build=self.build5, environment='myenv', job_id='1235', submitted=True, fetched=True, can_resubmit=True, parent_job=self.testjob5, ) testrun_sets = [ [self.testrun, self.testrun2, self.testrun3, self.testrun7], # environment: myenv [self.testrun_a, self.testrun2_a, self.testrun3_a, self.testrun7_a], # environment: env-a ] tests = { 'foo/test1': ['pass', 'fail', 'pass', 'pass'], # fix 'foo/test2': ['pass', 'pass', 'fail', 'fail'], # regression 'foo/test3': ['pass', 'pass', 'pass', 'pass'], 'foo/test4': ['pass', 'pass', 'pass', 'pass'], 'foo/test5': ['pass', 'pass', 'pass', 'pass'], 'foo/test6': ['pass', 'pass', 'pass', 'pass'], 'foo/test7': ['pass', 'pass', 'pass', 'pass'], 'foo/test8': ['pass', 'pass', 'pass', 'pass'], 'foo/test9': ['pass', 'pass', 'pass', 'pass'], 'bar/test1': ['pass', 'pass', 'pass', 'pass'], 'bar/test2': ['fail', 'fail', 'fail', 'pass'], 'bar/test3': ['fail', 'fail', 'fail', 'fail'], 'bar/test4': ['fail', 'fail', 'fail', 'fail'], 'bar/test5': ['fail', 'fail', 'fail', 'fail'], 'bar/test6': ['fail', 'fail', 'fail', 'fail'], 'bar/test7': ['fail', 'fail', 'fail', 'fail'], 'bar/test8': ['fail', 'fail', 'fail', 'fail'], 'bar/test9': ['fail', 'fail', 'fail', 'fail'], } for test_name in tests.keys(): for testruns in testrun_sets: for i, testrun in enumerate(testruns): result = tests[test_name][i] s, t = test_name.split('/') r = {'pass': True, 'fail': False}[result] suite, _ = testrun.build.project.suites.get_or_create( slug=s) metadata, _ = models.SuiteMetadata.objects.get_or_create( suite=s, name=t, kind='test') testrun.tests.create(suite=suite, result=r, metadata=metadata, build=testrun.build, environment=testrun.environment) testrun.tests.create(suite=suite, result=None, metadata=metadata, build=testrun.build, environment=testrun.environment) testrun.tests.create(suite=suite, result=False, has_known_issues=True, metadata=metadata, build=testrun.build, environment=testrun.environment) metric_suite = 'mymetricsuite' suite, _ = self.project.suites.get_or_create(slug=metric_suite) metadata, _ = models.SuiteMetadata.objects.get_or_create( suite=metric_suite, name='mymetric', kind='metric') self.testrun.metrics.create( suite=suite, result=1, metadata=metadata, build=self.build, environment=self.environment) self.emailtemplate = models.EmailTemplate.objects.create( name="fooTemplate", subject="abc", plain_text="def", ) self.validemailtemplate = models.EmailTemplate.objects.create( name="validTemplate", subject="subject", plain_text="{% if foo %}bar{% endif %}", html="{% if foo %}bar{% endif %}" ) self.invalidemailtemplate = models.EmailTemplate.objects.create( name="invalidTemplate", subject="subject", plain_text="{% if foo %}bar", html="{% if foo %}bar" ) def hit(self, url): with count_queries('url:' + url): response = self.client.get(url) self.assertEqual(200, response.status_code) text = response.content.decode('utf-8') if response['Content-Type'] == 'application/json': return json.loads(text) else: return text def post(self, url, data): user, _ = 
models.User.objects.get_or_create( username='u', is_superuser=True) if not self.group.members.filter(pk=user.pk).exists(): self.group.add_admin(user) token, _ = Token.objects.get_or_create(user=user) self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key) response = self.client.post(url, data) return response def get(self, url): user, _ = models.User.objects.get_or_create( username='u', is_superuser=True) if not self.group.members.filter(pk=user.pk).exists(): self.group.add_admin(user) token, _ = Token.objects.get_or_create(user=user) self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key) response = self.client.get(url) return response def receive(self, datestr, env, metrics={}, tests={}): receive = ReceiveTestRun(self.project) testrun, _ = receive( version=datestr, environment_slug=env, metadata_file=json.dumps( {"datetime": datestr + "T00:00:00+00:00", "job_id": "1"} ), metrics_file=json.dumps(metrics), tests_file=json.dumps(tests), ) return testrun def test_root(self): self.hit('/api/') def test_projects(self): data = self.hit('/api/projects/') self.assertEqual(3, len(data['results'])) def test_project_basic_settings(self): data = self.hit('/api/projects/%d/basic_settings/' % self.project.id) self.assertTrue("build_confidence_count" in data) self.assertTrue("build_confidence_threshold" in data) def test_project_builds(self): data = self.hit('/api/projects/%d/builds/' % self.project.id) self.assertEqual(6, len(data['results'])) def test_project_test_results(self): response = self.client.get( '/api/projects/%d/test_results/' % self.project.id) self.assertEqual(400, response.status_code) data = self.hit( '/api/projects/%d/test_results/?test_name=foo/test1' % self.project.id) self.assertTrue(len(data) > 0) def test_create_project_with_enabled_plugin_list_1_element(self): response = self.post( '/api/projects/', { 'group': "http://testserver/api/groups/%d/" % self.group.id, 'slug': 'newproject', 'enabled_plugins_list': ['foo'], } ) self.assertEqual(201, response.status_code) project = self.hit('/api/projects/?slug=newproject')['results'][0] self.assertEqual(['foo'], project['enabled_plugins_list']) def test_create_project_with_enabled_plugin_list_2_elements(self): response = self.post( '/api/projects/', { 'group': "http://testserver/api/groups/%d/" % self.group.id, 'slug': 'newproject', 'enabled_plugins_list': ['foo', 'bar'], } ) self.assertEqual(201, response.status_code) project = self.hit('/api/projects/?slug=newproject')['results'][0] self.assertEqual(['foo', 'bar'], project['enabled_plugins_list']) def test_create_project_with_non_admin_account(self): user, _ = models.User.objects.get_or_create(username='u') self.group.add_user(user) token, _ = Token.objects.get_or_create(user=user) self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key) response = self.client.post( '/api/projects/', { 'group': "http://testserver/api/groups/%d/" % self.group.id, 'slug': 'newproject', 'enabled_plugins_list': ['foo', 'bar'], } ) self.assertEqual(201, response.status_code) project = self.client.get( '/api/projects/?slug=newproject').json()['results'][0] self.assertEqual(['foo', 'bar'], project['enabled_plugins_list']) def test_project_subscribe_unsubscribe_email(self): email_addr = "foo@bar.com" response = self.post( '/api/projects/%s/subscribe/' % self.project.pk, { 'email': email_addr } ) self.assertEqual(201, response.status_code) subscription_queryset = self.project.subscriptions.filter( email=email_addr) self.assertTrue(subscription_queryset.exists()) user = 
models.User.objects.get(username='u') subscription = subscription_queryset.last().pk logentry_queryset = LogEntry.objects.filter( user_id=user.pk, object_id=subscription ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.first().action_flag ) response1 = self.post( '/api/projects/%s/unsubscribe/' % self.project.pk, { 'email': email_addr } ) self.assertEqual(200, response1.status_code) self.assertFalse(self.project.subscriptions.filter( email=email_addr).exists()) self.assertEqual( 2, logentry_queryset.count() ) self.assertEqual( DELETION, logentry_queryset.first().action_flag ) def test_project_unsubscribe_email_different_project(self): email_addr = "foo@bar.com" response = self.post( '/api/projects/%s/subscribe/' % self.project.pk, { 'email': email_addr } ) self.assertEqual(201, response.status_code) response1 = self.post( '/api/projects/%s/subscribe/' % self.project2.pk, { 'email': email_addr } ) self.assertEqual(201, response1.status_code) self.assertTrue(self.project.subscriptions.filter( email=email_addr).exists()) self.assertTrue(self.project2.subscriptions.filter( email=email_addr).exists()) response2 = self.post( '/api/projects/%s/unsubscribe/' % self.project2.pk, { 'email': email_addr } ) self.assertEqual(200, response2.status_code) self.assertTrue(self.project.subscriptions.filter( email=email_addr).exists()) self.assertFalse(self.project2.subscriptions.filter( email=email_addr).exists()) def test_project_subscribe_invalid_email(self): email_addr = "foo@bar@com" response = self.post( '/api/projects/%s/subscribe/' % self.project.pk, { 'email': email_addr } ) self.assertEqual(400, response.status_code) self.assertFalse(self.project.subscriptions.filter( email=email_addr).exists()) def test_project_unsubscribe_invalid_email(self): email_addr = "foo@bar@com" response = self.post( '/api/projects/%s/unsubscribe/' % self.project.pk, { 'email': email_addr } ) self.assertEqual(400, response.status_code) def test_project_subscribe_unsubscribe_user(self): response = self.post( '/api/projects/%s/subscribe/' % self.project.pk, { 'email': self.testuser.email } ) self.assertEqual(201, response.status_code) self.assertTrue(self.project.subscriptions.filter( user=self.testuser).exists()) response1 = self.post( '/api/projects/%s/unsubscribe/' % self.project.pk, { 'email': self.testuser.email } ) self.assertEqual(200, response1.status_code) self.assertFalse(self.project.subscriptions.filter( email=self.testuser.email).exists()) def test_project_subscribe_missing_email(self): email_addr = "foo@bar.com" response = self.post( '/api/projects/%s/subscribe/' % self.project.pk, { 'email_foo': email_addr } ) self.assertEqual(400, response.status_code) def test_project_unsubscribe_missing_email(self): email_addr = "foo@bar.com" response = self.post( '/api/projects/%s/unsubscribe/' % self.project.pk, { 'email_foo': email_addr } ) self.assertEqual(400, response.status_code) def test_project_filter_by_datetime(self): data = self.hit("/api/projects/") self.assertEqual(3, data["count"]) day_ago = datetime.timedelta(days=1) self.project.datetime -= day_ago self.project.save() yesterday = datetime.datetime.now() - day_ago data = self.hit(f"/api/projects/?datetime__gt={yesterday}") self.assertEqual(2, data["count"]) def test_builds(self): data = self.hit('/api/builds/') self.assertEqual(7, len(data['results'])) def test_builds_id_filter(self): last = self.project.builds.last() data = self.hit(f'/api/builds/?id__lt={last.id}') self.assertEqual(5, len(data['results'])) 
def test_builds_status(self): self.build2.test_jobs.all().delete() self.build3.test_jobs.all().delete() UpdateProjectStatus()(self.testrun2) UpdateProjectStatus()(self.testrun3) data = self.hit('/api/builds/%d/status/' % self.build3.id) self.assertIn('foo/test2', data['regressions']) self.assertIn('foo/test1', data['fixes']) self.assertNotIn('myenv', data['details']) def test_builds_email_missing_status(self): # this should not happen normally, but let's test it anyway self.build3.status.delete() response = self.client.get('/api/builds/%d/email/' % self.build3.id) self.assertEqual(404, response.status_code) def test_builds_email(self): # update ProjectStatus self.build2.test_jobs.all().delete() self.build3.test_jobs.all().delete() UpdateProjectStatus()(self.testrun2) UpdateProjectStatus()(self.testrun3) response = self.hit('/api/builds/%d/email/' % self.build3.id) self.assertIn('foo/test2', response) # sanity check # make sure proper baseline is used self.assertIn('Regressions (compared to build 2)', response) def test_builds_email_custom_template(self): # update ProjectStatus UpdateProjectStatus()(self.testrun2) UpdateProjectStatus()(self.testrun3) response = self.client.get( '/api/builds/%d/email/?template=%s' % (self.build3.id, self.validemailtemplate.pk)) self.assertEqual(200, response.status_code) self.assertEqual("text/plain", response['Content-Type']) def test_builds_email_custom_invalid_template(self): # update ProjectStatus UpdateProjectStatus()(self.testrun2) UpdateProjectStatus()(self.testrun3) response = self.client.get( '/api/builds/%d/email/?template=%s' % (self.build3.id, self.invalidemailtemplate.pk)) self.assertEqual(400, response.status_code) def test_builds_email_custom_baseline(self): UpdateProjectStatus()(self.testrun) UpdateProjectStatus()(self.testrun3) response = self.client.get( '/api/builds/%d/email/?baseline=%s&output=text/plain' % (self.build3.id, self.build.id)) self.assertContains(response, "Regressions (compared to build 1)") def test_builds_email_custom_baseline_html(self): UpdateProjectStatus()(self.testrun) UpdateProjectStatus()(self.testrun3) response = self.client.get( '/api/builds/%d/email/?baseline=%s&output=text/html' % (self.build3.id, self.build.id)) self.assertContains( response, "Regressions (compared to build 1)", html=True) def test_builds_email_custom_baseline_missing_status(self): UpdateProjectStatus()(self.testrun) self.build2.status.delete() response = self.client.get( '/api/builds/%d/email/?baseline=%s' % (self.build.id, self.build2.id)) self.assertEqual(400, response.status_code) def test_builds_email_custom_invalid_baseline(self): UpdateProjectStatus()(self.testrun) response = self.client.get( '/api/builds/%d/email/?baseline=999' % (self.build.id)) self.assertEqual(400, response.status_code) @patch('squad.core.tasks.prepare_report.delay') def test_build_report(self, prepare_report_mock): response = self.client.get('/api/builds/%d/report/' % self.build3.id) self.assertEqual(202, response.status_code) report_object = self.build3.delayed_reports.last() self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) self.assertIsNotNone(report_object) self.assertIsNone(report_object.status_code) # default baseline is used self.assertEqual(report_object.baseline, None) prepare_report_mock.assert_called() logentry_queryset = LogEntry.objects.filter( object_id=report_object.pk ) self.assertEqual( 0, # do not create LogEntry for anonymous users logentry_queryset.count() ) def test_build_callbacks(self): 
response = self.get('/api/builds/%d/callbacks/' % self.build.id) self.assertEqual(202, response.status_code) self.assertEqual(0, len(response.json()['results'])) callback_url = 'http://callback.url' response = self.post('/api/builds/%d/callbacks/' % self.build.id, {'callback_url': callback_url}) self.assertEqual(202, response.status_code) self.assertEqual('OK', response.json()['message']) response = self.get('/api/builds/%d/callbacks/' % self.build.id) self.assertEqual(202, response.status_code) self.assertEqual(1, len(response.json()['results'])) response = self.post('/api/builds/%d/callbacks/' % self.build.id, {'callback_url': 'invalid-callback.url'}) self.assertEqual(400, response.status_code) self.assertEqual('Enter a valid URL.', response.json()['message']) response = self.post('/api/builds/%d/callbacks/' % self.build.id, {'callback_url': callback_url}) self.assertEqual(400, response.status_code) self.assertEqual( 'Callback with this Object reference type, Object reference id, Url and Event already exists.', response.json()['message']) def test_build_callback_headers(self): headers = '{"Authorization": "token 123456"}' self.project.project_settings = '{"CALLBACK_HEADERS": %s}' % headers self.project.save() callback_url = 'http://callback.url' response = self.post('/api/builds/%d/callbacks/' % self.build.id, {'callback_url': callback_url}) self.assertEqual(202, response.status_code) self.assertEqual('OK', response.json()['message']) self.assertEqual(1, self.build.callbacks.filter( url=callback_url, headers=headers).count()) # Check that headers in project settings gets overwritten if it comes from the user user_headers = '{"Authorization": "token 654321"}' callback_url += '/with-headers' response = self.post('/api/builds/%d/callbacks/' % self.build.id, {'callback_url': callback_url, 'callback_headers': user_headers}) self.assertEqual(202, response.status_code) self.assertEqual('OK', response.json()['message']) self.assertEqual(1, self.build.callbacks.filter( url=callback_url, headers=user_headers).count()) @patch('squad.core.tasks.prepare_report.delay') def test_zz_build_report_logentry(self, prepare_report_mock): response = self.get('/api/builds/%d/report/' % self.build3.id) self.assertEqual(202, response.status_code) report_object = self.build3.delayed_reports.last() self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) self.assertIsNotNone(report_object) self.assertIsNone(report_object.status_code) # default baseline is used self.assertEqual(report_object.baseline, None) prepare_report_mock.assert_called() user = models.User.objects.get(username='u') logentry_queryset = LogEntry.objects.filter( user_id=user.pk, object_id=report_object.pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.first().action_flag ) @patch('squad.core.tasks.prepare_report.delay') def test_build_report_baseline(self, prepare_report_mock): response = self.client.get( '/api/builds/%d/report/?baseline=%s' % (self.build3.id, self.build.id)) self.assertEqual(202, response.status_code) report_object = self.build3.delayed_reports.last() self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) report_json = self.client.get(response.json()['url']) self.assertTrue(report_json.json()['baseline'].endswith( reverse('build-status', args=[self.build.id]))) self.assertIsNotNone(report_object) self.assertIsNone(report_object.status_code) 
self.assertEqual(report_object.baseline, self.build.status) prepare_report_mock.assert_called() @patch('squad.core.tasks.prepare_report.delay') def test_build_report_baseline_cache(self, prepare_report_mock): response = self.client.get( '/api/builds/%d/report/?baseline=%s' % (self.build3.id, self.build.id)) self.assertEqual(202, response.status_code) report_object = self.build3.delayed_reports.last() report_url = response.json()['url'] self.assertTrue(report_url.endswith( reverse('delayedreport-detail', args=[report_object.pk]))) report_json = self.client.get(report_url) self.assertTrue(report_json.json()['baseline'].endswith( reverse('build-status', args=[self.build.id]))) self.assertIsNotNone(report_object) self.assertIsNone(report_object.status_code) self.assertEqual(report_object.baseline, self.build.status) prepare_report_mock.assert_called() response2 = self.client.get( '/api/builds/%d/report/?baseline=%s' % (self.build3.id, self.build.id)) self.assertEqual(report_url, response2.json()['url']) @patch('squad.core.tasks.prepare_report.delay') def test_build_report_invalid_baseline(self, prepare_report_mock): response = self.client.get( '/api/builds/%d/report/?baseline=123456789' % (self.build3.id)) self.assertEqual(202, response.status_code) report_object = self.build3.delayed_reports.last() self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) report_json = self.client.get(response.json()['url']) self.assertIsNone(report_json.json()['baseline']) self.assertIsNotNone(report_object) self.assertIsNotNone(report_object.status_code) self.assertEqual(report_object.status_code, 400) prepare_report_mock.assert_not_called() @patch('squad.core.tasks.prepare_report.delay') def test_build_report_baseline2(self, prepare_report_mock): response = self.client.get( '/api/builds/%d/report/?baseline=%s' % (self.build3.id, self.build2.id)) self.assertEqual(202, response.status_code) report_object = self.build3.delayed_reports.last() self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) report_object = self.client.get(response.json()['url']) self.assertTrue(report_object.json()['baseline'].endswith( reverse('build-status', args=[self.build2.id]))) prepare_report_mock.assert_called() @patch('squad.core.tasks.prepare_report.delay') def test_build_report_retry(self, prepare_report_mock): response = self.client.get('/api/builds/%d/report/' % self.build3.id) self.assertEqual(202, response.status_code) self.client.get('/api/builds/%d/report/' % self.build3.id) report_object = self.build3.delayed_reports.last() self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) self.assertIsNotNone(report_object) self.assertIsNone(report_object.status_code) prepare_report_mock.assert_called_once() prepare_report_mock.reset_mock() response2 = self.client.get('/api/builds/%d/report/' % self.build3.id) self.assertEqual(response.json(), response2.json()) prepare_report_mock.assert_not_called() @patch('squad.core.tasks.prepare_report.delay') def test_build_report_retry_force(self, prepare_report_mock): response = self.client.get('/api/builds/%d/report/' % self.build3.id) self.assertEqual(202, response.status_code) self.client.get('/api/builds/%d/report/' % self.build3.id) report_object = self.build3.delayed_reports.last() self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) self.assertIsNotNone(report_object) 
self.assertIsNone(report_object.status_code) prepare_report_mock.assert_called_once() prepare_report_mock.reset_mock() response2 = self.client.get( '/api/builds/%d/report/?force=true' % self.build3.id) self.assertNotEqual(response.json()['url'], response2.json()['url']) prepare_report_mock.assert_called_once() @patch('squad.core.tasks.prepare_report.delay') def test_build_report_retry_force_logentry(self, prepare_report_mock): response = self.get('/api/builds/%d/report/' % self.build3.id) self.assertEqual(202, response.status_code) user = models.User.objects.get(username='u') report_object = self.build3.delayed_reports.last() logentry_queryset = LogEntry.objects.filter( user_id=user.pk, object_id=report_object.pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.first().action_flag ) self.client.get('/api/builds/%d/report/' % self.build3.id) self.assertEqual( 2, logentry_queryset.count() ) self.assertEqual( CHANGE, logentry_queryset.first().action_flag ) self.assertTrue(response.json()['url'].endswith( reverse('delayedreport-detail', args=[report_object.pk]))) self.assertIsNotNone(report_object) self.assertIsNone(report_object.status_code) prepare_report_mock.assert_called_once() prepare_report_mock.reset_mock() response2 = self.get( '/api/builds/%d/report/?force=true' % self.build3.id) self.assertNotEqual(response.json()['url'], response2.json()['url']) prepare_report_mock.assert_called_once() report_object2 = self.build3.delayed_reports.last() logentry_queryset2 = LogEntry.objects.filter( user_id=user.pk, object_id=report_object2.pk ) self.assertEqual( 1, logentry_queryset2.count() ) self.assertEqual( ADDITION, logentry_queryset2.first().action_flag ) def test_build_testruns(self): data = self.hit('/api/builds/%d/testruns/' % self.build.id) self.assertEqual(2, len(data['results'])) def test_build_testjobs(self): data = self.hit('/api/builds/%d/testjobs/' % self.build.id) self.assertEqual(1, len(data['results'])) def test_build_testjobs_summary(self): data = self.hit('/api/builds/%d/testjobs_summary/' % self.build.id) self.assertEqual(1, len(data['results'])) self.assertEqual({'null': 1}, data['results']) def test_build_testjobs_summary_per_environment(self): data = self.hit( '/api/builds/%d/testjobs_summary/?per_environment=1' % self.build.id) self.assertEqual(1, len(data['results'])) self.assertEqual({'myenv': {'null': 1}}, data['results']) def test_build_tests(self): data = self.hit('/api/builds/%d/tests/' % self.build.id) self.assertEqual(36, len(data['results'])) def test_build_tests_per_environment(self): data = self.hit( '/api/builds/%d/tests/?environment__slug=myenv' % self.build.id) self.assertEqual(18, len(data['results'])) def test_build_tests_per_environment_not_found(self): data = self.hit( '/api/builds/%d/tests/?environment__slug=mycrazynonexistenenv' % self.build.id) self.assertEqual(0, len(data['results'])) def test_build_tests_per_suite(self): data = self.hit('/api/builds/%d/tests/?suite__slug=foo' % self.build.id) self.assertEqual(18, len(data['results'])) def test_build_tests_per_suite_not_found(self): data = self.hit( '/api/builds/%d/tests/?suite__slug=fooooooodoesreallyexist' % self.build.id) self.assertEqual(0, len(data['results'])) def test_build_tests_per_suite_and_environment(self): data = self.hit( '/api/builds/%d/tests/?environment__slug=myenv&suite__slug=foo' % self.build.id) self.assertEqual(9, len(data['results'])) data = self.hit( '/api/builds/%d/tests/?environment__slug=mycraaaaazyenv&suite__slug=foo' % 
self.build.id) self.assertEqual(0, len(data['results'])) data = self.hit( '/api/builds/%d/tests/?environment__slug=myenv&suite__slug=foooooooosuitedoestexist' % self.build.id) self.assertEqual(0, len(data['results'])) def test_build_failures_with_confidence(self): data = self.hit( '/api/builds/%d/failures_with_confidence/' % self.build3.id) self.assertEqual(data['count'], 18) self.assertIsNone(data['next']) self.assertIsNone(data['previous']) self.assertEqual(len(data['results']), 18) failure = data['results'].pop(0) self.assertEqual(failure['name'], 'foo/test2') self.assertEqual(failure['confidence'], { 'count': 2, 'passes': 2, 'score': 100.0}) def test_build_failures_with_confidence_with_first_build(self): """ The first build will not have any history, so the confidence scores for those failures should all be zero """ data = self.hit( '/api/builds/%d/failures_with_confidence/' % self.build.id) for f in data['results']: self.assertEqual(f['confidence'], { 'count': 0, 'passes': 0, 'score': 0}) def test_build_failures_with_confidence_with_pagination(self): data = self.hit( '/api/builds/%d/failures_with_confidence/?limit=2' % self.build3.id) self.assertEqual(data['count'], 18) self.assertIsNotNone(data['next']) self.assertIsNone(data['previous']) self.assertEqual(len(data['results']), 2) failure = data['results'][0] self.assertEqual(failure['name'], 'foo/test2') self.assertEqual(failure['confidence'], { 'count': 2, 'passes': 2, 'score': 100.0}) failure = data['results'][1] self.assertEqual(failure['name'], 'foo/test2') self.assertEqual(failure['confidence'], { 'count': 2, 'passes': 2, 'score': 100.0}) def test_build_failures_with_confidence_releases_only(self): data = self.hit( '/api/builds/%d/failures_with_confidence/?releases_only=1' % self.build2.id) self.assertEqual(data['count'], 18) self.assertIsNone(data['next']) self.assertIsNone(data['previous']) self.assertEqual(len(data['results']), 18) failure = data['results'].pop(0) self.assertEqual(failure['name'], 'foo/test1') # No release build yet self.assertEqual(failure['confidence'], { 'count': 0, 'passes': 0, 'score': 0.0}) self.build.is_release = True self.build.save() data = self.hit( '/api/builds/%d/failures_with_confidence/?releases_only=1' % self.build2.id) self.assertEqual(data['count'], 18) self.assertIsNone(data['next']) self.assertIsNone(data['previous']) self.assertEqual(len(data['results']), 18) failure = data['results'].pop(0) self.assertEqual(failure['name'], 'foo/test1') self.assertEqual(failure['confidence'], { 'count': 1, 'passes': 1, 'score': 100.0}) def test_build_metrics(self): data = self.hit('/api/builds/%d/metrics/' % self.build.id) self.assertEqual(1, len(data['results'])) def test_build_metadata(self): data = self.hit('/api/builds/%d/metadata/' % self.build.id) self.assertEqual('val1', data['key1']) self.assertEqual('val2', data['key2']) def test_build_metadata_by_testrun(self): data = self.hit('/api/builds/%d/metadata_by_testrun/' % self.build.id) self.assertEqual({"key1": "val1"}, data[str(self.testrun.id)]) self.assertEqual({"key2": "val2"}, data[str(self.testrun_a.id)]) def test_build_filter(self): created_at = str(self.build3.created_at.isoformat() ).replace('+00:00', 'Z') data = self.hit('/api/builds/?created_at=%s' % created_at) self.assertEqual(1, len(data['results'])) def test_build_filter_by_project(self): project_full_name = self.build3.project.full_name data = self.hit('/api/builds/?project__full_name=%s' % project_full_name) self.assertEqual(6, len(data['results'])) def 
test_build_minimum_fields(self): data = self.hit('/api/builds/%d/?fields=version' % self.build.id) should_not_exist = {'url', 'id', 'testruns', 'testjobs', 'status', 'metadata', 'finished', 'created_at', 'datetime', 'patch_id', 'keep_data', 'project', 'patch_source', 'patch_baseline'} fields = set(data.keys()) self.assertEqual(set(), should_not_exist & fields) self.assertTrue('version' in fields) self.assertEqual(1, len(fields)) def test_build_cancel(self): testjob = self.build.test_jobs.first() testjob.submitted = True testjob.save() self.assertEqual(self.build.test_jobs.filter( job_status='Canceled').count(), 0) data = self.post('/api/builds/%d/cancel/' % self.build.id, {}) self.assertEqual(data.status_code, 200) self.assertEqual(data.json()['count'], 1) self.assertEqual(self.build.test_jobs.filter( job_status='Canceled').count(), 1) def test_build_compare_400_on_unfinished_build(self): response = self.get('/api/builds/%d/compare/?target=%d' % (self.build.id, self.build2.id)) self.assertEqual(400, response.status_code) self.assertEqual( '["Cannot report regressions/fixes on non-finished builds"]', response.content.decode('utf-8')) def test_build_compare_against_same_project(self): url = '/api/builds/%d/compare/?target=%d&force=true' % ( self.build.id, self.build2.id) data = self.hit(url) expected = { "regressions": { "myenv": { "foo": ["test1"] }, "env-a": { "foo": ["test1"] } }, "fixes": {} } self.assertEqual(expected, data) def test_build_compare_against_different_project(self): url = '/api/builds/%d/compare/?target=%d&force=true' % ( self.build.id, self.build7.id) data = self.hit(url) expected = { 'regressions': { 'myenv': { 'foo': ['test2'] }, 'env-a': { 'foo': ['test2'] } }, 'fixes': { 'myenv': { 'bar': ['test2'] }, 'env-a': { 'bar': ['test2'] } } } self.assertEqual(expected, data) def test_testjob(self): data = self.hit('/api/testjobs/%d/' % self.testjob.id) self.assertEqual('myenv', data['environment']) def test_testjob_filter_by_created_at(self): very_old_date = str(datetime.datetime.now() - datetime.timedelta(days=365)) data = self.hit('/api/testjobs/?created_at=%s' % very_old_date) self.assertEqual(0, len(data['results'])) def test_testjob_resubmitted_jobs(self): data = self.hit('/api/testjobs/%d/resubmitted_jobs/' % self.testjob5.id) self.assertIn(str(self.testjob5.id), data['results'][0]['parent_job']) self.assertEqual(self.testjob6.id, data['results'][0]['id']) def test_tests(self): data = self.hit('/api/tests/') self.assertEqual(list, type(data['results'])) def test_tests_filter_by_name(self): data = self.hit('/api/tests/?name=test1') self.assertEqual(list, type(data['results'])) self.assertEqual(16, len(data['results'])) def test_tests_filter_by_name_not_found(self): data = self.hit('/api/tests/?name=test-that-does-not-exist') self.assertEqual(list, type(data['results'])) self.assertEqual(0, len(data['results'])) def test_tests_filter_by_metadata_name(self): data = self.hit('/api/tests/?metadata__name=test1') self.assertEqual(list, type(data['results'])) self.assertEqual(16, len(data['results'])) def test_tests_filter_by_metadata_id(self): metadata = models.SuiteMetadata.objects.get( suite='foo', name='test1', kind='test') data = self.hit('/api/tests/?metadata_id=%s' % metadata.id) self.assertEqual(list, type(data['results'])) self.assertEqual(8, len(data['results'])) def test_tests_filter_by_metadata_name_not_found(self): data = self.hit('/api/tests/?metadata__name=test-that-does-not-exist') self.assertEqual(list, type(data['results'])) self.assertEqual(0, 
len(data['results'])) def test_tests_filter_by_metadata_id_not_found(self): response = self.get('/api/tests/?metadata_id=100000') self.assertEqual(400, response.status_code) def test_tests_filter_by_environment(self): data = self.hit('/api/tests/?environment__slug=myenv') self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) def test_tests_filter_by_environment_id(self): data = self.hit('/api/tests/?environment_id=%s' % self.environment.id) self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) for test in data['results']: self.assertIn('/%s/' % self.environment.id, test['environment']) def test_tests_filter_by_environment_not_found(self): data = self.hit('/api/tests/?environment__slug=mycrazyenvslug') self.assertEqual(list, type(data['results'])) self.assertEqual(0, len(data['results'])) def test_tests_filter_by_environment_id_not_found(self): response = self.get('/api/tests/?environment_id=100000') self.assertEqual(400, response.status_code) def test_tests_filter_by_build(self): data = self.hit('/api/tests/?build__version=1') self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) def test_tests_filter_by_build_id(self): data = self.hit('/api/tests/?build_id=%s' % self.build.id) self.assertEqual(list, type(data['results'])) self.assertEqual(36, len(data['results'])) for test in data['results']: self.assertIn('/%s/' % self.build.id, test['build']) def test_tests_filter_by_build_not_found(self): data = self.hit( '/api/tests/?build__version=this-build-should-not-exist-really') self.assertEqual(list, type(data['results'])) self.assertEqual(0, len(data['results'])) def test_tests_filter_by_build_id_not_found(self): response = self.get('/api/tests/?build_id=100000') self.assertEqual(400, response.status_code) def test_tests_filter_by_suite(self): data = self.hit('/api/tests/?suite__slug=foo') self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) def test_tests_filter_by_suite_id(self): suite = self.project.suites.get(slug='foo') data = self.hit('/api/tests/?suite_id=%s' % suite.id) self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) for test in data['results']: self.assertIn('/%s/' % suite.id, test['suite']) def test_tests_filter_by_result_pass(self): data = self.hit('/api/tests/?result=true') self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) for test in data['results']: self.assertTrue(test['result']) def test_tests_filter_by_result_fail(self): data = self.hit('/api/tests/?result=false') self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) for test in data['results']: self.assertFalse(test['result']) def test_tests_filter_by_result_xfail(self): data = self.hit('/api/tests/?result=false&has_known_issues=true') self.assertEqual(list, type(data['results'])) self.assertEqual(1, len(data['results'])) self.assertTrue(data['results'][0]['has_known_issues']) def test_tests_filter_by_result_skip(self): data = self.hit('/api/tests/?result__isnull=true') self.assertEqual(list, type(data['results'])) self.assertEqual(1, len(data['results'])) self.assertIsNone(data['results'][0]['result']) def test_tests_with_page_size(self): data = self.hit('/api/tests/?limit=2') self.assertEqual(list, type(data['results'])) self.assertEqual(2, len(data['results'])) def test_tests_minimal_fields(self): data = self.hit('/api/tests/?fields=name,status') self.assertEqual(list, 
type(data['results'])) self.assertEqual(50, len(data['results'])) should_not_exist = {'url', 'build', 'environment', 'test_run', 'short_name', 'result', 'log', 'has_known_issues', 'suite', 'known_issues'} for test in data['results']: fields = set(test.keys()) self.assertEqual(set(), should_not_exist & fields) self.assertTrue('name' in fields) self.assertTrue('status' in fields) self.assertEqual(2, len(fields)) def test_tests_with_known_issues_fields(self): data = self.hit('/api/tests/?fields=known_issues') self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) should_not_exist = {'url', 'build', 'environment', 'test_run', 'short_name', 'result', 'log', 'has_known_issues', 'suite', 'name', 'status'} for test in data['results']: fields = set(test.keys()) self.assertEqual(set(), should_not_exist & fields) self.assertTrue('known_issues' in fields) self.assertEqual(1, len(fields)) def test_tests_no_status_fields(self): data = self.hit('/api/tests/?fields=name') self.assertEqual(list, type(data['results'])) self.assertEqual(50, len(data['results'])) should_not_exist = {'url', 'build', 'environment', 'test_run', 'short_name', 'result', 'log', 'has_known_issues', 'suite', 'status', 'known_issues'} for test in data['results']: fields = set(test.keys()) self.assertEqual(set(), should_not_exist & fields) self.assertTrue('name' in fields) self.assertEqual(1, len(fields)) def test_metrics(self): data = self.hit('/api/metrics/') self.assertEqual(list, type(data['results'])) def test_metrics_with_page_size(self): self.receive("2020-01-01", "myenv2", metrics={ "foo": {'value': 1, 'unit': 'boxes'}, "bar/baz": {'value': 2, 'unit': None}}) data = self.hit('/api/metrics/?limit=2') self.assertEqual(list, type(data['results'])) self.assertEqual(2, len(data['results'])) def test_metrics_filter_by_metadata_name(self): data = self.hit('/api/metrics/?metadata__name=mymetric') self.assertEqual(list, type(data['results'])) self.assertEqual(1, len(data['results'])) def test_metrics_filter_by_metadata_name_not_found(self): data = self.hit( '/api/metrics/?metadata__name=metric-that-does-not-exist') self.assertEqual(list, type(data['results'])) self.assertEqual(0, len(data['results'])) def test_metrics_filter_by_environment(self): data = self.hit('/api/metrics/?environment__slug=myenv') self.assertEqual(list, type(data['results'])) self.assertEqual(1, len(data['results'])) def test_metrics_filter_by_environment_not_found(self): data = self.hit('/api/metrics/?environment__slug=mycrazyenvslug') self.assertEqual(list, type(data['results'])) self.assertEqual(0, len(data['results'])) def test_metrics_filter_by_build(self): data = self.hit('/api/metrics/?build__version=1') self.assertEqual(list, type(data['results'])) self.assertEqual(1, len(data['results'])) def test_metrics_filter_by_build_not_found(self): data = self.hit( '/api/metrics/?build__version=this-build-should-not-exist-really') self.assertEqual(list, type(data['results'])) self.assertEqual(0, len(data['results'])) def test_testruns(self): data = self.hit('/api/testruns/%d/' % self.testrun.id) self.assertEqual(self.testrun.id, data['id']) def test_testruns_null_metrics_attr(self): data = self.hit("/api/testruns/%d/" % self.testrun4.id) self.assertIsNone(data["metrics"]) self.assertIsNone(data["metrics_file"]) def test_testruns_null_tests_attr(self): data = self.hit("/api/testruns/%d/" % self.testrun4.id) self.assertIsNone(data["tests"]) self.assertIsNone(data["tests_file"]) def test_testruns_not_null_metrics_attr(self): testrun = 
self.receive( "2020-01-01", "myenv2", metrics={"foo": {'value': 1, 'unit': 'boxes'}, "bar/baz": {'value': 2, 'unit': None}} ) data = self.hit("/api/testruns/%d/" % testrun.id) self.assertIsNotNone(data["metrics"]) self.assertIsNotNone(data["metrics_file"]) def test_testruns_not_null_tests_attr(self): testrun = self.receive( "2017-01-01", "myenv2", tests={"foo": "pass", "bar": "fail"} ) data = self.hit("/api/testruns/%d/" % testrun.id) self.assertIsNotNone(data["tests"]) self.assertIsNotNone(data["tests_file"]) def test_testruns_filter(self): data = self.hit('/api/testruns/?completed=%s&build__id=%s' % (self.testrun4.completed, self.build4.id)) self.assertEqual(1, len(data['results'])) def test_testruns_tests(self): data = self.hit('/api/testruns/%d/tests/' % self.testrun.id) self.assertEqual(list, type(data['results'])) def test_testruns_metrics(self): data = self.hit('/api/testruns/%d/metrics/' % self.testrun.id) self.assertEqual(list, type(data['results'])) def test_testruns_attachments(self): data = self.hit('/api/testruns/%d/' % self.testrun.id) self.assertEqual([], data['attachments']) filename = 'benchmarks.json' filepath = 'test/api/%s' % filename contents = None with open(filepath, 'rb') as fp: contents = fp.read() attachment = self.testrun.attachments.create( filename=filename, length=147) attachment.save_file(filename, contents) data = self.hit('/api/testruns/%d/' % self.testrun.id) expected = { 'download_url': 'http://testserver/api/testruns/%d/attachments/?filename=%s' % (self.testrun.id, filename), 'filename': filename, 'length': 147, 'mimetype': 'application/octet-stream' } self.assertEqual([expected], data['attachments']) response = self.client.get( '/api/testruns/%d/attachments/?filename=%s' % (self.testrun.id, filename)) self.assertEqual(200, response.status_code) self.assertEqual(contents, response.content) attachment.storage.delete(False) def test_testruns_status(self): ParseTestRunData()(self.testrun) RecordTestRunStatus()(self.testrun) data = self.hit('/api/testruns/%d/status/' % self.testrun.id) data2 = self.hit( '/api/testruns/%d/status/?suite__isnull=true' % self.testrun.id) self.assertEqual(3, len(data['results'])) self.assertEqual(10, data['results'][0]['tests_pass']) self.assertEqual(8, data['results'][0]['tests_fail']) self.assertEqual(0, data['results'][0]['tests_xfail']) self.assertEqual(0, data['results'][0]['tests_skip']) self.assertEqual(0, data['results'][0]['metrics_summary']) self.assertEqual(1, len(data2['results'])) self.assertEqual(None, data2['results'][0]['suite']) def test_testjob_definition(self): data = self.hit('/api/testjobs/%d/definition/' % self.testjob.id) self.assertEqual('foo: bar', data) def test_testjob_resubmit(self): data = self.post('/api/testjobs/%d/resubmit/' % self.testjob6.id, {}) self.assertEqual(data.status_code, 200) self.assertEqual(data.json()['message'], "OK") user = models.User.objects.get(username='u') logentry_queryset = LogEntry.objects.filter( user_id=user.pk, object_id=self.testjob6.resubmitted_jobs.first().pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.first().action_flag ) def test_testjob_force_resubmit(self): data = self.post('/api/testjobs/%d/force_resubmit/' % self.testjob5.id, {}) self.assertEqual(data.status_code, 200) self.assertEqual(data.json()['message'], "OK") user = models.User.objects.get(username='u') logentry_queryset = LogEntry.objects.filter( user_id=user.pk, object_id=self.testjob5.resubmitted_jobs.last().pk ) self.assertEqual( 1, 
logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.first().action_flag ) def test_testjob_cancel(self): data = self.post('/api/testjobs/%d/cancel/' % self.testjob5.id, {}) self.assertEqual(data.status_code, 200) self.assertEqual(data.json()['job_id'], self.testjob5.job_id) self.assertEqual(data.json()['status'], self.testjob5.job_status) user = models.User.objects.get(username='u') logentry_queryset = LogEntry.objects.filter( user_id=user.pk, object_id=self.testjob5.pk ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( CHANGE, logentry_queryset.first().action_flag ) def test_testjob_cancel_fail(self): data = self.post('/api/testjobs/%d/cancel/' % self.testjob2.id, {}) self.assertEqual(data.status_code, 200) self.assertEqual(data.json()['job_id'], self.testjob2.job_id) self.assertEqual(data.json()['status'], 'Canceled') @patch('squad.ci.tasks.fetch.delay') def test_testjob_fetch(self, fetch_task): data = self.post('/api/testjobs/%d/fetch/' % self.testjob5.id, {}) self.assertEqual(data.status_code, 200) self.assertEqual(data.json()['job_id'], self.testjob5.job_id) self.assertEqual(data.json()['status'], 'Queued for fetching') fetch_task.assert_called_with(self.testjob5.id) def test_testjob_backend_filter(self): data = self.get('/api/testjobs/?backend__implementation_type=fake') self.assertEqual(data.status_code, 200) for testjob in data.json()['results']: self.assertIn(f'/{self.fake_backend.id}/', testjob['backend']) def test_backends(self): data = self.hit('/api/backends/') self.assertEqual('foobar', data['results'][0]['name']) def test_backends_id_field_only(self): data = self.hit('/api/backends/?fields=id') self.assertNotIn('implementation_type', data['results'][0].keys()) def test_backends_only_four_fields(self): data = self.hit( '/api/backends/?fields=name,implementation_type,poll_interval,max_fetch_attempts') self.assertListEqual(['name', 'implementation_type', 'poll_interval', 'max_fetch_attempts'], list(data['results'][0].keys())) self.assertNotIn('id', data['results'][0].keys()) def test_environments(self): data = self.hit('/api/environments/') self.assertEqual( ['env-a', 'myenv'], list(sorted(set([item['slug'] for item in data['results']])))) def test_email_template(self): data = self.hit('/api/emailtemplates/') self.assertEqual('fooTemplate', data['results'][0]['name']) def test_groups(self): data = self.hit('/api/groups/') self.assertEqual('mygroup', data['results'][0]['slug']) def test_groups_slug_field_only(self): data = self.hit('/api/groups/?fields=slug') self.assertEqual('mygroup', data['results'][0]['slug']) self.assertNotIn('id', data['results'][0].keys()) def test_patch_source(self): data = self.hit('/api/patchsources/') self.assertEqual(1, len(data['results'])) def test_known_issues(self): data = self.hit('/api/knownissues/') self.assertEqual(2, len(data['results'])) def test_known_issues_filter_by_environment(self): env_id = self.environment_a.id data = self.hit('/api/knownissues/?environment=%d' % env_id) self.assertEqual(1, len(data['results'])) self.assertEqual('knownissue_bar', data['results'][0]['title']) def test_project_compare_builds_with_finished_status_and_regressions(self): foo_suite, _ = self.project.suites.get_or_create(slug='foo') foo_metadata, _ = models.SuiteMetadata.objects.get_or_create( suite=foo_suite.slug, name='dummy', kind='test') self.testrun4.tests.get_or_create(suite=foo_suite, metadata=foo_metadata, result=True, build=self.testrun4.build, environment=self.testrun4.environment) 
self.testrun6.tests.get_or_create(suite=foo_suite, metadata=foo_metadata, result=False, build=self.testrun6.build, environment=self.testrun6.environment) UpdateProjectStatus()(self.testrun4) UpdateProjectStatus()(self.testrun6) data = self.hit('/api/projects/%d/compare_builds/?baseline=%d&to_compare=%d' % (self.project.id, self.build4.id, self.build6.id)) self.assertEqual(1, len(data['regressions']['myenv']['foo'])) def test_project_compare_builds_by_metrics(self): receive = ReceiveTestRun(self.project) baseline = self.project.builds.create(version='baseline-metric') target = self.project.builds.create(version='target-metric') # Add regression self.project.thresholds.create(name='foo/regressed-metric') receive(baseline.version, 'myenv', metrics_file='{"foo/regressed-metric": 1}') receive(target.version, 'myenv', metrics_file='{"foo/regressed-metric": 2}') # Add improvement self.project.thresholds.create(name='bar/improved-metric') receive(baseline.version, 'myenv', metrics_file='{"bar/improved-metric": 2}') receive(target.version, 'myenv', metrics_file='{"bar/improved-metric": 1}') data = self.hit('/api/projects/%d/compare_builds/?baseline=%d&to_compare=%d&by=metrics' % (self.project.id, baseline.id, target.id)) self.assertEqual(1, len(data['regressions']['myenv']['foo'])) self.assertEqual(1, len(data['fixes']['myenv']['bar'])) def test_project_compare_builds_with_non_finished_status(self): response = self.client.get('/api/projects/%d/compare_builds/?baseline=%d&to_compare=%d' % (self.project.id, self.build2.id, self.build3.id)) self.assertEqual(400, response.status_code) def test_project_compare_builds_with_non_finished_status_force_unfinished(self): response = self.client.get('/api/projects/%d/compare_builds/?baseline=%d&to_compare=%d&force=1' % (self.project.id, self.build2.id, self.build3.id)) self.assertEqual(200, response.status_code) def test_project_compare_builds_with_finished_status_with_verions_as_args(self): response = self.client.get('/api/projects/%s/compare_builds/?baseline=%s&to_compare=%s' % (self.project.id, self.build4.version, self.build6.version)) self.assertEqual(400, response.status_code) def test_suites(self): data = self.hit('/api/suites/') self.assertEqual(5, data['count']) def test_suite_tests(self): foo_suite = self.project.suites.get(slug='foo') data = self.hit('/api/suites/%d/tests/?limit=1000' % foo_suite.id) self.assertEqual(54, len(data['results'])) def test_metricthresholds_add(self): metric_name = 'the-threshold' response = self.post( '/api/metricthresholds/', { 'project': "http://testserver/api/projects/%d/" % self.project.id, 'name': metric_name, } ) self.assertEqual(201, response.status_code) self.assertEqual(1, self.project.thresholds.filter( name=metric_name).count()) self.hit('/api/metricthresholds/%d/' % self.project.thresholds.first().id) def test_metricthresholds_duplicates_all_envs(self): metric_name = 'duplicated-threshold-all-envs' response = self.post( '/api/metricthresholds/', { 'project': "http://testserver/api/projects/%d/" % self.project.id, 'name': metric_name, } ) self.assertEqual(201, response.status_code) self.assertEqual(1, self.project.thresholds.filter( name=metric_name).count()) # already exists project-wide response = self.post( '/api/metricthresholds/', { 'project': "http://testserver/api/projects/%d/" % self.project.id, 'name': metric_name, 'environment': "http://testserver/api/environments/%d/" % self.environment.id } ) self.assertEqual(400, response.status_code) self.assertEqual(1, self.project.thresholds.filter( 
name=metric_name).count()) def test_metricthresholds_duplicates_specific_env(self): metric_name = 'duplicated-threshold-specific-env' response = self.post( '/api/metricthresholds/', { 'project': "http://testserver/api/projects/%d/" % self.project.id, 'name': metric_name, 'environment': "http://testserver/api/environments/%d/" % self.environment.id } ) self.assertEqual(201, response.status_code) self.assertEqual(1, self.project.thresholds.filter( name=metric_name).count()) # already exists an environment-specific one response = self.post( '/api/metricthresholds/', { 'project': "http://testserver/api/projects/%d/" % self.project.id, 'name': metric_name, } ) self.assertEqual(400, response.status_code) self.assertEqual(1, self.project.thresholds.filter( name=metric_name).count()) def test_statuses(self): ParseTestRunData()(self.testrun) RecordTestRunStatus()(self.testrun) data = self.hit('/api/statuses/') self.assertEqual(3, len(data['results'])) data = self.hit( f'/api/statuses/?test_run__environment_id={self.environment2.id}') self.assertEqual(0, len(data['results'])) data = self.hit( f'/api/statuses/?test_run__environment_id={self.environment.id}') self.assertEqual(3, len(data['results']))
class RestApiTest(APITestCase): def setUp(self): pass def hit(self, url): pass def post(self, url, data): pass def get(self, url): pass def receive(self, datestr, env, metrics={}, tests={}): pass def test_root(self): pass def test_projects(self): pass def test_project_basic_settings(self): pass def test_project_builds(self): pass def test_project_test_results(self): pass def test_create_project_with_enabled_plugin_list_1_element(self): pass def test_create_project_with_enabled_plugin_list_2_elements(self): pass def test_create_project_with_non_admin_account(self): pass def test_project_subscribe_unsubscribe_email(self): pass def test_project_unsubscribe_email_different_project(self): pass def test_project_subscribe_invalid_email(self): pass def test_project_unsubscribe_invalid_email(self): pass def test_project_subscribe_unsubscribe_user(self): pass def test_project_subscribe_missing_email(self): pass def test_project_unsubscribe_missing_email(self): pass def test_project_filter_by_datetime(self): pass def test_builds(self): pass def test_builds_id_filter(self): pass def test_builds_status(self): pass def test_builds_email_missing_status(self): pass def test_builds_email_missing_status(self): pass def test_builds_email_custom_template(self): pass def test_builds_email_custom_invalid_template(self): pass def test_builds_email_custom_baseline(self): pass def test_builds_email_custom_baseline_html(self): pass def test_builds_email_custom_baseline_missing_status(self): pass def test_builds_email_custom_invalid_baseline(self): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report(self, prepare_report_mock): pass def test_build_callbacks(self): pass def test_build_callback_headers(self): pass @patch('squad.core.tasks.prepare_report.delay') def test_zz_build_report_logentry(self, prepare_report_mock): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report_baseline(self, prepare_report_mock): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report_baseline_cache(self, prepare_report_mock): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report_invalid_baseline(self, prepare_report_mock): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report_baseline2(self, prepare_report_mock): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report_retry(self, prepare_report_mock): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report_retry_force(self, prepare_report_mock): pass @patch('squad.core.tasks.prepare_report.delay') def test_build_report_retry_force_logentry(self, prepare_report_mock): pass def test_build_testruns(self): pass def test_build_testjobs(self): pass def test_build_testjobs_summary(self): pass def test_build_testjobs_summary_per_environment(self): pass def test_build_tests(self): pass def test_build_tests_per_environment(self): pass def test_build_tests_per_environment_not_found(self): pass def test_build_tests_per_suite(self): pass def test_build_tests_per_suite_not_found(self): pass def test_build_tests_per_suite_and_environment(self): pass def test_build_failures_with_confidence(self): pass def test_build_failures_with_confidence_with_first_build(self): ''' The first build will not have any history, so the confidence scores for those failures should all be zero ''' pass def test_build_failures_with_confidence_with_pagination(self): pass def test_build_failures_with_confidence_releases_only(self): pass def test_build_metrics(self): pass def 
test_build_metadata(self): pass def test_build_metadata_by_testrun(self): pass def test_build_filter(self): pass def test_build_filter_by_project(self): pass def test_build_minimum_fields(self): pass def test_build_cancel(self): pass def test_build_compare_400_on_unfinished_build(self): pass def test_build_compare_against_same_project(self): pass def test_build_compare_against_different_project(self): pass def test_testjob(self): pass def test_testjob_filter_by_created_at(self): pass def test_testjob_resubmitted_jobs(self): pass def test_tests(self): pass def test_tests_filter_by_name(self): pass def test_tests_filter_by_name_not_found(self): pass def test_tests_filter_by_metadata_name(self): pass def test_tests_filter_by_metadata_id(self): pass def test_tests_filter_by_metadata_name_not_found(self): pass def test_tests_filter_by_metadata_id_not_found(self): pass def test_tests_filter_by_environment(self): pass def test_tests_filter_by_environment_id(self): pass def test_tests_filter_by_environment_not_found(self): pass def test_tests_filter_by_environment_id_not_found(self): pass def test_tests_filter_by_build(self): pass def test_tests_filter_by_build_id(self): pass def test_tests_filter_by_build_not_found(self): pass def test_tests_filter_by_build_id_not_found(self): pass def test_tests_filter_by_suite(self): pass def test_tests_filter_by_suite_id(self): pass def test_tests_filter_by_result_pass(self): pass def test_tests_filter_by_result_fail(self): pass def test_tests_filter_by_result_xfail(self): pass def test_tests_filter_by_result_skip(self): pass def test_tests_with_page_size(self): pass def test_tests_minimal_fields(self): pass def test_tests_with_known_issues_fields(self): pass def test_tests_no_status_fields(self): pass def test_metrics(self): pass def test_metrics_with_page_size(self): pass def test_metrics_filter_by_metadata_name(self): pass def test_metrics_filter_by_metadata_name_not_found(self): pass def test_metrics_filter_by_environment(self): pass def test_metrics_filter_by_environment_not_found(self): pass def test_metrics_filter_by_build(self): pass def test_metrics_filter_by_build_not_found(self): pass def test_testruns(self): pass def test_testruns_null_metrics_attr(self): pass def test_testruns_null_tests_attr(self): pass def test_testruns_not_null_metrics_attr(self): pass def test_testruns_not_null_tests_attr(self): pass def test_testruns_filter(self): pass def test_testruns_tests(self): pass def test_testruns_metrics(self): pass def test_testruns_attachments(self): pass def test_testruns_status(self): pass def test_testjob_definition(self): pass def test_testjob_resubmitted_jobs(self): pass def test_testjob_force_resubmit(self): pass def test_testjob_cancel(self): pass def test_testjob_cancel_fail(self): pass @patch('squad.ci.tasks.fetch.delay') def test_testjob_fetch(self, fetch_task): pass def test_testjob_backend_filter(self): pass def test_backends(self): pass def test_backends_id_field_only(self): pass def test_backends_only_four_fields(self): pass def test_environments(self): pass def test_email_template(self): pass def test_groups(self): pass def test_groups_slug_field_only(self): pass def test_patch_source(self): pass def test_known_issues(self): pass def test_known_issues_filter_by_environment(self): pass def test_project_compare_builds_with_finished_status_and_regressions(self): pass def test_project_compare_builds_by_metrics(self): pass def test_project_compare_builds_with_non_finished_status(self): pass def 
test_project_compare_builds_with_non_finished_status_force_unfinished(self): pass def test_project_compare_builds_with_finished_status_with_verions_as_args(self): pass def test_suites(self): pass def test_suite_tests(self): pass def test_metricthresholds_add(self): pass def test_metricthresholds_duplicates_all_envs(self): pass def test_metricthresholds_duplicates_specific_env(self): pass def test_statuses(self): pass
152
1
9
0
9
0
1
0.02
1
17
10
0
141
40
141
141
1,431
187
1,231
462
1,079
22
907
451
765
4
1
3
157
145,581
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/backend/test_lava.py
test.ci.backend.test_lava.LavaTest
class LavaTest(TestCase): def setUp(self): ci_infra_error_messages = [RESUBMIT_STRING, RESUBMIT_STRING2, RESUBMIT_STRING4] self.backend = Backend.objects.create( url='http://example.com/RPC2', username='myuser', token='mypassword', implementation_type='lava', backend_settings='{"CI_LAVA_INFRA_ERROR_MESSAGES": %s, "CI_LAVA_HANDLE_BOOT": true}' % ci_infra_error_messages, ) self.group = Group.objects.create( name="group_foo" ) self.project = Project.objects.create( name="project_foo", group=self.group, ) self.build = self.project.builds.create(version='1') def test_detect(self): impl = self.backend.get_implementation() self.assertIsInstance(impl, LAVABackend) @patch("squad.ci.backend.lava.Backend.__submit__", return_value='1234') def test_submit(self, __submit__): lava = LAVABackend(None) test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, target=self.project, backend=self.backend) self.assertEqual(['1234'], lava.submit(testjob)) self.assertEqual('bar', testjob.name) __submit__.assert_called_with(test_definition) def test_check_job_definition(self): lava = LAVABackend(None) definition = 'bad: "mismatch quotes""' check = lava.check_job_definition(definition) self.assertIn('found unexpected end of stream', check) def test_lava_job_name(self): lava = LAVABackend(None) self.assertIsNone(lava._Backend__lava_job_name('no name:')) self.assertIsNone(lava._Backend__lava_job_name('- list')) self.assertIsNone(lava._Backend__lava_job_name('string')) self.assertEqual('', lava._Backend__lava_job_name('job_name:')) self.assertEqual( 'job-name', lava._Backend__lava_job_name('job_name: job-name')) truncated_name = lava._Backend__lava_job_name( 'job_name: ' + ('a' * 300)) self.assertEqual(255, len(truncated_name)) @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_submit_timeout(self, post): test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, target=self.project, backend=self.backend) self.assertRaises(requests.exceptions.Timeout, self.backend.submit, testjob) @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_submit_rest_timeout(self, post): self.backend.url.replace("RPC2/", "api/v0.2/") test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, target=self.project, backend=self.backend) self.assertRaises(requests.exceptions.Timeout, self.backend.submit, testjob) @patch("squad.ci.backend.lava.Backend.__cancel_job__", return_value=True) def test_cancel(self, __cancel__): test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, submitted=True, job_id="12345", target=self.project, backend=self.backend) testjob.cancel() __cancel__.assert_called() @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_cancel_timeout(self, __cancel__): test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, submitted=True, job_id="12345", target=self.project, backend=self.backend) self.assertRaises(requests.exceptions.Timeout, testjob.cancel) @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_cancel_rest_timeout(self, __cancel__): self.backend.url.replace("RPC2/", "api/v0.2/") test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, submitted=True, job_id="12345", target=self.project, backend=self.backend) self.assertRaises(requests.exceptions.Timeout, testjob.cancel) @patch("squad.ci.backend.lava.Backend.__submit__", return_value=['1234.0', 
'1234.1']) def test_submit_multinode(self, __submit__): lava = LAVABackend(None) test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, target=self.project, backend=self.backend) self.assertEqual(['1234.0', '1234.1'], lava.submit(testjob)) self.assertEqual('bar', testjob.name) __submit__.assert_called_with(test_definition) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_fetch_basics(self, get_results, get_details, test_log): lava = LAVABackend(self.backend) testjob = TestJob( job_id='9999', target=self.project, backend=self.backend) results = lava.fetch(testjob) get_details.assert_called_with('9999') get_results.assert_called_with('9999') self.assertEqual('Complete', results[0]) testjob.refresh_from_db() self.assertIsNotNone(testjob.started_at) self.assertIsNotNone(testjob.ended_at) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_INVALID_DATES) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_fetch_invalid_dates(self, get_results, get_details, test_log): lava = LAVABackend(self.backend) testjob = TestJob( job_id='9999', target=self.project, backend=self.backend) results = lava.fetch(testjob) get_details.assert_called_with('9999') get_results.assert_called_with('9999') self.assertEqual('Complete', results[0]) testjob.refresh_from_db() self.assertIsNone(testjob.started_at) self.assertIsNone(testjob.ended_at) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_START_DATE) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_fetch_missing_dates(self, get_results, get_details, test_log): lava = LAVABackend(self.backend) testjob = TestJob( job_id='9999', target=self.project, backend=self.backend) results = lava.fetch(testjob) get_details.assert_called_with('9999') get_results.assert_called_with('9999') self.assertEqual('Complete', results[0]) testjob.refresh_from_db() self.assertIsNotNone(testjob.started_at) self.assertIsNone(testjob.ended_at) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_RUNNING) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__") def test_fetch_not_finished(self, get_results, get_details): lava = LAVABackend(None) testjob = TestJob( job_id='9999', target=self.project, backend=self.backend) lava.fetch(testjob) get_results.assert_not_called() @patch("squad.ci.backend.lava.Backend.__get_job_details__", side_effect=requests.exceptions.Timeout) def test_fetch_timeout(self, get_details): lava = LAVABackend(None) testjob = TestJob( job_id='9999', target=self.project, backend=self.backend) # Make sure lava.fetch() raises fetch issue, # backend.fetch() will increase testjob.fetch_attempt accordingly with self.assertRaises(TemporaryFetchIssue): lava.fetch(testjob) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def 
test_parse_results_metadata(self, get_results, get_details, test_log): lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', target=self.project, backend=self.backend) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertEqual(JOB_METADATA, metadata) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_NO_METADATA) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_empty_metadata(self, get_results, get_details, test_log): lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', target=self.project, backend=self.backend) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertEqual({}, metadata) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_WITH_SUITE_VERSIONS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_WITH_SUITE_VERSIONS) def test_parse_results_metadata_with_suite_versions(self, get_results, get_details, test_log): lava = LAVABackend(None) testjob = TestJob( job_id='1234', target=self.project, backend=self.backend) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertEqual({"suite1": "1.0"}, metadata['suite_versions']) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_backend_settings(self, get_results, get_details, test_log): self.backend.backend_settings = 'CI_LAVA_HANDLE_SUITE: true' self.backend.save() lava = self.backend testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics self.assertEqual(True, lava.get_implementation( ).settings.get('CI_LAVA_HANDLE_SUITE')) self.assertEqual(2, results.count()) self.assertEqual(3, metrics.count()) self.assertEqual(0, results.filter( metadata__name='device_foo').count()) self.assertEqual(True, results.filter( metadata__name='validate').get().result) self.assertEqual(3, testjob.testrun.metrics.count()) self.assertEqual(29.72, metrics.filter( metadata__name='auto-login-action').get().result) self.assertEqual(0.0, metrics.filter( metadata__name='power-off').get().result) self.assertEqual(10.0, metrics.filter( metadata__name='case_foo').get().result) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_project_settings(self, get_results, get_details, test_log): self.project.project_settings = 'CI_LAVA_HANDLE_SUITE: true' self.project.save() lava = self.backend testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics 
self.assertEqual(None, lava.get_implementation( ).settings.get('CI_LAVA_HANDLE_SUITE')) self.assertEqual(2, results.count()) self.assertEqual(3, metrics.count()) self.assertEqual(0, results.filter( metadata__name='device_foo').count()) self.assertEqual(True, results.filter( metadata__name='validate').get().result) self.assertEqual(3, testjob.testrun.metrics.count()) self.assertEqual(29.72, metrics.filter( metadata__name='auto-login-action').get().result) self.assertEqual(0.0, metrics.filter( metadata__name='power-off').get().result) self.assertEqual(10.0, metrics.filter( metadata__name='case_foo').get().result) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_empty_project_settings(self, get_results, get_details, test_log): self.project.project_settings = '' lava = self.backend testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics self.assertEqual(None, lava.get_implementation( ).settings.get('CI_LAVA_HANDLE_SUITE')) self.assertEqual(2, results.count()) self.assertEqual(2, metrics.count()) self.assertEqual(1, results.filter( metadata__name='device_foo').count()) self.assertEqual(0, results.filter(metadata__name='validate').count()) self.assertEqual(2, testjob.testrun.metrics.count()) self.assertEqual(0, metrics.filter(metadata__name='power-off').count()) self.assertEqual(0, metrics.filter( metadata__name='auto-login-action').count()) self.assertEqual(29.72, metrics.filter( metadata__name='time-device_foo').get().result) self.assertEqual(10.0, metrics.filter( metadata__name='case_foo').get().result) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_project_settings_overwrites_backend(self, get_results, get_details, test_log): self.backend.backend_settings = 'CI_LAVA_HANDLE_SUITE: true' self.backend.save() lava = self.backend # Project settings has higher priority than backend settings self.project.project_settings = 'CI_LAVA_HANDLE_SUITE: false' self.project.save() testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics self.assertEqual(True, lava.get_implementation( ).settings.get('CI_LAVA_HANDLE_SUITE')) self.assertEqual(1, results.count()) self.assertEqual(1, metrics.count()) self.assertEqual(0, results.filter( metadata__name='device_foo').count()) self.assertEqual(0, results.filter(metadata__name='validate').count()) self.assertEqual(1, testjob.testrun.metrics.count()) self.assertEqual(0, metrics.filter(metadata__name='power-off').count()) self.assertEqual(0, metrics.filter( metadata__name='auto-login-action').count()) self.assertEqual(0, metrics.filter( metadata__name='time-device_foo').count()) self.assertEqual(10.0, metrics.filter( metadata__name='case_foo').get().result) 
@patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_boot(self, get_results, get_details, download_test_log): self.backend.backend_settings = '' lava = self.backend testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics self.assertEqual(1, results.count()) self.assertEqual(1, metrics.count()) self.assertEqual(0, results.filter( metadata__name='device_foo').count()) self.assertEqual(0, metrics.filter( metadata__name='time-device_foo').count()) self.assertEqual(1, results.filter(metadata__name='case_bar').count()) self.assertEqual(1, metrics.filter(metadata__name='case_foo').count()) self.assertEqual(10.0, metrics.filter( metadata__name='case_foo').get().result) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_INCOMPLETE) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_WITH_JOB_INFRA_ERROR) def test_parse_results_ignore_infra_errors(self, get_results, get_details, download_test_log): self.backend.backend_settings = '{"CI_LAVA_WORK_AROUND_INFRA_ERRORS": true}' lava = self.backend testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics self.assertEqual(True, testjob.testrun.completed) self.assertEqual(1, results.count()) self.assertEqual(1, metrics.count()) self.assertEqual(0, results.filter( metadata__name='device_foo').count()) self.assertEqual(0, metrics.filter( metadata__name='time-device_foo').count()) self.assertEqual(1, results.filter(metadata__name='case_bar').count()) self.assertEqual(1, metrics.filter(metadata__name='case_foo').count()) self.assertEqual(10.0, metrics.filter( metadata__name='case_foo').get().result) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_INCOMPLETE) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_WITH_JOB_INFRA_ERROR) def test_parse_results_dont_ignore_infra_errors(self, get_results, get_details, download_test_log): self.backend.backend_settings = '{"CI_LAVA_WORK_AROUND_INFRA_ERRORS": false}' lava = self.backend testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics self.assertEqual(False, testjob.testrun.completed) self.assertEqual(0, results.count()) self.assertEqual(0, metrics.count()) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_handle_lava_suite_and_ignore_lava_boot(self, get_results, get_details, download_test_log): 
self.backend.backend_settings = '{"CI_LAVA_HANDLE_SUITE": true, "CI_LAVA_HANDLE_BOOT": false}' self.backend.save() lava = self.backend testjob = TestJob.objects.create( job_id='1234', backend=self.backend, target=self.project, target_build=self.build) lava.fetch(testjob.id) testjob.refresh_from_db() results = testjob.testrun.tests metrics = testjob.testrun.metrics self.assertEqual(True, lava.get_implementation( ).settings.get('CI_LAVA_HANDLE_SUITE')) self.assertEqual(False, lava.get_implementation( ).settings.get('CI_LAVA_HANDLE_BOOT')) self.assertEqual(2, results.count()) self.assertEqual(3, metrics.count()) self.assertEqual(0, results.filter( metadata__name='device_foo').count()) self.assertEqual(True, results.filter( metadata__name='validate').get().result) self.assertEqual(29.72, metrics.filter( metadata__name='auto-login-action').get().result) self.assertEqual(0.0, metrics.filter( metadata__name='power-off').get().result) self.assertEqual(10.0, metrics.filter( metadata__name='case_foo').get().result) self.assertEqual(0, metrics.filter( metadata__name='time-device_foo').count()) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results(self, get_results, get_details, download_test_log): lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', target=self.project, backend=self.backend) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertEqual(len(results), 2) self.assertIn('log', results['DefinitionFoo/case_bar'].keys()) self.assertEqual(len(metrics), 2) self.assertEqual(10, metrics['DefinitionFoo/case_foo']["value"]) self.assertEqual('job_foo', testjob.name) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_REST) def test_parse_results_rest(self, get_results, get_details, download_test_log): # this test is a workaround of LAVA bug # https://git.lavasoftware.org/lava/lava/-/issues/449 lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', target=self.project, backend=self.backend) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertEqual(len(results), 2) self.assertIn('log', results['DefinitionFoo/case_bar'].keys()) self.assertEqual(len(metrics), 2) self.assertEqual(10, metrics['DefinitionFoo/case_foo']["value"]) self.assertEqual('job_foo', testjob.name) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_clone_measurements(self, get_results, get_details, test_log): self.backend.backend_settings = '{"CI_LAVA_CLONE_MEASUREMENTS": true, "CI_LAVA_HANDLE_BOOT": true}' # Project settings has higher priority than backend settings self.project.project_settings = 'CI_LAVA_CLONE_MEASUREMENTS: true' lava = LAVABackend(self.backend) testjob = TestJob( job_id='1235', backend=self.backend, target=self.project, target_build=self.build, environment="foo_env") status, completed, metadata, results, metrics, logs = lava.fetch( testjob) 
self.assertEqual(len(results), 3) self.assertEqual(len(metrics), 2) self.assertEqual(10, metrics['DefinitionFoo/case_foo']["value"]) self.assertEqual('job_foo', testjob.name) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE) def test_completed(self, get_results, get_details, get_logs): lava = LAVABackend(None) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertFalse(completed) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_STR) def test_incomplete_string_results_metadata(self, get_results, get_details, get_logs): lava = LAVABackend(None) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertFalse(completed) self.assertEqual( TEST_RESULTS_INFRA_FAILURE_STR[0]['metadata'], testjob.failure) @patch("squad.ci.backend.lava.Backend.__resubmit__") @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_STR_NO_MESSAGE) def test_incomplete_string_results_metadata_null_error_msg(self, get_results, get_details, get_logs, resubmit): self.project.project_settings = '{"CI_LAVA_INFRA_ERROR_MESSAGES": "not-really-important"}' self.project.save() lava = LAVABackend(None) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertFalse(completed) resubmit.assert_not_called() @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_CANCELED) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_canceled(self, get_results, get_details, get_logs): lava = LAVABackend(None) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) self.assertFalse(completed) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT) def test_automated_resubmit_email(self, get_results, get_details, get_logs): self.project.admin_subscriptions.create(email='foo@example.com') lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) resubmitted_job = TestJob( job_id='1235', backend=self.backend, target=self.project, resubmitted_count=1) resubmitted_job.save() lava.resubmit = MagicMock(return_value=resubmitted_job) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) 
lava.resubmit.assert_called() # there should be an admin email sent after resubmission self.assertEqual(1, len(mail.outbox)) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT) def test_automated_dont_resubmit_email(self, get_results, get_details, get_logs): self.project.admin_subscriptions.create(email='foo@example.com') lava = LAVABackend(self.backend) # update lava backend settings in place lava.settings['CI_LAVA_SEND_ADMIN_EMAIL'] = False testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) resubmitted_job = TestJob( job_id='1235', backend=self.backend, target=self.project, resubmitted_count=1) resubmitted_job.save() lava.resubmit = MagicMock(return_value=resubmitted_job) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) lava.resubmit.assert_called() # there should not be an admin email sent after resubmission self.assertEqual(0, len(mail.outbox)) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit(self, lava_resubmit, get_results, get_details, get_logs): lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) lava_resubmit.assert_called() new_test_job = TestJob.objects.all().last() self.assertEqual(1, new_test_job.resubmitted_count) self.assertFalse(testjob.can_resubmit) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_CUSTOM) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit_with_project_settings(self, lava_resubmit, get_results, get_details, get_logs): self.project.project_settings = yaml.dump( {'CI_LAVA_INFRA_ERROR_MESSAGES': [TEST_RESULT_FAILURE_CUSTOM]}) lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) lava_resubmit.assert_called() new_test_job = TestJob.objects.all().last() self.assertEqual(1, new_test_job.resubmitted_count) self.assertFalse(testjob.can_resubmit) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT2) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit2(self, lava_resubmit, get_results, get_details, get_logs): lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) lava_resubmit.assert_called() 
new_test_job = TestJob.objects.all().last() self.assertEqual(1, new_test_job.resubmitted_count) self.assertFalse(testjob.can_resubmit) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT3) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit3(self, lava_resubmit, get_results, get_details, get_logs): lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) lava_resubmit.assert_called() new_test_job = TestJob.objects.all().last() self.assertEqual(1, new_test_job.resubmitted_count) self.assertFalse(testjob.can_resubmit) @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT4) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit4(self, lava_resubmit, get_results, get_details, get_logs): lava = LAVABackend(self.backend) testjob = TestJob( job_id='1234', backend=self.backend, target=self.project) status, completed, metadata, results, metrics, logs = lava.fetch( testjob) lava_resubmit.assert_called() new_test_job = TestJob.objects.all().last() self.assertEqual(1, new_test_job.resubmitted_count) self.assertFalse(testjob.can_resubmit) @patch('squad.ci.backend.lava.Backend.__submit__', side_effect=HTTP_400) def test_submit_400(self, __submit__): lava = LAVABackend(None) testjob = TestJob( job_id='1234', target=self.project, definition='{}', backend=self.backend) with self.assertRaises(SubmissionIssue): lava.submit(testjob) @patch('squad.ci.backend.lava.Backend.__submit__', side_effect=HTTP_503) def test_submit_xmlrpc_503(self, __submit__): lava = LAVABackend(None) testjob = TestJob( job_id='1234', target=self.project, definition='{}', backend=self.backend) with self.assertRaises(TemporarySubmissionIssue): lava.submit(testjob) @patch('requests.post', side_effect=HTTP_503) def test_submit_http_503(self, __submit__): lava = LAVABackend(None) lava.use_xml_rpc = False lava.api_url_base = 'http://example.com/' testjob = TestJob( job_id='1234', target=self.project, definition='{}', backend=self.backend) with self.assertRaises(TemporarySubmissionIssue): lava.submit(testjob) @patch('squad.ci.backend.lava.Backend.__submit__', side_effect=HTTP_401) def test_submit_unauthorized(self, __submit__): lava = LAVABackend(None) testjob = TestJob( job_id='1234', target=self.project, definition='{}', backend=self.backend) with self.assertRaises(TemporarySubmissionIssue): lava.submit(testjob) def test_get_listen_url(self): backend = MagicMock() backend.url = 'https://foo.tld/RPC2' lava = LAVABackend(backend) lava.__get_publisher_event_socket__ = MagicMock( return_value='tcp://bar.tld:9999') self.assertEqual('tcp://bar.tld:9999', lava.get_listener_url()) lava.__get_publisher_event_socket__ = MagicMock( return_value='tcp://*:9999') self.assertEqual('tcp://foo.tld:9999', lava.get_listener_url()) @patch('squad.ci.backend.lava.fetch') def test_receive_event(self, fetch): lava = LAVABackend(self.backend) testjob = TestJob.objects.create( 
backend=self.backend, target=self.project, target_build=self.build, environment='myenv', submitted=True, fetched=False, job_id='123', name="foo", ) lava.receive_event('foo.com.testjob', { "job": '123', 'state': 'Finished', 'health': 'Complete'}) fetch.apply_async.assert_called_with(args=[testjob.id]) self.assertEqual('Complete', TestJob.objects.get( pk=testjob.id).job_status) def test_receive_event_no_testjob(self): backend = MagicMock() backend.url = 'https://foo.tld/RPC2' lava = LAVABackend(backend) # just not crashing is OK lava.receive_event('foo.com.testjob', {}) def test_receive_event_wrong_topic(self): backend = MagicMock() backend.url = 'https://foo.tld/RPC2' lava = LAVABackend(backend) # just not crashing is OK lava.receive_event('foo.com.device', {'job': '123'}) @patch('squad.ci.backend.lava.fetch') def test_receive_event_no_status(self, fetch): lava = LAVABackend(self.backend) testjob = TestJob.objects.create( backend=self.backend, target=self.project, target_build=self.build, environment='myenv', submitted=True, fetched=False, job_id='123', name="foo", ) lava.receive_event('foo.com.testjob', {"job": '123'}) self.assertEqual('Unknown', TestJob.objects.get( pk=testjob.id).job_status) def test_lava_log_parsing(self): lava = LAVABackend(self.backend) log_data = BytesIO(LOG_DATA) log = lava.__parse_log__(log_data) self.assertIn("target message", log) self.assertIn("feedback message", log) self.assertNotIn("info message", log) @patch('requests.get') def test_lava_log_download(self, requests_get): lava1 = LAVABackend(self.backend) requests_get.side_effect = requests.exceptions.ChunkedEncodingError( "Connection closed") log = lava1.__download_full_log__(999) requests_get.assert_called() self.assertEqual(b'', log) @patch('requests.get') def test_lava_log_download_rest(self, requests_get): # check REST API path self.backend.url.replace("RPC2/", "api/v0.2/") lava2 = LAVABackend(self.backend) requests_get.side_effect = requests.exceptions.ChunkedEncodingError( "Connection closed") log = lava2.__download_full_log__(999) requests_get.assert_called() self.assertEqual(b'', log) def test_broken_lava_log_parsing(self): lava = LAVABackend(self.backend) log_data = BytesIO(BROKEN_LOG_DATA) log = lava.__parse_log__(log_data) self.assertEqual(0, len(log)) def test_empty_lava_log_parsing(self): lava = LAVABackend(self.backend) log_data = BytesIO() log = lava.__parse_log__(log_data) self.assertEqual(0, len(log)) def test_test_log_unicode_error(self): lava = LAVABackend(self.backend) log_data = BytesIO(b'a non-decodable unicode char: \xb1\n') test_log = lava.__download_test_log__(log_data, 1, 3) self.assertIn("a non-decodable unicode char:", test_log) @patch("squad.ci.backend.lava.Backend.__resubmit__", side_effect=HTTP_500) def test_resubmit_deleted_job(self, __resubmit__): lava = LAVABackend(None) test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, backend=self.backend, target=self.project, job_id='9999', ) with self.assertRaises(SubmissionIssue): lava.resubmit(testjob) @patch("squad.ci.backend.lava.Backend.__submit__", return_value=[]) def test_resubmit_job_lava_error(self, __resubmit__): lava = self.backend.get_implementation() test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, backend=self.backend, target=self.project, submitted=True, job_status='Canceled', job_id="1236" ) self.assertRaises(TemporarySubmissionIssue, lava.resubmit, testjob) def test_resubmit_job_lava_4xx_reply(self): self.backend.url = 
"http://example.com/api/v0.2/" lava = self.backend.get_implementation() test_definition = "foo: 1\njob_name: bar" testjob = TestJob( definition=test_definition, backend=self.backend, target=self.project, submitted=True, job_status='Canceled', job_id="1236" ) with requests_mock.Mocker() as m: m.post("http://example.com/api/v0.2/jobs/1236/resubmit/", status_code=405, text="Method not allowed") self.assertRaises(TemporarySubmissionIssue, lava.resubmit, testjob)
class LavaTest(TestCase): def setUp(self): pass def test_detect(self): pass @patch("squad.ci.backend.lava.Backend.__submit__", return_value='1234') def test_submit(self, __submit__): pass def test_check_job_definition(self): pass def test_lava_job_name(self): pass @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_submit_timeout(self, post): pass @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_submit_rest_timeout(self, post): pass @patch("squad.ci.backend.lava.Backend.__cancel_job__", return_value=True) def test_cancel(self, __cancel__): pass @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_cancel_timeout(self, __cancel__): pass @patch("requests.post", side_effect=requests.exceptions.Timeout) def test_cancel_rest_timeout(self, __cancel__): pass @patch("squad.ci.backend.lava.Backend.__submit__", return_value=['1234.0', '1234.1']) def test_submit_multinode(self, __submit__): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_fetch_basics(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_INVALID_DATES) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_fetch_invalid_dates(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_START_DATE) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_fetch_missing_dates(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_RUNNING) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__") def test_fetch_not_finished(self, get_results, get_details): pass @patch("squad.ci.backend.lava.Backend.__get_job_details__", side_effect=requests.exceptions.Timeout) def test_fetch_timeout(self, get_details): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_metadata(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_NO_METADATA) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_empty_metadata(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_WITH_SUITE_VERSIONS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_WITH_SUITE_VERSIONS) def test_parse_results_metadata_with_suite_versions(self, get_results, get_details, test_log): pass 
@patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_backend_settings(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_project_settings(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_empty_project_settings(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_suite_project_settings_overwrites_backend(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_ignore_lava_boot(self, get_results, get_details, download_test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_INCOMPLETE) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_WITH_JOB_INFRA_ERROR) def test_parse_results_ignore_infra_errors(self, get_results, get_details, download_test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_INCOMPLETE) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_WITH_JOB_INFRA_ERROR) def test_parse_results_dont_ignore_infra_errors(self, get_results, get_details, download_test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_handle_lava_suite_and_ignore_lava_boot(self, get_results, get_details, download_test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_metadata(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) 
@patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_REST) def test_parse_results_rest(self, get_results, get_details, download_test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_parse_results_clone_measurements(self, get_results, get_details, test_log): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE) def test_completed(self, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_STR) def test_incomplete_string_results_metadata(self, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__resubmit__") @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_STR_NO_MESSAGE) def test_incomplete_string_results_metadata_null_error_msg(self, get_results, get_details, get_logs, resubmit): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS_CANCELED) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS) def test_canceled(self, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT) def test_automated_resubmit_email(self, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT) def test_automated_dont_resubmit_email(self, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit_email(self, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) 
@patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_CUSTOM) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit_with_project_settings(self, lava_resubmit, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT2) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit2(self, lava_resubmit, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT3) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit3(self, lava_resubmit, get_results, get_details, get_logs): pass @patch("squad.ci.backend.lava.Backend.__download_full_log__", return_value=LOG_DATA) @patch("squad.ci.backend.lava.Backend.__get_job_details__", return_value=JOB_DETAILS) @patch("squad.ci.backend.lava.Backend.__get_testjob_results_yaml__", return_value=TEST_RESULTS_INFRA_FAILURE_RESUBMIT4) @patch("squad.ci.backend.lava.Backend.__resubmit__", return_value="1235") def test_automated_resubmit4(self, lava_resubmit, get_results, get_details, get_logs): pass @patch('squad.ci.backend.lava.Backend.__submit__', side_effect=HTTP_400) def test_submit_400(self, __submit__): pass @patch('squad.ci.backend.lava.Backend.__submit__', side_effect=HTTP_503) def test_submit_xmlrpc_503(self, __submit__): pass @patch('requests.post', side_effect=HTTP_503) def test_submit_http_503(self, __submit__): pass @patch('squad.ci.backend.lava.Backend.__submit__', side_effect=HTTP_401) def test_submit_unauthorized(self, __submit__): pass def test_get_listen_url(self): pass @patch('squad.ci.backend.lava.fetch') def test_receive_event(self, fetch): pass def test_receive_event_no_testjob(self): pass def test_receive_event_wrong_topic(self): pass @patch('squad.ci.backend.lava.fetch') def test_receive_event_no_status(self, fetch): pass def test_lava_log_parsing(self): pass @patch('requests.get') def test_lava_log_download(self, requests_get): pass @patch('requests.get') def test_lava_log_download_rest(self, requests_get): pass def test_broken_lava_log_parsing(self): pass def test_empty_lava_log_parsing(self): pass def test_test_log_unicode_error(self): pass @patch("squad.ci.backend.lava.Backend.__resubmit__", side_effect=HTTP_500) def test_resubmit_deleted_job(self, __resubmit__): pass @patch("squad.ci.backend.lava.Backend.__submit__", return_value=[]) def test_resubmit_job_lava_error(self, __resubmit__): pass def test_resubmit_job_lava_4xx_reply(self): pass
170
0
12
1
11
0
1
0.02
1
7
6
0
59
4
59
59
883
89
782
281
612
12
471
233
411
1
1
1
59
145,582
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/backend/test_tuxsuite.py
test.ci.backend.test_tuxsuite.TuxSuiteTest
class TuxSuiteTest(TestCase): def setUp(self): self.backend = Backend.objects.create( url=TUXSUITE_URL, implementation_type='tuxsuite', backend_settings=""" { "BUILD_METADATA_KEYS": [ "build_status", "download_url", "git_describe", "git_ref", "git_repo", "git_sha", "git_short_log", "kernel_version", "kconfig", "target_arch", "toolchain", "does_not_exist" ], "OEBUILD_METADATA_KEYS": [ "download_url", "sources", ], "TEST_METADATA_KEYS": [ "does_not_exist" ], "TEST_BUILD_METADATA_KEYS": [ "build_name", "kconfig", "toolchain" ], } """, ) self.group = Group.objects.create( name="tuxgroup" ) self.project = Project.objects.create( name="tuxprojext", group=self.group, ) self.environment = self.project.environments.create(slug="myenv") self.build = self.project.builds.create(version='tuxbuild') self.tuxsuite = TuxSuiteBackend(self.backend) def test_detect(self): impl = self.backend.get_implementation() self.assertIsInstance(impl, TuxSuiteBackend) def test_not_implemented(self): testjob = self.build.test_jobs.create( target=self.project, backend=self.backend) with self.assertRaises(NotImplementedError): self.tuxsuite.submit(testjob) with self.assertRaises(NotImplementedError): self.tuxsuite.resubmit(testjob) with self.assertRaises(NotImplementedError): self.tuxsuite.listen() def test_generate_test_name(self): results = { 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } sha = hashlib.sha1() for k in results['kconfig'][1:]: sha.update(k.encode()) expected_name = results['toolchain'] + \ '-defconfig-' + sha.hexdigest()[0:8] self.assertEqual( expected_name, self.tuxsuite.generate_test_name(results)) def test_parse_job_id(self): result = self.tuxsuite.parse_job_id( 'BUILD:linaro@anders#1yPYGaOEPNwr2pCqBgONY43zORq') self.assertEqual( ('BUILD', 'linaro@anders', '1yPYGaOEPNwr2pCqBgONY43zORq'), result) result = self.tuxsuite.parse_job_id( 'TEST:linaro@anders#1yPYGaOEPNwr2pCqBgONY43zORq') self.assertEqual( ('TEST', 'linaro@anders', '1yPYGaOEPNwr2pCqBgONY43zORq'), result) result = self.tuxsuite.parse_job_id( 'TEST:linaro.ltd@anders.roxel#1yPYGaOEPNwr2pCqBgONY43zORq') self.assertEqual(('TEST', 'linaro.ltd@anders.roxel', '1yPYGaOEPNwr2pCqBgONY43zORq'), result) with self.assertRaises(FetchIssue): self.tuxsuite.parse_job_id('not-really-vallid') with self.assertRaises(FetchIssue): self.tuxsuite.parse_job_id( 'BLAH:linaro@anders#1yPYGaOEPNwr2pCqBgONY43zORq') def test_job_url(self): # Builds job url job_id = 'BUILD:linaro@anders#1yPYGaOEPNwr2pCqBgONY43zORq' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) endpoint = '/groups/linaro/projects/anders/builds/1yPYGaOEPNwr2pCqBgONY43zORq' expected = urljoin(TUXSUITE_URL, endpoint) self.assertEqual(expected, self.tuxsuite.job_url(testjob)) # Tests job url job_id = 'TEST:linaro@anders#1yPYGaOEPNwr2pCqBgONY43zORq' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) endpoint = '/groups/linaro/projects/anders/tests/1yPYGaOEPNwr2pCqBgONY43zORq' expected = urljoin(TUXSUITE_URL, endpoint) self.assertEqual(expected, self.tuxsuite.job_url(testjob)) def test_parse_build_metadata(self): results = { 'download_url': 'http://builds.tuxbuild.com/123', } metadata = { "example_metadata": "blah", } metadata_file = { "build": { "environment": { "KBUILD_BUILD_HOST": "tuxmake", "KBUILD_BUILD_TIMESTAMP": "@1715675769", "KBUILD_BUILD_USER": "tuxmake", "KCFLAGS": "-ffile-prefix-map=/home/tuxbuild/.cache/tuxmake/builds/2/build/=", "SCCACHE_BUCKET": "sccache.tuxbuild.com", }, "jobs": 8, 
"kconfig": "clps711x_defconfig", "kconfig_add": [], "reproducer_cmdline": [ "tuxmake", "--target-arch=arm", "--kconfig=clps711x_defconfig", "--toolchain=clang-18", "--wrapper=sccache", "--environment=KBUILD_BUILD_TIMESTAMP=@1715675769", "--environment=KBUILD_BUILD_USER=tuxmake", "--environment=KBUILD_BUILD_HOST=tuxmake", "--environment=KCFLAGS=-ffile-prefix-map=/home/tuxbuild/.cache/tuxmake/builds/2/build/=", "--environment=SCCACHE_BUCKET=sccache.tuxbuild.com", "--runtime=podman", "--image=855116176053.dkr.ecr.us-east-1.amazonaws.com/tuxmake/arm_clang-18", "LLVM=1", "LLVM_IAS=1", "config", "default", "kernel", "xipkernel", "modules", "dtbs", "dtbs-legacy", "debugkernel", "headers", ], "runtime": "podman", "target_arch": "arm", "targets": [ "config", "default", "kernel", "xipkernel", "modules", "dtbs", "dtbs-legacy", "debugkernel", "headers", ], "toolchain": "clang-18", "verbose": False, "wrapper": "sccache", }, "compiler": { "name": "clang", "version": "18.1.4", "version_full": "Debian clang version 18.1.4 (++20240416103102+1deeee3f5da4-1~exp1~20240416223224.98)", }, "hardware": {"cores": 8, "free_disk_space": 58738, "ram": 14974}, "os": {"name": "Debian GNU/Linux", "version": "11"}, "resources": {"disk_space": 136}, "results": { "artifacts": { "config": ["config"], "debugkernel": ["vmlinux.xz", "System.map"], "default": [], "dtbs": ["dtbs.tar.xz"], "dtbs-legacy": ["dtbs.tar.xz"], "headers": ["headers.tar.xz"], "kernel": ["zImage"], "log": ["build.log", "build-debug.log"], "modules": [], "xipkernel": [], }, "duration": { "build": 186.54648447036743, "cleanup": 1.350804090499878, "copy": 0.006262302398681641, "metadata": 0.7471709251403809, "prepare": 70.95046162605286, "validate": 0.00015616416931152344, }, "errors": 0, "status": "PASS", "targets": { "config": {"duration": 4.253992795944214, "status": "PASS"}, "debugkernel": { "duration": 2.8869056701660156, "status": "PASS", }, "default": {"duration": 167.16954827308655, "status": "PASS"}, "dtbs": {"duration": 2.950917959213257, "status": "PASS"}, "dtbs-legacy": { "duration": 0.525646448135376, "status": "SKIP", }, "headers": {"duration": 5.598230361938477, "status": "PASS"}, "kernel": {"duration": 2.525160551071167, "status": "PASS"}, "modules": {"duration": 0.32180237770080566, "status": "SKIP"}, "xipkernel": {"duration": 0.314225435256958, "status": "SKIP"}, }, "warnings": 0, }, "runtime": { "image_digest": "855116176053.dkr.ecr.us-east-1.amazonaws.com/tuxmake/arm_clang-18@sha256:087c232cbeee1d92a541614852bbd298ef8406315c1461b67692b29d4fb2090a", "image_name": "855116176053.dkr.ecr.us-east-1.amazonaws.com/tuxmake/arm_clang-18", "image_tag": None, "version": "podman version 4.6.2", }, "sccache": {"cache_hits": 66, "cache_misses": 0}, "source": { "kernelrelease": "6.9.0-next-20240514", "kernelversion": "6.9.0", }, "system_map": {"text_offset": "0xc0008000"}, "tools": { "ar": "GNU ar (GNU Binutils for Debian) 2.35.2", "as": "GNU assembler (GNU Binutils for Debian) 2.35.2", "bc": "bc 1.07.1", "bison": "bison (GNU Bison) 3.7.5", "ccache": "ccache version 4.2", "clang": "Debian clang version 18.1.4 (++20240416103102+1deeee3f5da4-1~exp1~20240416223224.98)", "depmod": "kmod version 28", "fdformat": "fdformat from util-linux 2.36.1", "flex": "flex 2.6.4", "gcc": "gcc (Debian 10.2.1-6) 10.2.1 20210110", "ld": "GNU ld (GNU Binutils for Debian) 2.35.2", "lld": "Debian LLD 18.1.4 (compatible with GNU linkers)", "make": "GNU Make 4.3", "openssl": "OpenSSL 1.1.1w 11 Sep 2023", "pahole": "v1.25", "ps": "ps from procps-ng 3.3.17", "sccache": "sccache 
0.2.9", }, "tuxmake": {"version": "1.23.2"}, "uname": { "kernel": "Linux", "kernel_release": "6.5.0-1014-aws", "kernel_version": "#14~22.04.1-Ubuntu SMP Thu Feb 15 15:27:06 UTC 2024", "machine": "x86_64", "operating_system": "GNU/Linux", }, "vmlinux": { "bss_size": 96144, "data_size": 806948, "file_size": 6236152, "text_size": 4193860, }, } expected = { "example_metadata": "blah", "build": { "environment": { "KBUILD_BUILD_HOST": "tuxmake", "KBUILD_BUILD_TIMESTAMP": "@1715675769", "KBUILD_BUILD_USER": "tuxmake", "KCFLAGS": "-ffile-prefix-map=/home/tuxbuild/.cache/tuxmake/builds/2/build/=", "SCCACHE_BUCKET": "sccache.tuxbuild.com", }, "jobs": 8, "kconfig": "clps711x_defconfig", "kconfig_add": [], "reproducer_cmdline": [ "tuxmake", "--target-arch=arm", "--kconfig=clps711x_defconfig", "--toolchain=clang-18", "--wrapper=sccache", "--environment=KBUILD_BUILD_TIMESTAMP=@1715675769", "--environment=KBUILD_BUILD_USER=tuxmake", "--environment=KBUILD_BUILD_HOST=tuxmake", "--environment=KCFLAGS=-ffile-prefix-map=/home/tuxbuild/.cache/tuxmake/builds/2/build/=", "--environment=SCCACHE_BUCKET=sccache.tuxbuild.com", "--runtime=podman", "--image=855116176053.dkr.ecr.us-east-1.amazonaws.com/tuxmake/arm_clang-18", "LLVM=1", "LLVM_IAS=1", "config", "default", "kernel", "xipkernel", "modules", "dtbs", "dtbs-legacy", "debugkernel", "headers", ], "runtime": "podman", "target_arch": "arm", "targets": [ "config", "default", "kernel", "xipkernel", "modules", "dtbs", "dtbs-legacy", "debugkernel", "headers", ], "toolchain": "clang-18", "verbose": False, "wrapper": "sccache", }, "compiler": { "name": "clang", "version": "18.1.4", "version_full": "Debian clang version 18.1.4 (++20240416103102+1deeee3f5da4-1~exp1~20240416223224.98)", }, "hardware": {"cores": 8, "free_disk_space": 58738, "ram": 14974}, "os": {"name": "Debian GNU/Linux", "version": "11"}, "resources": {"disk_space": 136}, "results": { "artifacts": { "config": ["config"], "debugkernel": ["vmlinux.xz", "System.map"], "default": [], "dtbs": ["dtbs.tar.xz"], "dtbs-legacy": ["dtbs.tar.xz"], "headers": ["headers.tar.xz"], "kernel": ["zImage"], "log": ["build.log", "build-debug.log"], "modules": [], "xipkernel": [], }, "duration": { "build": 186.54648447036743, "cleanup": 1.350804090499878, "copy": 0.006262302398681641, "metadata": 0.7471709251403809, "prepare": 70.95046162605286, "validate": 0.00015616416931152344, }, "errors": 0, "status": "PASS", "targets": { "config": {"duration": 4.253992795944214, "status": "PASS"}, "debugkernel": { "duration": 2.8869056701660156, "status": "PASS", }, "default": {"duration": 167.16954827308655, "status": "PASS"}, "dtbs": {"duration": 2.950917959213257, "status": "PASS"}, "dtbs-legacy": { "duration": 0.525646448135376, "status": "SKIP", }, "headers": {"duration": 5.598230361938477, "status": "PASS"}, "kernel": {"duration": 2.525160551071167, "status": "PASS"}, "modules": {"duration": 0.32180237770080566, "status": "SKIP"}, "xipkernel": {"duration": 0.314225435256958, "status": "SKIP"}, }, "warnings": 0, }, "runtime": { "image_digest": "855116176053.dkr.ecr.us-east-1.amazonaws.com/tuxmake/arm_clang-18@sha256:087c232cbeee1d92a541614852bbd298ef8406315c1461b67692b29d4fb2090a", "image_name": "855116176053.dkr.ecr.us-east-1.amazonaws.com/tuxmake/arm_clang-18", "image_tag": None, "version": "podman version 4.6.2", }, "sccache": {"cache_hits": 66, "cache_misses": 0}, "source": { "kernelrelease": "6.9.0-next-20240514", "kernelversion": "6.9.0", }, "system_map": {"text_offset": "0xc0008000"}, "tools": { "ar": "GNU ar (GNU Binutils 
for Debian) 2.35.2", "as": "GNU assembler (GNU Binutils for Debian) 2.35.2", "bc": "bc 1.07.1", "bison": "bison (GNU Bison) 3.7.5", "ccache": "ccache version 4.2", "clang": "Debian clang version 18.1.4 (++20240416103102+1deeee3f5da4-1~exp1~20240416223224.98)", "depmod": "kmod version 28", "fdformat": "fdformat from util-linux 2.36.1", "flex": "flex 2.6.4", "gcc": "gcc (Debian 10.2.1-6) 10.2.1 20210110", "ld": "GNU ld (GNU Binutils for Debian) 2.35.2", "lld": "Debian LLD 18.1.4 (compatible with GNU linkers)", "make": "GNU Make 4.3", "openssl": "OpenSSL 1.1.1w 11 Sep 2023", "pahole": "v1.25", "ps": "ps from procps-ng 3.3.17", "sccache": "sccache 0.2.9", }, "tuxmake": {"version": "1.23.2"}, "uname": { "kernel": "Linux", "kernel_release": "6.5.0-1014-aws", "kernel_version": "#14~22.04.1-Ubuntu SMP Thu Feb 15 15:27:06 UTC 2024", "machine": "x86_64", "operating_system": "GNU/Linux", }, "vmlinux": { "bss_size": 96144, "data_size": 806948, "file_size": 6236152, "text_size": 4193860, }, } with requests_mock.Mocker() as fake_request: fake_request.get(results["download_url"] + '/' + 'metadata.json', json=metadata_file) self.tuxsuite.update_metadata_from_file( results=results, metadata=metadata) self.assertEqual(expected, metadata) def test_parse_test_no_metadata(self): results = { 'download_url': 'http://builds.tuxbuild.com/123', } metadata = dict() expected = dict() with requests_mock.Mocker() as fake_request: fake_request.get(results['download_url'] + '/' + 'metadata.json', status_code=404) self.tuxsuite.update_metadata_from_file( results=results, metadata=metadata) self.assertEqual(expected, metadata) def test_parse_test_metadata(self): results = { 'download_url': 'http://builds.tuxbuild.com/123', } metadata = { "example_metadata": "blah", } metadata_file = { "arch": "arm64", "host_arch": "amd64", "qemu_version": "1:8.1.2+ds-1", "artefacts": { "rootfs": { "url": "https://storage.tuxboot.com/debian/bookworm/arm64/rootfs.ext4.xz", "sha256sum": "5e0a9ec562ffea3d9705834677df1cd43ff1ba44228b46734e10a5e990c2c169", }, "kernel": { "url": "https://storage.tuxsuite.com/public/linaro/lkft/builds/2ZPQut79EaVwA8ANRJp7xaAVkpP/Image.gz", "sha256sum": "ab5d2ef97d7a7da95032899c3ee9233dcdf75a4091ec03d7ae2d53f05d24e114", }, "modules": { "url": "https://storage.tuxsuite.com/public/linaro/lkft/builds/2ZPQut79EaVwA8ANRJp7xaAVkpP/modules.tar.xz", "sha256sum": "1602a287bb54f43e9d4e589c9a773cbd7b9d1bee336501792092a25d76d0f3fc", }, "overlay-00": { "url": "https://storage.tuxboot.com/overlays/debian/bookworm/arm64/ltp/20230929/ltp.tar.xz", "sha256sum": "94ff90b59487ceb765b09a53d6642ce0e39deaa92062687355a99de3652130e0", }, }, "durations": {"tests": {"ltp-controllers": "4250.66", "boot": "62.69"}}, } expected = { "example_metadata": "blah", "arch": "arm64", "host_arch": "amd64", "qemu_version": "1:8.1.2+ds-1", "artefacts": { "rootfs": { "url": "https://storage.tuxboot.com/debian/bookworm/arm64/rootfs.ext4.xz", "sha256sum": "5e0a9ec562ffea3d9705834677df1cd43ff1ba44228b46734e10a5e990c2c169", }, "kernel": { "url": "https://storage.tuxsuite.com/public/linaro/lkft/builds/2ZPQut79EaVwA8ANRJp7xaAVkpP/Image.gz", "sha256sum": "ab5d2ef97d7a7da95032899c3ee9233dcdf75a4091ec03d7ae2d53f05d24e114", }, "modules": { "url": "https://storage.tuxsuite.com/public/linaro/lkft/builds/2ZPQut79EaVwA8ANRJp7xaAVkpP/modules.tar.xz", "sha256sum": "1602a287bb54f43e9d4e589c9a773cbd7b9d1bee336501792092a25d76d0f3fc", }, "overlay-00": { "url": "https://storage.tuxboot.com/overlays/debian/bookworm/arm64/ltp/20230929/ltp.tar.xz", "sha256sum": 
"94ff90b59487ceb765b09a53d6642ce0e39deaa92062687355a99de3652130e0", }, }, "durations": {"tests": {"ltp-controllers": "4250.66", "boot": "62.69"}}, } with requests_mock.Mocker() as fake_request: fake_request.get(results["download_url"] + '/' + 'metadata.json', json=metadata_file) self.tuxsuite.update_metadata_from_file( results=results, metadata=metadata) self.assertEqual(expected, metadata) def test_fetch_url(self): expected_logs = 'dummy build log' with requests_mock.Mocker() as fake_request: url = 'http://tuxbuild.com/build1/build.log' fake_request.get(url, text=expected_logs) result = self.tuxsuite.fetch_url(url) self.assertEqual(expected_logs, result.text) def test_fetch_url_faulty_url(self): with requests_mock.Mocker() as fake_request: url = 'http://tuxbuild.com/build1/build.log' fake_request.get(url, exc=requests.exceptions.ConnectTimeout) with self.assertRaises(TemporaryFetchIssue): self.tuxsuite.fetch_url(url) @patch("squad.ci.backend.tuxsuite.Backend.fetch_from_results_input") def test_fetch_build_results(self, mock_fetch_from_results_input): job_id = 'BUILD:tuxgroup@tuxproject#123' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/builds/123') build_download_url = 'http://builds.tuxbuild.com/123' # Only fetch when finished with requests_mock.Mocker() as fake_request: fake_request.get(build_url, json={'state': 'running'}) results = self.tuxsuite.fetch(testjob) self.assertEqual(None, results) build_logs = 'dummy build log' build_results = { 'retry': 0, 'state': 'finished', 'build_status': 'pass', 'build_name': 'tux-build', 'git_repo': 'https://github.com/Linaro/linux-canaries.git', 'git_ref': 'v5.9', 'git_describe': 'v5.9', 'git_sha': 'bbf5c979011a099af5dc76498918ed7df445635b', 'git_short_log': 'bbf5c979011a ("Linux 5.9")', 'kernel_version': '5.9.0', 'kconfig': ['tinyconfig'], 'target_arch': 'x86_64', 'toolchain': 'gcc-10', 'download_url': build_download_url, 'provisioning_time': '2022-03-25T15:42:06.570362', 'running_time': '2022-03-25T15:44:16.223590', 'finished_time': '2022-03-25T15:46:56.095902', 'warnings_count': '2', 'tuxmake_metadata': { 'results': { 'duration': { 'build': '42', }, }, }, } expected_metadata = { 'job_url': build_url, 'job_id': job_id, 'build_status': 'pass', 'git_repo': 'https://github.com/Linaro/linux-canaries.git', 'git_ref': 'v5.9', 'git_describe': 'v5.9', 'git_sha': 'bbf5c979011a099af5dc76498918ed7df445635b', 'git_short_log': 'bbf5c979011a ("Linux 5.9")', 'kernel_version': '5.9.0', 'kconfig': ['tinyconfig'], 'target_arch': 'x86_64', 'toolchain': 'gcc-10', 'download_url': build_download_url, 'config': f'{build_download_url}/config', 'does_not_exist': None, 'build_name': 'tux-build', } expected_tests = { 'build/tux-build': 'pass', } expected_metrics = { 'build/tux-build-duration': '42', 'build/tux-build-warnings': '2', } with requests_mock.Mocker() as fake_request: fake_request.get(build_url, json=build_results) fake_request.get(urljoin(build_download_url, 'build.log'), text=build_logs) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(build_logs, logs) self.assertEqual(build_results['build_name'], 
testjob.name) mock_fetch_from_results_input.assert_not_called() @patch("squad.ci.backend.tuxsuite.Backend.fetch_from_results_input") def test_retry_fetching_build_results(self, mock_fetch_from_results_input): job_id = 'BUILD:tuxgroup@tuxproject#124' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/builds/124') build_download_url = 'http://builds.tuxbuild.com/124' build_results = { 'retry': 0, # this is the number of retry attempts TuxSuite has tried building 'state': 'finished', 'build_status': 'error', 'build_name': 'tux-build', 'git_repo': 'https://github.com/Linaro/linux-canaries.git', 'git_ref': 'v5.9', 'git_describe': 'v5.9', 'git_sha': 'bbf5c979011a099af5dc76498918ed7df445635b', 'git_short_log': 'bbf5c979011a ("Linux 5.9")', 'kernel_version': '5.9.0', 'kconfig': ['tinyconfig'], 'target_arch': 'x86_64', 'toolchain': 'gcc-10', 'download_url': build_download_url, 'provisioning_time': '2022-03-25T15:42:06.570362', 'running_time': '2022-03-25T15:44:16.223590', 'finished_time': '2022-03-25T15:46:56.095902', 'warnings_count': '2', 'status_message': 'Infrastructure Error, Please retry', } with requests_mock.Mocker() as fake_request: fake_request.get(build_url, json=build_results) with self.assertRaises(TemporaryFetchIssue): self.tuxsuite.fetch(testjob) self.assertEqual(build_results['build_name'], testjob.name) mock_fetch_from_results_input.assert_not_called() def test_fetch_build_with_given_up_infra_error(self): "this will test that the backend will still fetch the build despite its errored state" job_id = 'BUILD:tuxgroup@tuxproject#125' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/builds/125') build_download_url = 'http://builds.tuxbuild.com/125' build_logs = '' build_results = { 'retry': 2, 'state': 'finished', 'build_status': 'error', 'build_name': 'tux-build', 'git_repo': 'https://github.com/Linaro/linux-canaries.git', 'git_ref': 'v5.9', 'git_describe': 'v5.9', 'git_sha': 'bbf5c979011a099af5dc76498918ed7df445635b', 'git_short_log': 'bbf5c979011a ("Linux 5.9")', 'kernel_version': '5.9.0', 'kconfig': ['tinyconfig'], 'target_arch': 'x86_64', 'toolchain': 'gcc-10', 'download_url': build_download_url, 'provisioning_time': '2022-03-25T15:42:06.570362', 'running_time': '2022-03-25T15:44:16.223590', 'finished_time': '2022-03-25T15:46:56.095902', 'warnings_count': '2', } expected_metadata = { 'job_url': build_url, 'job_id': job_id, 'build_status': 'error', 'git_repo': 'https://github.com/Linaro/linux-canaries.git', 'git_ref': 'v5.9', 'git_describe': 'v5.9', 'git_sha': 'bbf5c979011a099af5dc76498918ed7df445635b', 'git_short_log': 'bbf5c979011a ("Linux 5.9")', 'kernel_version': '5.9.0', 'kconfig': ['tinyconfig'], 'target_arch': 'x86_64', 'toolchain': 'gcc-10', 'download_url': build_download_url, 'config': f'{build_download_url}/config', 'does_not_exist': None, 'build_name': 'tux-build', } expected_tests = { 'build/tux-build': 'skip', } expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(build_url, json=build_results) fake_request.get(urljoin(build_download_url, 'build.log'), status_code=404) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Incomplete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) 
self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(build_logs, logs) self.assertEqual(build_results['build_name'], testjob.name) def test_fetch_test_results(self): job_id = 'TEST:tuxgroup@tuxproject#123' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/123') build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/builds/456') # Only fetch when finished with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json={'state': 'running'}) results = self.tuxsuite.fetch(testjob) self.assertEqual(None, results) test_logs = 'dummy test log' test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '123', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', 'result': 'pass', 'results': {'boot': 'pass', 'ltp-smoke': 'pass'}, 'plan': None, 'waiting_for': '456', 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } build_results = { 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } build_name = self.tuxsuite.generate_test_name(build_results) expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'build_name': build_name, 'does_not_exist': None, 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } # Real test results are stored in test/ci/backend/tuxsuite_test_result_sample.json with open('test/ci/backend/tuxsuite_test_result_sample.json') as test_result_file: test_results_json = json.load(test_result_file) expected_tests = { f'boot/{build_name}': 'pass', 'ltp-smoke/access01': 'pass', 'ltp-smoke/chdir01': 'skip', 'ltp-smoke/fork01': 'pass', 'ltp-smoke/time01': 'pass', 'ltp-smoke/wait02': 'pass', 'ltp-smoke/write01': 'pass', 'ltp-smoke/symlink01': 'pass', 'ltp-smoke/stat04': 'pass', 'ltp-smoke/utime01A': 'pass', 'ltp-smoke/rename01A': 'pass', 'ltp-smoke/splice02': 'pass', 'ltp-smoke/shell_test01': 'pass', 'ltp-smoke/ping01': 'skip', 'ltp-smoke/ping602': 'skip' } expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) fake_request.get(build_url, json=build_results) fake_request.get(urljoin(test_url + '/', 'logs'), text=test_logs) fake_request.get(urljoin(test_url + '/', 'results'), json=test_results_json) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(test_logs, logs) self.assertEqual('ltp-smoke', testjob.name) def test_fetch_test_results_no_build_name_for_oebuilds(self): job_id = 'TEST:tuxgroup@tuxproject#1234' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, 
'/groups/tuxgroup/projects/tuxproject/tests/1234') test_logs = 'dummy test log' test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '123', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', 'result': 'pass', 'results': {'boot': 'pass', 'ltp-smoke': 'pass'}, 'plan': None, 'waiting_for': 'OEBUILD#2Wetiz7Qs0TbtfPgPT7hUObWqDK', 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'does_not_exist': None, } # Real test results are stored in test/ci/backend/tuxsuite_test_result_sample.json with open('test/ci/backend/tuxsuite_test_result_sample.json') as test_result_file: test_results_json = json.load(test_result_file) expected_tests = { 'boot/boot': 'pass', 'ltp-smoke/access01': 'pass', 'ltp-smoke/chdir01': 'skip', 'ltp-smoke/fork01': 'pass', 'ltp-smoke/time01': 'pass', 'ltp-smoke/wait02': 'pass', 'ltp-smoke/write01': 'pass', 'ltp-smoke/symlink01': 'pass', 'ltp-smoke/stat04': 'pass', 'ltp-smoke/utime01A': 'pass', 'ltp-smoke/rename01A': 'pass', 'ltp-smoke/splice02': 'pass', 'ltp-smoke/shell_test01': 'pass', 'ltp-smoke/ping01': 'skip', 'ltp-smoke/ping602': 'skip' } expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) fake_request.get(urljoin(test_url + '/', 'logs'), text=test_logs) fake_request.get(urljoin(test_url + '/', 'results'), json=test_results_json) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(test_logs, logs) self.assertEqual('ltp-smoke', testjob.name) def test_fetch_results_from_testjob_input(self): job_id = 'TEST:tuxgroup@tuxproject#123' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/123') build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/builds/456') test_logs = 'dummy test log' test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '123', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', 'result': 'pass', 'results': {'boot': 'pass', 'ltp-smoke': 'pass'}, 'plan': None, 'waiting_for': '456', 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } build_results = { 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } build_name = 
self.tuxsuite.generate_test_name(build_results) expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'build_name': build_name, 'does_not_exist': None, 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } # Real test results are stored in test/ci/backend/tuxsuite_test_result_sample.json with open('test/ci/backend/tuxsuite_test_result_sample.json') as test_result_file: test_results_json = json.load(test_result_file) expected_tests = { f'boot/{build_name}': 'pass', 'ltp-smoke/access01': 'pass', 'ltp-smoke/chdir01': 'skip', 'ltp-smoke/fork01': 'pass', 'ltp-smoke/time01': 'pass', 'ltp-smoke/wait02': 'pass', 'ltp-smoke/write01': 'pass', 'ltp-smoke/symlink01': 'pass', 'ltp-smoke/stat04': 'pass', 'ltp-smoke/utime01A': 'pass', 'ltp-smoke/rename01A': 'pass', 'ltp-smoke/splice02': 'pass', 'ltp-smoke/shell_test01': 'pass', 'ltp-smoke/ping01': 'skip', 'ltp-smoke/ping602': 'skip' } job_data = { 'download_url': 'http://storage.tuxapi.com/mystorage' } expected_metrics = {} testjob.input = json.dumps(test_results) with requests_mock.Mocker() as fake_request: fake_request.get(build_url, json=build_results) fake_request.get(urljoin(test_url + '/', 'logs'), text=test_logs) fake_request.get(urljoin(test_url + '/', 'results'), json=test_results_json) fake_request.get(test_url, json=job_data) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(test_logs, logs) self.assertEqual('ltp-smoke', testjob.name) def test_fetch_test_failed_results(self): job_id = 'TEST:tuxgroup@tuxproject#125' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/125') build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/builds/567') # Only fetch when finished with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json={'state': 'running'}) results = self.tuxsuite.fetch(testjob) self.assertEqual(None, results) test_logs = 'dummy test log' test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '125', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', 'result': 'fail', 'results': {'boot': 'fail', 'ltp-smoke': 'unknown'}, 'plan': None, 'waiting_for': '567', 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } build_results = { 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } build_name = self.tuxsuite.generate_test_name(build_results) expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'build_name': build_name, 'does_not_exist': None, 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } # Real test results are stored in test/ci/backend/tuxsuite_test_failed_result_sample.json with 
open('test/ci/backend/tuxsuite_test_failed_result_sample.json') as test_result_file: test_results_json = json.load(test_result_file) expected_tests = { f'boot/{build_name}': 'fail', 'ltp-smoke/access01': 'fail', 'ltp-smoke/chdir01': 'skip', 'ltp-smoke/fork01': 'pass', 'ltp-smoke/time01': 'pass', 'ltp-smoke/wait02': 'pass', 'ltp-smoke/write01': 'pass', 'ltp-smoke/symlink01': 'pass', 'ltp-smoke/stat04': 'pass', 'ltp-smoke/utime01A': 'pass', 'ltp-smoke/rename01A': 'pass', 'ltp-smoke/splice02': 'pass', 'ltp-smoke/shell_test01': 'pass', 'ltp-smoke/ping01': 'skip', 'ltp-smoke/ping602': 'skip' } expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) fake_request.get(build_url, json=build_results) fake_request.get(urljoin(test_url + '/', 'logs'), text=test_logs) fake_request.get(urljoin(test_url + '/', 'results'), json=test_results_json) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(test_logs, logs) self.assertEqual('ltp-smoke', testjob.name) self.assertEqual( "{'boot': 'fail', 'ltp-smoke': 'unknown'}", testjob.failure) def test_fetch_test_infrastructure_error(self): job_id = 'TEST:tuxgroup@tuxproject#126' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/126') test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '125', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', # error means tuxsuite suffered from an infrastructure error and was not able to run tests 'result': 'error', 'results': {'boot': 'unknown', 'ltp-smoke': 'unknown'}, 'plan': None, 'waiting_for': '567', 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'does_not_exist': None, } with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Incomplete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual({}, tests) self.assertEqual({}, metrics) self.assertEqual('', logs) self.assertEqual('ltp-smoke', testjob.name) self.assertEqual('tuxsuite infrastructure error', testjob.failure) def test_fetch_test_results_for_test_with_failed_build(self): job_id = 'TEST:tuxgroup@tuxproject#124' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/124') # Only fetch when finished with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json={'state': 'running'}) results = 
self.tuxsuite.fetch(testjob) self.assertEqual(None, results) test_logs = '' test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '124', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', 'result': 'fail', 'results': {}, 'plan': None, 'waiting_for': 'BUILD#123', 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'does_not_exist': None, } expected_tests = {} expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) fake_request.get(urljoin(test_url + '/', 'logs'), text='{"error": "File not found"}', status_code=404) fake_request.get(urljoin(test_url + '/', 'results'), json={'error': 'File not found'}, status_code=404) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(test_logs, logs) self.assertEqual('ltp-smoke', testjob.name) self.assertEqual('build failed', testjob.failure) def test_fetch_test_results_for_test_with_failed_sanity_test(self): job_id = 'TEST:tuxgroup@tuxproject#127' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/127') # Only fetch when finished with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json={'state': 'running'}) results = self.tuxsuite.fetch(testjob) self.assertEqual(None, results) test_logs = '' test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '127', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', 'result': 'fail', 'results': {}, 'plan': None, 'waiting_for': 'TEST#123', 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'does_not_exist': None, } expected_tests = {} expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) fake_request.get(urljoin(test_url + '/', 'logs'), text='{"error": "File not found"}', status_code=404) fake_request.get(urljoin(test_url + '/', 'results'), json={'error': 'File not found'}, status_code=404) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), 
sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(test_logs, logs) self.assertEqual('ltp-smoke', testjob.name) self.assertEqual('sanity test failed', testjob.failure) def test_follow_test_dependency(self): job_id = 'TEST:tuxgroup@tuxproject#124' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/124') sanity_test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/123') build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/builds/456') test_logs = 'dummy test log' test_results = { 'project': 'tuxgroup/tuxproject', 'uid': '124', 'tests': ['boot', 'ltp-smoke'], 'state': 'finished', 'result': 'pass', 'results': {'boot': 'pass', 'ltp-smoke': 'pass'}, 'plan': None, 'waiting_for': 'TEST#123', } sanity_test_results = { 'project': 'tuxgroup/tuxproject', 'uid': '123', 'waiting_for': 'BUILD#456', } build_results = { 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } build_name = self.tuxsuite.generate_test_name(build_results) expected_metadata = { 'job_url': test_url, 'job_id': job_id, 'build_name': build_name, 'does_not_exist': None, 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } # Real test results are stored in test/ci/backend/tuxsuite_test_result_sample.json with open('test/ci/backend/tuxsuite_test_result_sample.json') as test_result_file: test_results_json = json.load(test_result_file) expected_tests = { f'boot/{build_name}': 'pass', 'ltp-smoke/access01': 'pass', 'ltp-smoke/chdir01': 'skip', 'ltp-smoke/fork01': 'pass', 'ltp-smoke/time01': 'pass', 'ltp-smoke/wait02': 'pass', 'ltp-smoke/write01': 'pass', 'ltp-smoke/symlink01': 'pass', 'ltp-smoke/stat04': 'pass', 'ltp-smoke/utime01A': 'pass', 'ltp-smoke/rename01A': 'pass', 'ltp-smoke/splice02': 'pass', 'ltp-smoke/shell_test01': 'pass', 'ltp-smoke/ping01': 'skip', 'ltp-smoke/ping602': 'skip' } expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) fake_request.get(sanity_test_url, json=sanity_test_results) fake_request.get(build_url, json=build_results) fake_request.get(urljoin(test_url + '/', 'logs'), text=test_logs) fake_request.get(urljoin(test_url + '/', 'results'), json=test_results_json) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(test_logs, logs) self.assertEqual(5, fake_request.call_count) self.assertEqual('ltp-smoke', testjob.name) def test_follow_test_dependency_using_cached_testrun(self): job_id = 'TEST:tuxgroup@tuxproject#124' sanity_job_id = 'TEST:tuxgroup@tuxproject#112233' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) sanity_testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=sanity_job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/124') sanity_test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/112233') build_url = urljoin( TUXSUITE_URL, 
'/groups/tuxgroup/projects/tuxproject/builds/456') test_logs = 'dummy test log' test_results = { 'project': 'tuxgroup/tuxproject', 'uid': '124', 'tests': ['boot', 'ltp-smoke'], 'state': 'finished', 'result': 'pass', 'results': {'boot': 'pass', 'ltp-smoke': 'pass'}, 'plan': None, 'waiting_for': 'TEST#112233', } sanity_test_results = { 'project': 'tuxgroup/tuxproject', 'uid': '112233', 'waiting_for': 'BUILD#456', 'project': 'tuxgroup/tuxproject', 'tests': ['boot', 'ltp-smoke'], 'state': 'finished', 'result': 'pass', 'results': {'boot': 'pass', 'ltp-smoke': 'pass'}, 'plan': None, } build_results = { 'toolchain': 'gcc-10', 'kconfig': ['defconfig', 'CONFIG_DUMMY=1'], } build_name = self.tuxsuite.generate_test_name(build_results) # Real test results are stored in test/ci/backend/tuxsuite_test_result_sample.json with open('test/ci/backend/tuxsuite_test_result_sample.json') as test_result_file: test_results_json = json.load(test_result_file) with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) fake_request.get(sanity_test_url, json=sanity_test_results) fake_request.get(build_url, json=build_results) fake_request.get(urljoin(test_url + '/', 'logs'), text=test_logs) fake_request.get( urljoin(sanity_test_url + '/', 'logs'), text=test_logs) fake_request.get(urljoin(test_url + '/', 'results'), json=test_results_json) fake_request.get(urljoin(sanity_test_url + '/', 'results'), json=test_results_json) # Fetch sanity job first status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( sanity_testjob) receive = ReceiveTestRun( sanity_testjob.target, update_project_status=False) testrun, _ = receive( version=sanity_testjob.target_build.version, environment_slug=sanity_testjob.environment, metadata_file=json.dumps(metadata), tests_file=json.dumps(tests), metrics_file=json.dumps(metrics), log_file=logs, completed=completed, ) self.assertEqual(4, fake_request.call_count) # Now fetch test, and make sure no extra requests were made _, _, metadata, _, _, _ = self.tuxsuite.fetch(testjob) self.assertEqual(build_name, metadata['build_name']) self.assertEqual(7, fake_request.call_count) self.assertEqual('ltp-smoke', testjob.name) def test_fetch_test_results_unknown(self): job_id = 'TEST:tuxgroup@tuxproject#125' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) test_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/tests/125') test_results = { 'project': 'tuxgroup/tuxproject', 'device': 'qemu-armv7', 'uid': '124', 'kernel': 'https://storage.tuxboot.com/armv7/zImage', 'ap_romfw': None, 'mcp_fw': None, 'mcp_romfw': None, 'modules': None, 'parameters': {}, 'rootfs': None, 'scp_fw': None, 'scp_romfw': None, 'fip': None, 'tests': ['boot', 'ltp-smoke'], 'user': 'tuxbuild@linaro.org', 'user_agent': 'tuxsuite/0.43.6', 'state': 'finished', 'result': 'fail', 'results': {'boot': 'unknown', 'ltp-mm': 'unknown'}, 'plan': None, 'waiting_for': None, 'boot_args': None, 'provisioning_time': '2022-03-25T15:49:11.441860', 'running_time': '2022-03-25T15:50:11.770607', 'finished_time': '2022-03-25T15:52:42.672483', 'retries': 0, 'retries_messages': [], 'duration': 151 } with requests_mock.Mocker() as fake_request: fake_request.get(test_url, json=test_results) self.assertEqual(None, self.tuxsuite.fetch(testjob)) def test_cancel(self): job_id = 'TEST:tuxgroup@tuxproject#125' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) with requests_mock.Mocker() as fake_request: url = 
f'{TUXSUITE_URL}/groups/tuxgroup/projects/tuxproject/tests/125/cancel' fake_request.post(url, status_code=200) self.assertTrue(self.tuxsuite.cancel(testjob)) self.assertTrue(testjob.fetched) self.assertTrue(testjob.submitted) self.assertEqual("Canceled", testjob.job_status) # Mock a failed cancellation job_id = 'TEST:tuxgroup@tuxproject#126' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) with requests_mock.Mocker() as fake_request: url = f'{TUXSUITE_URL}/groups/tuxgroup/projects/tuxproject/tests/126/cancel' fake_request.post(url, status_code=400) self.assertFalse(self.tuxsuite.cancel(testjob)) def test_callback_is_supported(self): self.assertTrue(self.tuxsuite.supports_callbacks()) def test_validate_callback(self): request = Mock() request.headers = {} request.json = MagicMock(return_value={}) request.body = b'"{\\"content\\": 1}"' # Missing signature header with self.assertRaises(Exception) as ctx: self.tuxsuite.validate_callback(request, self.project) self.assertEqual( "tuxsuite request is missing signature headers", str(ctx.exception)) # Missing public key request.headers = { "x-tux-payload-signature": "does-not-work_6bUINPk62PaJb73C3bfKVvntgpr2Ii2TzQAiEA2D5-jKuh4xa4TkVhIA0UzvKERKKflpFjBH3hlsWivzI=", } with self.assertRaises(Exception) as ctx: self.tuxsuite.validate_callback(request, self.project) self.assertEqual( "missing tuxsuite public key for this project", str(ctx.exception)) # Invalid signature self.project.__settings__ = None self.project.project_settings = f"TUXSUITE_PUBLIC_KEY: \"{PUBLIC_SSH_KEY}\"" with self.assertRaises(InvalidSignature) as ctx: self.tuxsuite.validate_callback(request, self.project) self.assertEqual( "missing tuxsuite public key for this project", str(ctx.exception)) # Generate signature with testing private key content = b'{"signed": "content"}' content_bytes = b'"{\\"signed\\": \\"content\\"}"' key = serialization.load_pem_private_key( PRIVATE_SSH_KEY.encode("ascii"), None) signature = key.sign(content, ec.ECDSA(hashes.SHA256())) valid_signature = base64.urlsafe_b64encode(signature) request.headers = {"x-tux-payload-signature": valid_signature} request.body = content_bytes self.tuxsuite.validate_callback(request, self.project) def test_process_callback(self): # Test missing kind/status key with self.assertRaises(Exception) as ctx: self.tuxsuite.process_callback({}, None, None, None) self.assertEqual( "`kind` and `status` are required in the payload", str(ctx.exception)) # Test creating new testjob payload = { "kind": "test", "status": { "project": "tuxgroup/tuxproject", "uid": "123", "device": self.environment.slug, }, } self.assertFalse(TestJob.objects.filter( job_id="TEST:tuxgroup@tuxproject#123").exists()) testjob = self.tuxsuite.process_callback(json.dumps( payload), self.build, self.environment.slug, self.backend) self.assertEqual(json.dumps(payload["status"]), testjob.input) self.assertTrue(TestJob.objects.filter( job_id="TEST:tuxgroup@tuxproject#123").exists()) self.assertEqual(self.environment.slug, testjob.environment) # Test existing testjob payload["status"]["uid"] = "1234" testjob = TestJob.objects.create( backend=self.backend, target=self.project, target_build=self.build, environment=self.environment.slug, submitted=True, job_id="TEST:tuxgroup@tuxproject#1234", ) self.assertEqual(None, testjob.input) returned_testjob = self.tuxsuite.process_callback(json.dumps( payload), self.build, self.environment.slug, self.backend) self.assertEqual(testjob.id, returned_testjob.id) 
self.assertEqual(json.dumps(payload["status"]), returned_testjob.input) @patch("squad.ci.backend.tuxsuite.Backend.fetch_from_results_input") def test_fetch_oe_build_results(self, mock_fetch_from_results_input): job_id = 'OEBUILD:tuxgroup@tuxproject#123' testjob = self.build.test_jobs.create( target=self.project, backend=self.backend, job_id=job_id) build_url = urljoin( TUXSUITE_URL, '/groups/tuxgroup/projects/tuxproject/oebuilds/123') build_download_url = 'http://builds.tuxbuild.com/123' # Only fetch when finished with requests_mock.Mocker() as fake_request: fake_request.get(build_url, json={'state': 'running'}) results = self.tuxsuite.fetch(testjob) self.assertEqual(None, results) build_logs = 'dummy build log' build_results = { "artifacts": [], "bblayers_conf": [], "container": "ubuntu-20.04", "download_url": build_download_url, "environment": {}, "errors_count": 0, "extraconfigs": [], "is_canceling": False, "is_public": True, "local_conf": [], "name": "", "no_cache": False, "plan": "2UyDaiGYNeHEYPD7hGjuuqmgZIn", "project": "linaro/lkft", "provisioning_time": "2023-09-05T08:36:32.853409", "result": "pass", "sources": { "android": { "bazel": True, "branch": "common-android-mainline", "build_config": "//common:kernel_aarch64_dist", "manifest": "default.xml", "url": "https://android.googlesource.com/kernel/manifest" } }, "state": "finished", "token_name": "lkft-android-bot", "uid": "2UyDaslU6koW0a85VVEh3Pc2LNW", "user": "lkft@linaro.org", "user_agent": "tuxsuite/1.25.1", "waited_by": [], "warnings_count": 0 } expected_metadata = { 'download_url': build_download_url, 'sources': { 'android': { 'bazel': True, 'branch': 'common-android-mainline', 'build_config': '//common:kernel_aarch64_dist', 'manifest': 'default.xml', 'url': 'https://android.googlesource.com/kernel/manifest' } }, 'job_url': build_url, 'job_id': job_id, } expected_tests = { 'build/build': 'pass', } expected_metrics = {} with requests_mock.Mocker() as fake_request: fake_request.get(build_url, json=build_results) fake_request.get(urljoin(build_download_url, 'build.log'), text=build_logs) status, completed, metadata, tests, metrics, logs = self.tuxsuite.fetch( testjob) self.assertEqual('Complete', status) self.assertTrue(completed) self.assertEqual(sorted(expected_metadata.items()), sorted(metadata.items())) self.assertEqual(sorted(expected_tests.items()), sorted(tests.items())) self.assertEqual(sorted(expected_metrics.items()), sorted(metrics.items())) self.assertEqual(build_logs, logs) mock_fetch_from_results_input.assert_not_called()
class TuxSuiteTest(TestCase):
    def setUp(self):
        pass

    def test_detect(self):
        pass

    def test_not_implemented(self):
        pass

    def test_generate_test_name(self):
        pass

    def test_parse_job_id(self):
        pass

    def test_job_url(self):
        pass

    def test_parse_build_metadata(self):
        pass

    def test_parse_test_no_metadata(self):
        pass

    def test_parse_test_metadata(self):
        pass

    def test_fetch_url(self):
        pass

    def test_fetch_url_faulty_url(self):
        pass

    @patch("squad.ci.backend.tuxsuite.Backend.fetch_from_results_input")
    def test_fetch_build_results(self, mock_fetch_from_results_input):
        pass

    @patch("squad.ci.backend.tuxsuite.Backend.fetch_from_results_input")
    def test_retry_fetching_build_results(self, mock_fetch_from_results_input):
        pass

    def test_fetch_build_with_given_up_infra_error(self):
        '''this will test that the backend will still fetch the build despite its errored state'''
        pass

    def test_fetch_test_results(self):
        pass

    def test_fetch_test_results_no_build_name_for_oebuilds(self):
        pass

    def test_fetch_results_from_testjob_input(self):
        pass

    def test_fetch_test_failed_results(self):
        pass

    def test_fetch_test_infrastructure_error(self):
        pass

    def test_fetch_test_results_for_test_with_failed_build(self):
        pass

    def test_fetch_test_results_for_test_with_failed_sanity_test(self):
        pass

    def test_follow_test_dependency(self):
        pass

    def test_follow_test_dependency_using_cached_testrun(self):
        pass

    def test_fetch_test_results_unknown(self):
        pass

    def test_cancel(self):
        pass

    def test_callback_is_supported(self):
        pass

    def test_validate_callback(self):
        pass

    def test_process_callback(self):
        pass

    @patch("squad.ci.backend.tuxsuite.Backend.fetch_from_results_input")
    def test_fetch_oe_build_results(self, mock_fetch_from_results_input):
        pass
33
1
56
5
50
2
1
0.04
1
12
6
0
29
6
29
29
1,658
166
1,467
255
1,434
62
484
224
454
2
1
2
30
145,583
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_listen.py
test.ci.test_listen.TestCommand
class TestCommand(TestCase):

    @patch("squad.ci.management.commands.listen.ListenerManager")
    def test_handle(self, ListenerManager):
        command = Command()
        command.handle()
        ListenerManager.assert_called_once()
        ListenerManager.return_value.run.assert_called_once()

    @patch("squad.ci.management.commands.listen.Backend")
    @patch("squad.ci.management.commands.listen.ListenerManager")
    @patch("squad.ci.management.commands.listen.Listener")
    def test_handle_listener(self, Listener, ListenerManager, Backend):
        backend = object()
        Backend.objects.get.return_value = backend
        command = Command()
        command.handle(BACKEND='foo')
        ListenerManager.assert_not_called()
        Listener.assert_called_with(backend)
        Listener.return_value.run.assert_called()
class TestCommand(TestCase):
    @patch("squad.ci.management.commands.listen.ListenerManager")
    def test_handle(self, ListenerManager):
        pass

    @patch("squad.ci.management.commands.listen.Backend")
    @patch("squad.ci.management.commands.listen.ListenerManager")
    @patch("squad.ci.management.commands.listen.Listener")
    def test_handle_listener(self, Listener, ListenerManager, Backend):
        pass
7
0
8
2
7
0
1
0
1
2
1
0
2
0
2
2
23
5
18
8
11
0
14
6
11
1
1
0
2
145,584
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_listen.py
test.ci.test_listen.TestListenerManager
class TestListenerManager(TestCase):

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_start(self, Popen):
        backend = Backend.objects.create(name="foo")
        manager = ListenerManager()
        manager.start(backend)
        self.assertEqual(Popen.call_args[0][-1][-1], "foo")

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_stop(self, Popen):
        backend = Backend.objects.create(name="foo")
        manager = ListenerManager()
        Popen.return_value.poll.return_value = None
        manager.start(backend)
        manager.stop(backend.id)
        Popen.return_value.poll.assert_called_once()
        Popen.return_value.terminate.assert_called_once()
        Popen.return_value.wait.assert_called_once()

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_cleanup(self, Popen):
        backend1 = Backend.objects.create(name="foo")
        backend2 = Backend.objects.create(name="bar")
        manager = ListenerManager()
        manager.start(backend1)
        manager.start(backend2)
        manager.stop = MagicMock()
        manager.cleanup()
        manager.stop.assert_has_calls(
            [call(backend1.id), call(backend2.id)], any_order=True)

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_added(self, Popen):
        manager = ListenerManager()
        backend1 = Backend.objects.create(name="foo")
        manager.start = MagicMock()
        manager.stop = MagicMock()

        # start existing backends
        manager.keep_listeners_running()
        manager.start.assert_called_with(backend1)

        # new backend, start it too
        backend2 = Backend.objects.create(name="bar")
        manager.keep_listeners_running()
        manager.start.assert_has_calls(
            [call(backend1), call(backend2)], any_order=True)
        manager.stop.assert_not_called()

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_removed(self, Popen):
        manager = ListenerManager()
        backend = Backend.objects.create(name="foo")
        manager.stop = MagicMock()

        # start existing backends
        manager.keep_listeners_running()

        # backend is removed
        bid = backend.id
        backend.delete()
        manager.keep_listeners_running()
        manager.stop.assert_called_with(bid)

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_changed(self, Popen):
        manager = ListenerManager()
        backend = Backend.objects.create(name="foo")

        # start existing backends
        manager.keep_listeners_running()
        manager.stop = MagicMock()
        manager.start = MagicMock()

        # backend is changed
        backend.name = 'bar'
        backend.save()
        manager.keep_listeners_running()
        manager.stop.assert_called_with(backend.id)
        manager.start.assert_called_with(backend)

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_restart_dead_process(self, Popen):
        manager = ListenerManager()
        backend = Backend.objects.create(name="foo")

        # start existing backends
        manager.keep_listeners_running()
        self.assertEqual(1, len(manager.__processes__))
        Popen.assert_called()

        # "kill" the process
        Popen.return_value.poll.return_value = -15  # SIGKILL

        manager.stop = MagicMock()
        manager.start = MagicMock()

        # Give it another go
        manager.keep_listeners_running()
        manager.stop.assert_called_with(backend.id)
        manager.start.assert_called_with(backend)
class TestListenerManager(TestCase):
    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_start(self, Popen):
        pass

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_stop(self, Popen):
        pass

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_cleanup(self, Popen):
        pass

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_added(self, Popen):
        pass

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_removed(self, Popen):
        pass

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_changed(self, Popen):
        pass

    @patch('squad.ci.management.commands.listen.subprocess.Popen')
    def test_keep_listeners_running_restart_dead_process(self, Popen):
        pass
15
0
14
3
9
1
1
0.14
1
3
2
0
7
0
7
7
114
31
74
32
59
10
67
25
59
1
1
0
7
145,585
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_models.py
test.ci.test_models.BackendFetchTest
class BackendFetchTest(BackendTestBase): @patch("squad.ci.backend.null.Backend.fetch") def test_fetch_skips_already_fetched(self, fetch): test_job = self.create_test_job(submitted=True, fetched=True) self.backend.fetch(test_job.id) fetch.assert_not_called() @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch(self, get_implementation, __now__): impl = MagicMock() impl.fetch = MagicMock(return_value=None) get_implementation.return_value = impl test_job = self.create_test_job() self.backend.fetch(test_job.id) test_job.refresh_from_db() self.assertEqual(NOW, test_job.last_fetch_attempt) self.assertFalse(test_job.fetched) self.assertIsNone(test_job.job_status) get_implementation.assert_called() impl.fetch.assert_called() @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch_creates_testrun(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {"foo": "pass"} metrics = {"bar": {"value": 1, "unit": ""}} results = ('Complete', True, metadata, tests, metrics, "abc") project_status = self.build.status tests_pass_so_far = project_status.tests_pass impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value="http://www.example.com") get_implementation.return_value = impl test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) # should not crash test_run = core_models.TestRun.objects.get( build__project=self.project, environment__slug='myenv', build__version='1', job_id='999', job_status='Complete', completed=True, ) self.assertEqual( 1, core_models.Test.objects.filter( test_run=test_run, metadata__name="foo", result=True, ).count() ) self.assertEqual( 1, core_models.Metric.objects.filter( test_run=test_run, metadata__name="bar", result=1, ).count() ) project_status.refresh_from_db() self.assertEqual(project_status.tests_pass, tests_pass_so_far + 1) test_job.refresh_from_db() self.assertTrue(test_job.fetched) self.assertEqual("Complete", test_job.job_status) @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch_sets_fetched_on_invalid_metadata(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {"foo": "pass"} metrics = {"bar": {"value": 1, "unit": "nuggets"}} results = ('Complete', True, metadata, tests, metrics, "abc") impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value="http://www.example.com") get_implementation.return_value = impl environment = self.project.environments.create(slug='myenv') self.build.test_runs.create( environment=environment, job_id='999', job_status='Complete', completed=True, ) test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) test_job.refresh_from_db() self.assertTrue(test_job.fetched) self.assertIsNone(test_job.failure) @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch_with_empty_results(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {} metrics = {} results = ('Complete', True, metadata, tests, metrics, "abc") impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value="http://www.example.com") get_implementation.return_value = impl 
test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) # should not crash test_run = core_models.TestRun.objects.get( build__project=self.project, environment__slug='myenv', build__version='1', job_id='999', job_status='Complete', ) test_job.refresh_from_db() self.assertTrue(test_job.can_resubmit) self.assertFalse(test_run.completed) @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch_with_only_results(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {"foo": "pass"} metrics = {} results = ('Complete', True, metadata, tests, metrics, "abc") impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value="http://www.example.com") get_implementation.return_value = impl test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) # should not crash test_run = core_models.TestRun.objects.get( build__project=self.project, environment__slug='myenv', build__version='1', job_id='999', job_status='Complete', ) test_job.refresh_from_db() self.assertFalse(test_job.can_resubmit) self.assertTrue(test_run.completed) @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch_with_only_metrics(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {} metrics = {"foo": {"value": 10, "unit": "boxes"}} results = ('Complete', True, metadata, tests, metrics, "abc") impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value="http://www.example.com") get_implementation.return_value = impl test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) # should not crash test_run = core_models.TestRun.objects.get( build__project=self.project, environment__slug='myenv', build__version='1', job_id='999', job_status='Complete', ) test_job.refresh_from_db() self.assertFalse(test_job.can_resubmit) self.assertTrue(test_run.completed) @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_create_testrun_job_url(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {"foo": "pass"} metrics = {"bar": {"value": 1, "unit": "donuts"}} results = ('Complete', True, metadata, tests, metrics, "abc") test_job_url = "http://www.example.com" impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value=test_job_url) get_implementation.return_value = impl test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) # should not crash test_run = core_models.TestRun.objects.get( build__project=self.project, environment__slug='myenv', build__version='1', job_id='999', job_status='Complete', completed=True, ) test_job.refresh_from_db() self.assertEqual(test_run.job_url, test_job_url) @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch_ignores_results_from_incomplete_job(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {"foo": "pass"} metrics = {"bar": {"value": 1, "unit": ""}} results = ('Incomplete', False, metadata, tests, metrics, "abc") # ^^^^^ job 
resulted in an infra failure impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value="http://www.example.com") get_implementation.return_value = impl test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) test_job.refresh_from_db() self.assertTrue(test_job.can_resubmit) # should not crash testrun = core_models.TestRun.objects.get( build__project=self.project, environment__slug='myenv', build__version='1', job_id='999', job_status='Incomplete', completed=False, # even if results are not empty ) # no results get recorded self.assertEqual(0, testrun.tests.count()) self.assertEqual(0, testrun.metrics.count()) @patch('django.utils.timezone.now', return_value=NOW) @patch('squad.ci.models.Backend.get_implementation') def test_fetch_sets_testjob_can_resubmit_and_testrun_completed2(self, get_implementation, __now__): metadata = {"foo": "bar"} tests = {} metrics = {} results = ('Incomplete', False, metadata, tests, metrics, "abc") # ^^^^^ job resulted in an infra failure impl = MagicMock() impl.fetch = MagicMock(return_value=results) impl.job_url = MagicMock(return_value="http://www.example.com") get_implementation.return_value = impl test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) test_job.refresh_from_db() self.assertTrue(test_job.can_resubmit) # should not crash core_models.TestRun.objects.get( build__project=self.project, environment__slug='myenv', build__version='1', job_id='999', job_status='Incomplete', completed=False, ) @patch('squad.ci.backend.null.Backend.job_url', return_value="http://example.com/123") @patch('squad.ci.backend.null.Backend.fetch') @patch('squad.ci.models.ReceiveTestRun.__call__') def test_fetch_sets_fetched_at(self, receive, backend_fetch, backend_job_url): backend_fetch.return_value = ('Completed', True, {}, {}, {}, None) env = self.project.environments.create(slug='foo') receive.return_value = ( self.build.test_runs.create(environment=env), None) test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) test_job.refresh_from_db() self.assertIsNotNone(test_job.fetched_at) @patch('squad.ci.models.Backend.__postprocess_testjob__') @patch('squad.ci.backend.null.Backend.job_url', return_value="http://example.com/123") @patch('squad.ci.backend.null.Backend.fetch') @patch('squad.ci.models.ReceiveTestRun.__call__') def test_fetch_postprocessing(self, receive, backend_fetch, backend_job_url, postprocess): self.project.enabled_plugins_list = ['linux_log_parser'] self.project.save() backend_fetch.return_value = ('Completed', True, {}, {}, {}, None) env = self.project.environments.create(slug='foo') receive.return_value = ( self.build.test_runs.create(environment=env), None) test_job = self.create_test_job( backend=self.backend, definition='foo: 1', environment='myenv', job_id='999', ) self.backend.fetch(test_job.id) postprocess.assert_called()
class BackendFetchTest(BackendTestBase):

    @patch("squad.ci.backend.null.Backend.fetch")
    def test_fetch_skips_already_fetched(self, fetch):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch_creates_testrun(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch_sets_fetched_on_invalid_metadata(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch_with_empty_results(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch_with_only_results(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch_with_only_metrics(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_create_testrun_job_url(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch_ignores_results_from_incomplete_job(self, get_implementation, __now__):
        pass

    @patch('django.utils.timezone.now', return_value=NOW)
    @patch('squad.ci.models.Backend.get_implementation')
    def test_fetch_sets_testjob_can_resubmit_and_testrun_completed2(self, get_implementation, __now__):
        pass

    @patch('squad.ci.backend.null.Backend.job_url', return_value="http://example.com/123")
    @patch('squad.ci.backend.null.Backend.fetch')
    @patch('squad.ci.models.ReceiveTestRun.__call__')
    def test_fetch_sets_fetched_at(self, receive, backend_fetch, backend_job_url):
        pass

    @patch('squad.ci.models.Backend.__postprocess_testjob__')
    @patch('squad.ci.backend.null.Backend.job_url', return_value="http://example.com/123")
    @patch('squad.ci.backend.null.Backend.fetch')
    @patch('squad.ci.models.ReceiveTestRun.__call__')
    def test_fetch_postprocessing(self, receive, backend_fetch, backend_job_url, postprocess):
        pass
39
0
27
4
23
1
1
0.04
1
4
3
0
12
2
12
14
367
55
302
92
263
11
161
78
148
1
2
0
12
145,586
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_models.py
test.ci.test_models.BackendSubmitTest
class BackendSubmitTest(BackendTestBase):

    @patch('squad.ci.models.Backend.get_implementation')
    def test_submit(self, get_implementation):
        test_job = self.create_test_job()
        impl = MagicMock()
        impl.submit = MagicMock(return_value=['999'])
        get_implementation.return_value = impl

        self.backend.submit(test_job)
        test_job.refresh_from_db()

        impl.submit.assert_called()
        self.assertTrue(test_job.submitted)
        self.assertIsNotNone(test_job.submitted_at)
        self.assertEqual('999', test_job.job_id)
class BackendSubmitTest(BackendTestBase):

    @patch('squad.ci.models.Backend.get_implementation')
    def test_submit(self, get_implementation):
        pass
3
0
13
2
11
0
1
0
1
1
0
0
1
0
1
3
16
3
13
5
10
0
12
4
10
1
2
0
1
145,587
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/test/ci/test_models.py
test.ci.test_models.TestJobTest
class TestJobTest(TestCase): def setUp(self): self.group = core_models.Group.objects.create(slug='mygroup') self.project = self.group.projects.create(slug='myproject') self.build = self.project.builds.create(version='1') self.backend = models.Backend.objects.create( url='http://example.com', username='foobar', token='mypassword', ) def test_basics(self): testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, ) self.assertIsNone(testjob.job_id) @patch('squad.ci.models.Backend.get_implementation') def test_cancel(self, get_implementation): test_job = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, submitted=True, job_id=123 ) impl = MagicMock() impl.cancel = MagicMock(return_value=True) get_implementation.return_value = impl test_job.cancel() impl.cancel.assert_called() @patch('squad.ci.models.Backend.get_implementation') def test_cancel_not_submitted(self, get_implementation): test_job = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, submitted=False ) impl = MagicMock() impl.cancel = MagicMock(return_value=True) get_implementation.return_value = impl test_job.cancel() impl.cancel.assert_not_called() test_job.refresh_from_db() self.assertTrue(test_job.fetched) self.assertTrue(test_job.submitted) self.assertIsNotNone(test_job.failure) @patch('squad.ci.backend.null.Backend.resubmit', return_value="1") def test_records_resubmitted_count(self, backend_resubmit): testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, submitted=True, can_resubmit=True, job_id="12345", ) testjob.resubmit() self.assertEqual(1, testjob.resubmitted_count) @patch('squad.ci.backend.null.Backend.resubmit', return_value="1") def test_delete_results_resubmitted_job(self, backend_resubmit): env, _ = self.project.environments.get_or_create(slug='myenv') testrun = self.build.test_runs.create(environment=env) suite, _ = self.project.suites.get_or_create(slug='mysuite') metadata = core_models.SuiteMetadata.objects.create( suite=suite.slug, name='mytest', kind='test') testrun.tests.create(metadata=metadata, suite=suite, result=True, environment=env, build=self.build) testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, testrun=testrun, submitted=True, can_resubmit=True, job_id="12345", ) testjob.resubmit() self.assertEqual(1, testjob.resubmitted_count) # Check that the original testrun still exists testrun.refresh_from_db() self.assertEqual(1, self.build.tests.count()) # Configure project to remove TestJob's results on resubmission self.project.project_settings = '{"CI_DELETE_RESULTS_RESUBMITTED_JOBS": true}' self.project.save() testjob.can_resubmit = True testjob.save() testjob.refresh_from_db() testjob.resubmit() testjob.refresh_from_db() self.assertEqual(2, testjob.resubmitted_count) # Check that the original testrun still exists with self.assertRaises(core_models.TestRun.DoesNotExist): testrun.refresh_from_db() self.assertEqual(0, self.build.tests.count()) def mock_backend_fetch(test_job): status = 'Complete' completed = True metadata = {} tests = {'mysuite/mytest': 'pass'} metrics = {} logs = '' return status, completed, metadata, tests, metrics, logs @patch('requests.post') @patch('squad.ci.backend.null.Backend.fetch', side_effect=mock_backend_fetch) 
@patch('squad.ci.backend.null.Backend.resubmit', return_value="1") @patch('squad.ci.backend.null.Backend.submit', return_value=["1"]) @patch('squad.ci.backend.null.Backend.job_url', return_value="http://job.url/") @patch('squad.core.tasks.notification.notify_patch_build_finished.delay') def test_resubmitted_job_retriggers_build_events(self, patch_notification, job_url, backend_submit, backend_resubmit, fetch, post): callback_url = 'http://callback.com/' self.build.callbacks.create( url=callback_url, event=core_models.Callback.events.ON_BUILD_FINISHED) testjob = self.build.test_jobs.create( target=self.project, environment='myenv', backend=self.backend, job_id='12345', submitted=True, ) self.backend.fetch(testjob.id) self.build.refresh_from_db() testjob.refresh_from_db() # Ensures build is finished and events are triggered self.assertTrue(self.build.status.finished) self.assertTrue(self.build.status.notified) self.assertTrue(self.build.patch_notified) post.assert_called_with(callback_url) patch_notification.assert_called_with(self.build.id) self.assertEqual(1, self.build.tests.count()) self.assertEqual(1, self.build.callbacks.filter(is_sent=True).count()) # Reset mocks post.reset_mock() patch_notification.reset_mock() # Submit a new job, make sure build events are NOT reset by default submit_testjob = self.build.test_jobs.create( target=self.project, environment='myenv', backend=self.backend, job_status='Complete', ) self.backend.submit(submit_testjob) self.assertTrue(self.build.status.finished) self.assertTrue(self.build.status.notified) self.assertTrue(self.build.patch_notified) self.assertEqual(1, self.build.callbacks.filter(is_sent=True).count()) # Now fetch it, and make sure events DID NOT get triggered submit_testjob.job_id = "2" submit_testjob.save() self.backend.fetch(submit_testjob.id) post.assert_not_called() patch_notification.assert_not_called() # Repeat steps above, resubmit is used instead of submit resubmit_testjob = self.build.test_jobs.create( target=self.project, environment='myenv', backend=self.backend, job_status='Complete', ) resubmit_testjob.resubmit() self.assertTrue(self.build.status.finished) self.assertTrue(self.build.status.notified) self.assertTrue(self.build.patch_notified) self.assertEqual(1, self.build.callbacks.filter(is_sent=True).count()) # Now fetch it, and make sure events DID NOT get triggered resubmit_testjob.job_id = "3" resubmit_testjob.save() self.backend.fetch(resubmit_testjob.id) post.assert_not_called() patch_notification.assert_not_called() # Time for the truth! 
Configure project settings to allow build events # to get reset on submit/resubmit self.project.project_settings = '{"CI_RESET_BUILD_EVENTS_ON_JOB_RESUBMISSION": true}' self.project.__settings__ = None self.project.save() # Submit a new job, make sure build events ARE reset due to project setting submit_testjob = self.build.test_jobs.create( target=self.project, environment='myenv', backend=self.backend, job_status='Complete', ) self.backend.submit(submit_testjob) self.assertFalse(self.build.status.finished) self.assertFalse(self.build.status.notified) self.assertFalse(self.build.patch_notified) self.assertEqual(0, self.build.callbacks.filter(is_sent=True).count()) # Now fetch it, and make sure events GET triggered submit_testjob.job_id = "4" submit_testjob.save() self.backend.fetch(submit_testjob.id) self.build.refresh_from_db() self.build.status.refresh_from_db() submit_testjob.refresh_from_db() self.assertTrue(self.build.status.finished) self.assertTrue(self.build.status.notified) self.assertTrue(self.build.patch_notified) post.assert_called_with(callback_url) patch_notification.assert_called_with(self.build.id) self.assertEqual(4, self.build.tests.count()) self.assertEqual(1, self.build.callbacks.filter(is_sent=True).count()) # Reset mocks post.reset_mock() patch_notification.reset_mock() # Resubmit a new job, make sure build events ARE reset due to project setting resubmit_testjob = self.build.test_jobs.create( target=self.project, environment='myenv', backend=self.backend, job_status='Complete', ) self.backend.submit(resubmit_testjob) self.assertFalse(self.build.status.finished) self.assertFalse(self.build.status.notified) self.assertFalse(self.build.patch_notified) self.assertEqual(0, self.build.callbacks.filter(is_sent=True).count()) # Now fetch it, and make sure events GET triggered resubmit_testjob.job_id = "5" resubmit_testjob.save() self.backend.fetch(resubmit_testjob.id) self.build.refresh_from_db() self.build.status.refresh_from_db() resubmit_testjob.refresh_from_db() self.assertTrue(self.build.status.finished) self.assertTrue(self.build.status.notified) self.assertTrue(self.build.patch_notified) post.assert_called_with(callback_url) patch_notification.assert_called_with(self.build.id) self.assertEqual(5, self.build.tests.count()) self.assertEqual(1, self.build.callbacks.filter(is_sent=True).count()) @patch('squad.ci.backend.null.Backend.resubmit', side_effect=SubmissionIssue) def test_force_resubmit_exception(self, backend_resubmit): testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, submitted=True, can_resubmit=True, job_id="12345", ) testjob.force_resubmit() self.assertEqual(0, testjob.resubmitted_jobs.count()) self.assertEqual(0, testjob.resubmitted_count) self.assertEqual("12345", testjob.job_id) @patch('squad.ci.backend.null.Backend.submit', return_value=["12345"]) def test_force_resubmit_unsubmitted_job(self, backend_resubmit): # By "unsubmitted", maybe a cancelled before submission or a submission that went wrong, and the # user just wants to force_resubmit it testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, ) testjob.force_resubmit() self.assertEqual(0, testjob.resubmitted_jobs.count()) self.assertEqual(0, testjob.resubmitted_count) self.assertEqual('12345', testjob.job_id) def test_show_definition_hides_secrets(self): definition = "foo: bar\nsecrets:\n baz: qux\n" testjob = models.TestJob( 
definition=definition ) display = yaml.safe_load(testjob.show_definition) self.assertNotEqual('qux', display['secrets']['baz']) def test_show_definition_non_dict(self): definition = "something that doesn't matter" testjob = models.TestJob( definition=definition ) display = yaml.safe_load(testjob.show_definition) self.assertEqual(definition, display) def test_job_input(self): testjob = models.TestJob.objects.create( target=self.project, target_build=self.build, environment='myenv', backend=self.backend, ) self.assertIsNone(testjob.input) testjob.input = "testing" self.assertEqual("testing", testjob.input)
class TestJobTest(TestCase): def setUp(self): pass def test_basics(self): pass @patch('squad.ci.models.Backend.get_implementation') def test_cancel(self, get_implementation): pass @patch('squad.ci.models.Backend.get_implementation') def test_cancel_not_submitted(self, get_implementation): pass @patch('squad.ci.backend.null.Backend.resubmit', return_value="1") def test_records_resubmitted_count(self, backend_resubmit): pass @patch('squad.ci.backend.null.Backend.resubmit', return_value="1") def test_delete_results_resubmitted_job(self, backend_resubmit): pass def mock_backend_fetch(test_job): pass @patch('requests.post') @patch('squad.ci.backend.null.Backend.fetch', side_effect=mock_backend_fetch) @patch('squad.ci.backend.null.Backend.resubmit', return_value="1") @patch('squad.ci.backend.null.Backend.submit', return_value=["1"]) @patch('squad.ci.backend.null.Backend.job_url', return_value="http://job.url/") @patch('squad.core.tasks.notification.notify_patch_build_finished.delay') def test_resubmitted_job_retriggers_build_events(self, patch_notification, job_url, backend_submit, backend_resubmit, fetch, post): pass @patch('squad.ci.backend.null.Backend.resubmit', side_effect=SubmissionIssue) def test_force_resubmit_exception(self, backend_resubmit): pass @patch('squad.ci.backend.null.Backend.submit', return_value=["12345"]) def test_force_resubmit_unsubmitted_job(self, backend_resubmit): pass def test_show_definition_hides_secrets(self): pass def test_show_definition_non_dict(self): pass def test_job_input(self): pass
26
0
23
2
20
1
1
0.07
1
6
5
0
13
4
13
13
325
39
268
55
242
18
169
48
155
1
1
1
13
145,588
Linaro/squad
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Linaro_squad/squad/frontend/project_settings.py
squad.frontend.project_settings.ProjectFormAdvanced.Meta
class Meta:
    model = Project
    fields = ['project_settings']
class Meta: pass
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
3
0
3
3
2
0
3
3
2
0
0
0
0
145,589
Linaro/squad
Linaro_squad/test/core/test_import_data.py
test.core.test_import_data.TestDryRun
class TestDryRun(TestCase):

    def test_dry_run(self):
        self.importer = Command()
        self.importer.silent = True
        d = os.path.join(os.path.dirname(__file__), 'test_import_data_input')
        call_command('import_data', '--silent', '--dry-run', 'foo/bar', d)
        self.assertEqual(0, Group.objects.count())
        self.assertEqual(0, Project.objects.count())
        self.assertEqual(0, Build.objects.count())
        self.assertEqual(0, TestRun.objects.count())
class TestDryRun(TestCase):

    def test_dry_run(self):
        pass
2
0
9
0
9
0
1
0
1
4
4
0
1
1
1
1
11
1
10
4
8
0
10
4
8
1
1
0
1
145,590
Linaro/squad
Linaro_squad/squad/frontend/apps.py
squad.frontend.apps.FrontendConfig
class FrontendConfig(AppConfig):
    default_auto_field = 'django.db.models.AutoField'
    name = 'squad.frontend'
class FrontendConfig(AppConfig): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
3
0
3
3
2
0
3
3
2
0
1
0
0
145,591
Linaro/squad
Linaro_squad/test/core/test_history.py
test.core.test_history.TestHistoryTest
class TestHistoryTest(TestCase): def receive_test_run(self, project, version, env, tests): receive = ReceiveTestRun(project, update_project_status=False) receive(version, env, tests_file=json.dumps(tests)) def setUp(self): self.group = models.Group.objects.create(slug='mygruop') self.project1 = self.group.projects.create(slug='project1') self.receive_test_run(self.project1, '0', 'env1', { 'foo/bar': 'fail', # missing `root` on purpose }) now = timezone.now() past = now - relativedelta(hours=1) self.project1.builds.create(version='1', datetime=past) self.receive_test_run(self.project1, '1', 'env1', { 'foo/bar': 'fail', 'root': 'fail', }) self.receive_test_run(self.project1, '1', 'env2', { 'foo/bar': 'pass', 'root': 'pass', }) self.project1.builds.create(version='2', datetime=now) self.receive_test_run(self.project1, '2', 'env1', { 'foo/bar': 'pass', 'root': 'pass', }) self.receive_test_run(self.project1, '2', 'env2', { 'foo/bar': 'fail', 'root': 'fail', }) def test_environments(self): history = TestHistory(self.project1, 'foo/bar') env1 = self.project1.environments.get(slug='env1') env2 = self.project1.environments.get(slug='env2') self.assertEqual([env1.id, env2.id], sorted([e.id for e in history.environments])) def test_results(self): history = TestHistory(self.project1, 'foo/bar') build1 = self.project1.builds.get(version='1') build2 = self.project1.builds.get(version='2') env1 = self.project1.environments.get(slug='env1') env2 = self.project1.environments.get(slug='env2') self.assertEqual('fail', history.results[build1][env1.id].status) self.assertEqual('pass', history.results[build1][env2.id].status) self.assertEqual('pass', history.results[build2][env1.id].status) self.assertEqual('fail', history.results[build2][env2.id].status) def test_results_no_suite(self): history = TestHistory(self.project1, 'root') build1 = self.project1.builds.get(version='1') build2 = self.project1.builds.get(version='2') env1 = self.project1.environments.get(slug='env1') env2 = self.project1.environments.get(slug='env2') self.assertEqual('fail', history.results[build1][env1.id].status) self.assertEqual('pass', history.results[build1][env2.id].status) self.assertEqual('pass', history.results[build2][env1.id].status) self.assertEqual('fail', history.results[build2][env2.id].status) def test_displays_all_builds(self): build0 = self.project1.builds.get(version='0') history = TestHistory(self.project1, 'root') self.assertIn(build0, history.results.keys()) def test_pagination(self): build1 = self.project1.builds.get(version='1') build2 = self.project1.builds.get(version='2') history = TestHistory(self.project1, 'root', page=1, per_page=1) self.assertIn(build2, history.results.keys()) self.assertNotIn(build1, history.results.keys()) def test_pin_top_build(self): build1 = self.project1.builds.get(version='1') build2 = self.project1.builds.get(version='2') history = TestHistory(self.project1, 'root', top=build1) self.assertIn(build1, history.results.keys()) self.assertNotIn(build2, history.results.keys()) self.assertEqual(build1, history.top) def test_no_metadata(self): testrun = self.project1.builds.last().test_runs.last() suite = testrun.tests.last().suite test_name = 'no_metadata_test' metadata = models.SuiteMetadata.objects.create(kind='test', suite=suite.slug, name=test_name) testrun.tests.create(result=False, suite=suite, metadata=metadata, build=testrun.build, environment=testrun.environment) history = TestHistory(self.project1, test_name) self.assertIsNotNone(history.results) def test_no_build(self): 
empty_project = self.group.projects.create(slug='empty-project') test_name = 'fake_test' with self.assertRaises(models.Build.DoesNotExist): TestHistory(empty_project, test_name)
class TestHistoryTest(TestCase): def receive_test_run(self, project, version, env, tests): pass def setUp(self): pass def test_environments(self): pass def test_results(self): pass def test_results_no_suite(self): pass def test_displays_all_builds(self): pass def test_pagination(self): pass def test_pin_top_build(self): pass def test_no_metadata(self): pass def test_no_build(self): pass
11
0
10
2
8
0
1
0.01
1
5
4
0
10
2
10
10
111
25
85
44
74
1
71
44
60
1
1
1
10
145,592
Linaro/squad
Linaro_squad/test/api/test_data.py
test.api.test_data.ApiDataTest
class ApiDataTest(TestCase): def setUp(self): self.group = models.Group.objects.create(slug='mygroup') self.project = self.group.projects.create(slug='myproject') self.client = APIClient('thekey') def receive(self, datestr, metrics={}, tests={}): receive = ReceiveTestRun(self.project) receive( version=datestr, environment_slug="env1", metadata_file=json.dumps({"datetime": datestr + "T00:00:00+00:00", "job_id": '1'}), metrics_file=json.dumps(metrics), tests_file=json.dumps(tests), ) def get_json(self, url): with count_queries('url:' + url): response = self.client.get_json(url) return response def test_basics(self): self.receive("2016-09-01", metrics={ "foo": {"value": 1, "unit": "kb"}, "bar/baz": {"value": 2, "unit": "kb"}, }) self.receive("2016-09-02", metrics={ "foo": {"value": 2, "unit": ""}, "bar/baz": {"value": 3, "unit": "minutes"} }) resp = self.get_json('/api/data/mygroup/myproject?metric=foo&metric=bar/baz&environment=env1') json = resp.data self.assertEqual(dict, type(json['foo'])) first = json['foo']['env1'][0] second = json['foo']['env1'][1] self.assertEqual([1472688000, 1.0], first[0:2]) self.assertEqual([1472774400, 2.0], second[0:2]) first = json['bar/baz']['env1'][0] second = json['bar/baz']['env1'][1] self.assertEqual([1472688000, 2.0], first[0:2]) self.assertEqual([1472774400, 3.0], second[0:2]) self.assertEqual('application/json; charset=utf-8', resp.http['Content-Type']) def test_metrics_csv(self): self.receive("2018-09-17", metrics={ "foo": {"value": 1, "unit": ""}, "bar/baz": {"value": 2, "unit": ""} }) self.receive("2018-09-18", metrics={ "foo": {"value": 2, "unit": "hours"}, "bar/baz": {"value": 3, "unit": "minutes"} }) resp = self.client.get('/api/data/mygroup/myproject?metric=foo&environment=env1&format=csv') data = resp.content.decode('utf-8').split("\n") self.assertIn('"foo","env1","1537142400","1.0","2018-09-17",""', data[0]) self.assertIn('"foo","env1","1537228800","2.0","2018-09-18",""', data[1]) def test_tests(self): self.receive("2017-01-01", tests={ "foo": "pass", "bar": "fail", }) self.receive("2017-01-02", tests={ "foo": "pass", "bar": "pass", }) response = self.get_json('/api/data/mygroup/myproject?metric=:tests:&environment=env1') json = response.data first = json[':tests:']['env1'][0] second = json[':tests:']['env1'][1] self.assertEqual([1483228800, 50, '2017-01-01', ''], first) self.assertEqual([1483315200, 100, '2017-01-02', ''], second) def test_no_auth_on_non_public_project(self): self.project.is_public = False self.project.save() unauthenticated_client = Client() resp = unauthenticated_client.get('/api/data/mygroup/myproject?metric=foo&metric=bar/baz&environment=env1') self.assertEqual(401, resp.status_code) def test_no_auth_on_public_project(self): unauthenticated_client = Client() resp = unauthenticated_client.get('/api/data/mygroup/myproject?metric=foo&metric=bar/baz&environment=env1') self.assertEqual(200, resp.status_code) def test_invalid_auth(self): self.project.is_public = False self.project.save() wrong_client = APIClient('invalidkey') resp = wrong_client.get('/api/data/mygroup/myproject?metric=foo&metric=bar/baz&environment=env1') self.assertEqual(401, resp.status_code) def test_auth_from_web_ui(self): self.project.is_public = False self.project.save() web_client = Client() user = User.objects.create(username='theuser') self.group.add_admin(user) web_client.force_login(user) resp = web_client.get('/api/data/mygroup/myproject?metric=foo&metric=bar/baz&environment=env1') self.assertEqual(200, resp.status_code) def test_all_metrics(self): 
self.receive("2018-09-01", metrics={ "foo": {"value": 1, "unit": "kb"}, "bar/baz": {"value": 2, "unit": "seconds"} }) self.receive("2018-09-02", metrics={ "foo": {"value": 2, "unit": ""}, "bar/baz": {"value": 3, "unit": ""} }) resp = self.get_json('/api/data/mygroup/myproject?environment=env1') json = resp.data self.assertEqual(dict, type(json['foo'])) self.assertEqual(dict, type(json['bar/baz'])) first = json['foo']['env1'][0] second = json['foo']['env1'][1] self.assertEqual([1535760000, 1.0], first[0:2]) self.assertEqual([1535846400, 2.0], second[0:2]) first = json['bar/baz']['env1'][0] second = json['bar/baz']['env1'][1] self.assertEqual([1535760000, 2.0], first[0:2]) self.assertEqual([1535846400, 3.0], second[0:2]) self.assertEqual('application/json; charset=utf-8', resp.http['Content-Type']) def test_dynamic_summary(self): self.receive("2019-06-04", metrics={ "foo": {"value": 2, "unit": ""}, "bar/baz": {"value": 2, "unit": ""} }) # geomean = 2 self.receive("2019-06-05", metrics={ "foo": {"value": 3, "unit": ""}, "bar/baz": {"value": 3, "unit": "seconds"}, 'fox/qux': {"value": 3, "unit": "minutes"} }) # geomean = 3 resp = self.get_json( '/api/data/mygroup/myproject?environment=env1&metric=foo&metric=bar/baz&metric=:dynamic_summary:') first = resp.data[':dynamic_summary:']['env1'][0][1] self.assertAlmostEqual(first, 2) first = resp.data[':dynamic_summary:']['env1'][1][1] self.assertAlmostEqual(first, 3) def test_dynamic_summary_no_selected_metrics(self): self.receive("2019-06-04", metrics={ "foo": {"value": 2, "unit": "kg"}, "bar/baz": {"value": 2, "unit": "cm"} }) resp = self.get_json( '/api/data/mygroup/myproject?environment=env1&metric=:dynamic_summary:') self.assertEqual(resp.data[':dynamic_summary:']['env1'], [])
class ApiDataTest(TestCase): def setUp(self): pass def receive(self, datestr, metrics={}, tests={}): pass def get_json(self, url): pass def test_basics(self): pass def test_metrics_csv(self): pass def test_tests(self): pass def test_no_auth_on_non_public_project(self): pass def test_no_auth_on_public_project(self): pass def test_invalid_auth(self): pass def test_auth_from_web_ui(self): pass def test_all_metrics(self): pass def test_dynamic_summary(self): pass def test_dynamic_summary_no_selected_metrics(self): pass
14
0
12
2
11
0
1
0.01
1
4
2
0
13
3
13
13
173
35
138
45
124
2
96
45
82
1
1
1
13
145,593
Linaro/squad
Linaro_squad/test/api/__init__.py
test.api.RestAPIClient
class RestAPIClient(APIClient):

    def __request__(self, method, *args, **kwargs):
        if not kwargs.get('HTTP_AUTHORIZATION'):
            kwargs = kwargs.copy()
            kwargs.update({'HTTP_AUTHORIZATION': "Token %s" % self.token})
        m = getattr(super(APIClient, self), method)
        return m(*args, **kwargs)
class RestAPIClient(APIClient):

    def __request__(self, method, *args, **kwargs):
        pass
2
0
6
0
6
0
2
0
1
1
0
0
1
0
1
6
8
1
7
3
5
0
7
3
5
2
2
1
2
145,594
Linaro/squad
Linaro_squad/test/api/__init__.py
test.api.JSONResponse
class JSONResponse(object):

    def __init__(self, response):
        self.http = response
        body = response.content or bytes('{}', 'utf-8')
        self.data = json.loads(body.decode('utf-8'))
class JSONResponse(object):

    def __init__(self, response):
        pass
2
0
5
1
4
0
1
0
1
1
0
0
1
2
1
1
6
1
5
5
3
0
5
5
3
1
1
0
1
145,595
Linaro/squad
Linaro_squad/test/api/__init__.py
test.api.APIClient
class APIClient(Client):

    def __init__(self, token):
        self.token = token
        return super(APIClient, self).__init__(token)

    def post(self, *args, **kwargs):
        return self.__request__('post', *args, **kwargs)

    def get(self, *args, **kwargs):
        return self.__request__('get', *args, **kwargs)

    def get_json(self, *args, **kwargs):
        resp = self.get(*args, **kwargs)
        return JSONResponse(resp)

    def __request__(self, method, *args, **kwargs):
        if not kwargs.get('HTTP_AUTH_TOKEN'):
            kwargs = kwargs.copy()
            kwargs.update({'HTTP_AUTH_TOKEN': self.token})
        m = getattr(super(APIClient, self), method)
        return m(*args, **kwargs)
class APIClient(Client):

    def __init__(self, token):
        pass

    def post(self, *args, **kwargs):
        pass

    def get(self, *args, **kwargs):
        pass

    def get_json(self, *args, **kwargs):
        pass

    def __request__(self, method, *args, **kwargs):
        pass
6
0
3
0
3
0
1
0
1
2
1
1
5
1
5
5
22
5
17
9
11
0
17
9
11
2
1
1
6
145,596
Linaro/squad
Linaro_squad/test/__init__.py
test.TestResult
class TestResult(TextTestResult):

    def __init__(self, stream, descriptions, verbosity):
        super(TestResult, self).__init__(stream, descriptions, verbosity)
        self.passes = []

    def addSuccess(self, test):
        super(TestResult, self).addSuccess(test)
        self.passes.append((test, None))
class TestResult(TextTestResult):

    def __init__(self, stream, descriptions, verbosity):
        pass

    def addSuccess(self, test):
        pass
3
0
3
0
3
0
1
0
1
1
0
0
2
1
2
37
8
1
7
4
4
0
7
4
4
1
3
0
2
145,597
Linaro/squad
Linaro_squad/squad/core/migrations/0028_suite_and_test_name_length.py
squad.core.migrations.0028_suite_and_test_name_length.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0027_project_notification_strategy'), ] operations = [ migrations.AlterField( model_name='environment', name='slug', field=models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]), ), migrations.AlterField( model_name='group', name='slug', field=models.CharField(max_length=100, unique=True, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]), ), migrations.AlterField( model_name='project', name='slug', field=models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]), ), migrations.AlterField( model_name='suite', name='name', field=models.CharField(max_length=256, null=True), ), migrations.AlterField( model_name='suite', name='slug', field=models.CharField(max_length=256, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]), ), migrations.AlterField( model_name='test', name='name', field=models.CharField(max_length=256), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
38
2
36
3
35
0
3
3
2
0
1
0
0
145,598
Linaro/squad
Linaro_squad/squad/core/migrations/0029_subscription_email_formats.py
squad.core.migrations.0029_subscription_email_formats.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0028_suite_and_test_name_length'), ] operations = [ migrations.AddField( model_name='subscription', name='html', field=models.BooleanField(default=True, verbose_name='Send HTML version'), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,599
Linaro/squad
Linaro_squad/squad/core/migrations/0030_remove_project_build_completion_threshold.py
squad.core.migrations.0030_remove_project_build_completion_threshold.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0029_subscription_email_formats'), ] operations = [ migrations.RemoveField( model_name='project', name='build_completion_threshold', ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
12
2
10
3
9
0
3
3
2
0
1
0
0
145,600
Linaro/squad
Linaro_squad/squad/core/migrations/0031_environment_expected_test_runs.py
squad.core.migrations.0031_environment_expected_test_runs.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0030_remove_project_build_completion_threshold'), ] operations = [ migrations.AddField( model_name='environment', name='expected_test_runs', field=models.IntegerField(default=None, null=True), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,601
Linaro/squad
Linaro_squad/squad/core/migrations/0032_testrun_completed.py
squad.core.migrations.0032_testrun_completed.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0031_environment_expected_test_runs'), ] operations = [ migrations.AddField( model_name='testrun', name='completed', field=models.BooleanField(default=True), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,602
Linaro/squad
Linaro_squad/test/api/tests.py
test.api.tests.ApiTest
class ApiTest(TestCase):

    def setUp(self):
        self.group = models.Group.objects.create(slug='mygroup')
        self.project = self.group.projects.create(slug='myproject')
        self.usergroup = models.UserNamespace.objects.create(slug='~project-user')
        self.userproject = self.usergroup.projects.create(slug='userproject')
        self.project_submission_admin_user = User.objects.create(username='project-user')
        self.project_submitter_level = User.objects.create(username='project-user-two')
        self.group.add_admin(self.project_submission_admin_user)
        self.group.add_user(self.project_submitter_level, 'submitter')
        self.usergroup.add_admin(self.project_submission_admin_user)
        Token.objects.create(user=self.project_submission_admin_user, key='thekey')
        Token.objects.create(user=self.project_submitter_level, key='thesubmitterkey')
        self.global_submission_user = User.objects.create(username='global-user', is_staff=True)
        self.global_token = Token.objects.create(user=self.global_submission_user)
        self.client = APIClient('thekey')
        self.submitter_client = APIClient('thesubmitterkey')
class ApiTest(TestCase):

    def setUp(self):
        pass
2
0
19
3
16
0
1
0
1
2
2
2
1
10
1
1
21
4
17
12
15
0
17
12
15
1
1
0
1
145,603
Linaro/squad
Linaro_squad/squad/api/rest.py
squad.api.rest.ProjectFilter
class ProjectFilter(filters.FilterSet):
    group = filters.RelatedFilter(GroupFilter, field_name="group", queryset=Group.objects.all())
    full_name = filters.CharFilter(method='filter_full_name', lookup_expr='icontains')

    class Meta:
        model = Project
        fields = {'name': ['exact', 'in', 'startswith', 'contains', 'icontains'],
                  'slug': ['exact', 'in', 'startswith', 'contains', 'icontains'],
                  'datetime': ['exact', 'gt', 'gte', 'lt', 'lte'],
                  'id': ['exact', 'in']}

    def filter_full_name(self, queryset, field_name, value):
        if value:
            group_slug = 'group__slug'
            project_slug = 'slug'
            if queryset.model is not Project:
                group_slug = 'project__%s' % group_slug
                project_slug = 'project__%s' % project_slug
            queryset = queryset.annotate(fullname=Concat(F(group_slug), V('/'), F(project_slug), output_field=CharField())).filter(fullname__startswith=value)
        return queryset
class ProjectFilter(filters.FilterSet):

    class Meta:
        pass

    def filter_full_name(self, queryset, field_name, value):
        pass
3
0
10
0
10
0
3
0
1
1
1
0
1
0
1
1
21
2
19
9
16
0
15
9
12
3
1
2
3
145,604
Linaro/squad
Linaro_squad/squad/api/rest.py
squad.api.rest.ModelViewSet
class ModelViewSet(viewsets.ModelViewSet):
    project_lookup_key = None

    def get_projects(self):
        """
        Determines which projects the current user is allowed to visualize.
        Returns a list of project ids to be used in get_queryset() for filtering.
        """
        user = self.request.user
        return Project.objects.accessible_to(user).only('id')

    def get_queryset(self):
        queryset = self.queryset
        user = self.request.user
        if not (user.is_superuser or user.is_staff) and self.project_lookup_key is not None:
            project_lookup = {self.project_lookup_key: self.get_projects()}
            queryset = queryset.filter(**project_lookup)
        return queryset
class ModelViewSet(viewsets.ModelViewSet):

    def get_projects(self):
        '''
        Determines which projects the current user is allowed to visualize.
        Returns a list of project ids to be used in get_queryset() for filtering.
        '''
        pass

    def get_queryset(self):
        pass
3
1
8
1
5
3
2
0.42
1
1
1
7
2
0
2
2
20
3
12
8
9
5
12
8
9
2
1
1
3
145,605
Linaro/squad
Linaro_squad/squad/core/migrations/0033_drop_debversion.py
squad.core.migrations.0033_drop_debversion.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0032_testrun_completed'), ] operations = [ migrations.AlterField( model_name='build', name='version', field=models.CharField(max_length=100), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,606
Linaro/squad
Linaro_squad/squad/core/migrations/0034_prepare_to_remove_build_name.py
squad.core.migrations.0034_prepare_to_remove_build_name.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0033_drop_debversion'), ] operations = [ migrations.RunSQL( sql="UPDATE core_build SET version = name WHERE name != ''", reverse_sql=migrations.RunSQL.noop, ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
12
2
10
3
9
0
3
3
2
0
1
0
0
145,607
Linaro/squad
Linaro_squad/squad/core/migrations/0035_remove_build_name.py
squad.core.migrations.0035_remove_build_name.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0034_prepare_to_remove_build_name'), ] operations = [ migrations.AlterField( model_name='build', name='name', field=models.CharField(max_length=100, default=''), preserve_default=True, ), migrations.RemoveField( model_name='build', name='name', ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
18
2
16
3
15
0
3
3
2
0
1
0
0
145,608
Linaro/squad
Linaro_squad/squad/core/migrations/0036_status_tests_skip.py
squad.core.migrations.0036_status_tests_skip.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0035_remove_build_name'), ] operations = [ migrations.AddField( model_name='status', name='tests_skip', field=models.IntegerField(default=0), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,609
Linaro/squad
Linaro_squad/squad/core/migrations/0037_project_status_test_summary_fields.py
squad.core.migrations.0037_project_status_test_summary_fields.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0036_status_tests_skip'), ] operations = [ migrations.AddField( model_name='projectstatus', name='tests_fail', field=models.IntegerField(default=0), preserve_default=False, ), migrations.AddField( model_name='projectstatus', name='tests_pass', field=models.IntegerField(default=0), preserve_default=False, ), migrations.AddField( model_name='projectstatus', name='tests_skip', field=models.IntegerField(default=0), preserve_default=False, ), migrations.AddField( model_name='projectstatus', name='metrics_summary', field=models.FloatField(default=0.0), preserve_default=False, ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
32
2
30
3
29
0
3
3
2
0
1
0
0
145,610
Linaro/squad
Linaro_squad/squad/core/migrations/0038_populate_project_status_cache.py
squad.core.migrations.0038_populate_project_status_cache.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0037_project_status_test_summary_fields'), ] operations = [ migrations.RunPython( update_test_summary, reverse_code=migrations.RunPython.noop, ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
12
2
10
3
9
0
3
3
2
0
1
0
0
145,611
Linaro/squad
Linaro_squad/squad/core/migrations/0039_orderings.py
squad.core.migrations.0039_orderings.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0038_populate_project_status_cache'), ] operations = [ migrations.AlterModelOptions( name='group', options={'ordering': ['slug']}, ), migrations.AlterModelOptions( name='project', options={'ordering': ['group', 'slug']}, ), migrations.AlterModelOptions( name='test', options={'ordering': ['name']}, ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
20
2
18
3
17
0
3
3
2
0
1
0
0
145,612
Linaro/squad
Linaro_squad/squad/core/migrations/0040_remove_subscription_html.py
squad.core.migrations.0040_remove_subscription_html.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0039_orderings'), ] operations = [ migrations.RemoveField( model_name='subscription', name='html', ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
12
2
10
3
9
0
3
3
2
0
1
0
0
145,613
Linaro/squad
Linaro_squad/squad/core/migrations/0041_projectstatus_notified.py
squad.core.migrations.0041_projectstatus_notified.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0040_remove_subscription_html'), ] operations = [ migrations.AddField( model_name='projectstatus', name='notified', field=models.BooleanField(default=False), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,614
Linaro/squad
Linaro_squad/squad/api/rest.py
squad.api.rest.PatchSourceViewSet
class PatchSourceViewSet(viewsets.ModelViewSet):
    queryset = PatchSource.objects
    serializer_class = PatchSourceSerializer
    filterset_fields = ('implementation', 'url', 'name')
    filter_fields = filterset_fields
class PatchSourceViewSet(viewsets.ModelViewSet): pass
1
0
0
0
0
0
0
0.2
1
0
0
0
0
0
0
0
5
0
5
5
4
1
5
5
4
0
1
0
0
145,615
Linaro/squad
Linaro_squad/squad/core/migrations/0042_set_projectstatus_notified.py
squad.core.migrations.0042_set_projectstatus_notified.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0041_projectstatus_notified'), ] operations = [ migrations.RunSQL( "UPDATE core_projectstatus SET notified = (2>1);", reverse_sql="SELECT 1", ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
12
2
10
3
9
0
3
3
2
0
1
0
0
145,616
Linaro/squad
Linaro_squad/test/api/tests.py
test.api.tests.CreateBuildApiTest
class CreateBuildApiTest(ApiTest): def setUp(self): super(CreateBuildApiTest, self).setUp() self.github = models.PatchSource.objects.create( name='github', username='foo', url='https://github.com/', token='*********', implementation='example' ) def test_patch_source(self): response = self.client.post( '/api/createbuild/mygroup/myproject/1.0.0', { 'patch_source': 'github', 'patch_id': '999', } ) self.assertEqual(response.status_code, 201) build = self.project.builds.get(version='1.0.0') self.assertEqual(self.github, build.patch_source) self.assertEqual(build.patch_id, "999") logentry_queryset = LogEntry.objects.filter( user_id=self.project_submission_admin_user.pk, object_id=build.pk, object_repr=force_text(build), ) self.assertEqual( 1, logentry_queryset.count() ) self.assertEqual( ADDITION, logentry_queryset.last().action_flag ) def test_patch_source_private(self): response = self.client.post( '/api/createbuild/~project-user/userproject/1.0.0', { 'patch_source': 'github', 'patch_id': '999', } ) self.assertEqual(response.status_code, 201) build = self.userproject.builds.get(version='1.0.0') self.assertEqual(self.github, build.patch_source) self.assertEqual(build.patch_id, "999") def test_patch_baseline(self): baseline = self.project.builds.create(version='0') response = self.client.post( '/api/createbuild/mygroup/myproject/1', { 'patch_source': 'github', 'patch_id': '999', 'patch_baseline': '0', } ) self.assertEqual(response.status_code, 201) build = self.project.builds.get(version='1') self.assertEqual(build.patch_baseline, baseline) def test_unexisting_patch_source(self): response = self.client.post( '/api/createbuild/mygroup/myproject/1.0.0', { 'patch_source': 'foobarbaz', # does not exist 'patch_id': '999', } ) self.assertEqual(response.status_code, 400) def test_create_callback(self): response = self.client.post( '/api/createbuild/mygroup/myproject/with-callback', { 'callback_url': 'http://the-callback.target' } ) self.assertEqual(response.status_code, 201) build = self.project.builds.get(version='with-callback') self.assertEqual(1, build.callbacks.count()) def test_create_callback_all_attrs(self): attrs = { 'url': 'http://the-callback.target.com', 'method': 'post', 'event': 'on_build_finished', 'headers': '{"Authorization": "123456"}', 'payload': '{"data": "value"}', 'payload_is_json': 'true', 'record_response': 'true', } response = self.client.post( '/api/createbuild/mygroup/myproject/with-callback', { 'callback_%s' % attr: attrs[attr] for attr in attrs.keys() } ) self.assertEqual(response.status_code, 201) build = self.project.builds.get(version='with-callback') self.assertEqual(1, build.callbacks.count()) callback = build.callbacks.first() attrs['payload_is_json'] = True attrs['record_response'] = True for attr in attrs: self.assertEqual(getattr(callback, attr), attrs[attr]) def test_malformed_callback(self): response = self.client.post( '/api/createbuild/mygroup/myproject/with-callback', { 'callback_url': 'invalid-callback-target-url' } ) self.assertEqual(response.status_code, 400) self.assertEqual(b'Enter a valid URL.', response.content) def test_duplicated_callback(self): callback_url = 'http://the-callback.target' response = self.client.post( '/api/createbuild/mygroup/myproject/with-callback', { 'callback_url': callback_url, } ) self.assertEqual(response.status_code, 201) build = self.project.builds.get(version='with-callback') self.assertEqual(1, build.callbacks.count()) response = self.client.post( '/api/createbuild/mygroup/myproject/with-callback', { 'callback_url': 
callback_url, } ) self.assertEqual(response.status_code, 400) self.assertEqual(1, build.callbacks.count()) self.assertEqual(b'Callback with this Object reference type, Object reference id, Url and Event already exists.', response.content)
class CreateBuildApiTest(ApiTest): def setUp(self): pass def test_patch_source(self): pass def test_patch_source_private(self): pass def test_patch_baseline(self): pass def test_unexisting_patch_source(self): pass def test_create_callback(self): pass def test_create_callback_all_attrs(self): pass def test_malformed_callback(self): pass def test_duplicated_callback(self): pass
10
0
15
1
15
0
1
0.01
1
2
1
0
9
1
9
10
148
16
132
30
122
1
58
30
48
2
2
1
10
145,617
Linaro/squad
Linaro_squad/squad/core/migrations/0026_testrun_result_accept_null.py
squad.core.migrations.0026_testrun_result_accept_null.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0025_unique_testrun_job_id'), ] operations = [ migrations.AlterModelOptions( name='build', options={'ordering': ['datetime']}, ), migrations.AlterField( model_name='test', name='result', field=models.NullBooleanField(), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
17
2
15
3
14
0
3
3
2
0
1
0
0
145,618
Linaro/squad
Linaro_squad/squad/core/migrations/0004_group_user_groups.py
squad.core.migrations.0004_group_user_groups.Migration
class Migration(migrations.Migration): dependencies = [ ('auth', '0008_alter_user_username_max_length'), ('core', '0003_testrun_log_file'), ] operations = [ migrations.AddField( model_name='group', name='user_groups', field=models.ManyToManyField(to='auth.Group'), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
14
2
12
3
11
0
3
3
2
0
1
0
0
145,619
Linaro/squad
Linaro_squad/squad/core/migrations/0005_token.py
squad.core.migrations.0005_token.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0004_group_user_groups'), ] operations = [ migrations.CreateModel( name='Token', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('key', models.CharField(max_length=64, unique=True)), ('description', models.CharField(max_length=100)), ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tokens', to='core.Project')), ], ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
17
2
15
3
14
0
3
3
2
0
1
0
0
145,620
Linaro/squad
Linaro_squad/squad/core/migrations/0006_auto_20160826_2242.py
squad.core.migrations.0006_auto_20160826_2242.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0005_token'), ] operations = [ migrations.AddField( model_name='metric', name='name', field=models.CharField(default='', max_length=100), preserve_default=False, ), migrations.AddField( model_name='test', name='name', field=models.CharField(default='', max_length=100), preserve_default=False, ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
20
2
18
3
17
0
3
3
2
0
1
0
0
145,621
Linaro/squad
Linaro_squad/squad/core/migrations/0007_testrun_data_processed.py
squad.core.migrations.0007_testrun_data_processed.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0006_auto_20160826_2242'), ] operations = [ migrations.AddField( model_name='testrun', name='data_processed', field=models.BooleanField(default=False), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,622
Linaro/squad
Linaro_squad/squad/core/migrations/0008_status.py
squad.core.migrations.0008_status.Migration
class Migration(migrations.Migration): dependencies = [ ('core', '0007_testrun_data_processed'), ] operations = [ migrations.CreateModel( name='Status', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tests_pass', models.IntegerField(default=0)), ('tests_fail', models.IntegerField(default=0)), ('metrics_summary', models.FloatField(default=0.0)), ('suite', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Suite')), ('test_run', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='status', to='core.TestRun')), ], ), migrations.AlterUniqueTogether( name='status', unique_together=set([('test_run', 'suite')]), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
23
2
21
3
20
0
3
3
2
0
1
0
0
145,623
Linaro/squad
Linaro_squad/squad/core/migrations/0009_testrun_status_recorded.py
squad.core.migrations.0009_testrun_status_recorded.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0008_status'),
    ]

    operations = [
        migrations.AddField(
            model_name='testrun',
            name='status_recorded',
            field=models.BooleanField(default=False),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,624
Linaro/squad
Linaro_squad/squad/core/migrations/0010_testrun_datetime.py
squad.core.migrations.0010_testrun_datetime.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0009_testrun_status_recorded'),
    ]

    operations = [
        migrations.AddField(
            model_name='testrun',
            name='datetime',
            field=models.DateTimeField(null=True),
        ),
        migrations.RunSQL(
            "UPDATE core_testrun SET datetime = created_at",
            reverse_sql="SELECT 1",
        ),
        migrations.AlterField(
            model_name='testrun',
            name='datetime',
            field=models.DateTimeField(null=False),
        )
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
22
2
20
3
19
0
3
3
2
0
1
0
0
145,625
Linaro/squad
Linaro_squad/squad/core/migrations/0011_testrun_metadata_fields.py
squad.core.migrations.0011_testrun_metadata_fields.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0010_testrun_datetime'),
    ]

    operations = [
        migrations.AddField(
            model_name='testrun',
            name='build_url',
            field=models.CharField(max_length=2048, null=True),
        ),
        migrations.AddField(
            model_name='testrun',
            name='job_id',
            field=models.CharField(max_length=128, null=True),
        ),
        migrations.AddField(
            model_name='testrun',
            name='job_status',
            field=models.CharField(max_length=128, null=True),
        ),
        migrations.AddField(
            model_name='testrun',
            name='job_url',
            field=models.CharField(max_length=2048, null=True),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
28
2
26
3
25
0
3
3
2
0
1
0
0
145,626
Linaro/squad
Linaro_squad/squad/core/migrations/0012_build_datetime.py
squad.core.migrations.0012_build_datetime.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0011_testrun_metadata_fields'),
    ]

    operations = [
        migrations.AddField(
            model_name='build',
            name='datetime',
            field=models.DateTimeField(null=True),
        ),
        migrations.RunSQL(
            "UPDATE core_build SET datetime = created_at",
            reverse_sql="SELECT 1",
        ),
        migrations.AlterField(
            model_name='build',
            name='datetime',
            field=models.DateTimeField(null=False),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
22
2
20
3
19
0
3
3
2
0
1
0
0
145,627
Linaro/squad
Linaro_squad/squad/core/migrations/0013_testrun_resubmit_url.py
squad.core.migrations.0013_testrun_resubmit_url.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0012_build_datetime'),
    ]

    operations = [
        migrations.AddField(
            model_name='testrun',
            name='resubmit_url',
            field=models.CharField(max_length=2048, null=True),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,628
Linaro/squad
Linaro_squad/squad/core/migrations/0027_project_notification_strategy.py
squad.core.migrations.0027_project_notification_strategy.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0026_testrun_result_accept_null'),
    ]

    operations = [
        migrations.AddField(
            model_name='project',
            name='notification_strategy',
            field=models.CharField(choices=[('all', 'All builds'), ('change', 'Only on change')], default='all', max_length=32),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,629
Linaro/squad
Linaro_squad/squad/core/migrations/0014_testrun_metadata_file.py
squad.core.migrations.0014_testrun_metadata_file.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0013_testrun_resubmit_url'),
    ]

    operations = [
        migrations.AddField(
            model_name='testrun',
            name='metadata_file',
            field=models.TextField(null=True),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,630
Linaro/squad
Linaro_squad/squad/core/migrations/0016_project_is_public.py
squad.core.migrations.0016_project_is_public.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0015_attachment'),
    ]

    operations = [
        migrations.AddField(
            model_name='project',
            name='is_public',
            field=models.BooleanField(default=True),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,631
Linaro/squad
Linaro_squad/squad/core/migrations/0017_slug_validator.py
squad.core.migrations.0017_slug_validator.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0016_project_is_public'),
    ]

    operations = [
        migrations.AlterField(
            model_name='environment',
            name='slug',
            field=models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_-]+')]),
        ),
        migrations.AlterField(
            model_name='group',
            name='slug',
            field=models.CharField(max_length=100, unique=True, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_-]+')]),
        ),
        migrations.AlterField(
            model_name='project',
            name='slug',
            field=models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_-]+')]),
        ),
        migrations.AlterField(
            model_name='suite',
            name='slug',
            field=models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_-]+')]),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
28
2
26
3
25
0
3
3
2
0
1
0
0
145,632
Linaro/squad
Linaro_squad/squad/core/migrations/0018_build_name.py
squad.core.migrations.0018_build_name.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0017_slug_validator'),
    ]

    operations = [
        migrations.AddField(
            model_name='build',
            name='name',
            field=models.CharField(default='', max_length=100),
            preserve_default=False,
        ),
        migrations.RunSQL(
            sql="UPDATE core_build SET name = version;",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
18
2
16
3
15
0
3
3
2
0
1
0
0
145,633
Linaro/squad
Linaro_squad/squad/core/migrations/0019_build_version.py
squad.core.migrations.0019_build_version.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0018_build_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='build',
            name='version',
            field=models.TextField(),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,634
Linaro/squad
Linaro_squad/squad/core/migrations/0020_build_ordering.py
squad.core.migrations.0020_build_ordering.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0019_build_version'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='build',
            options={'ordering': ['version']},
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
12
2
10
3
9
0
3
3
2
0
1
0
0
145,635
Linaro/squad
Linaro_squad/squad/core/migrations/0021_global_tokens.py
squad.core.migrations.0021_global_tokens.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0020_build_ordering'),
    ]

    operations = [
        migrations.AlterField(
            model_name='token',
            name='project',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tokens', to='core.Project'),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,636
Linaro/squad
Linaro_squad/squad/core/migrations/0022_projectstatus.py
squad.core.migrations.0022_projectstatus.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0021_global_tokens'),
    ]

    operations = [
        migrations.CreateModel(
            name='ProjectStatus',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('build', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Build')),
                ('previous', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='next', to='core.ProjectStatus')),
            ],
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
17
2
15
3
14
0
3
3
2
0
1
0
0
145,637
Linaro/squad
Linaro_squad/squad/core/migrations/0023_subscription.py
squad.core.migrations.0023_subscription.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0022_projectstatus'),
    ]

    operations = [
        migrations.CreateModel(
            name='Subscription',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.CharField(max_length=1024, validators=[django.core.validators.EmailValidator()])),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to='core.Project')),
            ],
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
16
2
14
3
13
0
3
3
2
0
1
0
0
145,638
Linaro/squad
Linaro_squad/squad/core/migrations/0024_project_build_completion_threshold.py
squad.core.migrations.0024_project_build_completion_threshold.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0023_subscription'),
    ]

    operations = [
        migrations.AddField(
            model_name='project',
            name='build_completion_threshold',
            field=models.IntegerField(default=120),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,639
Linaro/squad
Linaro_squad/squad/core/migrations/0025_unique_testrun_job_id.py
squad.core.migrations.0025_unique_testrun_job_id.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0024_project_build_completion_threshold'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='testrun',
            unique_together=set([('build', 'job_id')]),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
12
2
10
3
9
0
3
3
2
0
1
0
0
145,640
Linaro/squad
Linaro_squad/squad/core/migrations/0015_attachment.py
squad.core.migrations.0015_attachment.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0014_testrun_metadata_file'),
    ]

    operations = [
        migrations.CreateModel(
            name='Attachment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('filename', models.CharField(max_length=1024)),
                ('data', models.BinaryField(default=None)),
                ('length', models.IntegerField(default=None)),
                ('test_run', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attachments', to='core.TestRun')),
            ],
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
18
2
16
3
15
0
3
3
2
0
1
0
0
145,641
Linaro/squad
Linaro_squad/squad/core/migrations/0043_project_status_build.py
squad.core.migrations.0043_project_status_build.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0042_set_projectstatus_notified'),
    ]

    operations = [
        migrations.AlterField(
            model_name='projectstatus',
            name='build',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='core.Build'),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,642
Linaro/squad
Linaro_squad/squad/core/migrations/0044_project_html_mail.py
squad.core.migrations.0044_project_html_mail.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0043_project_status_build'),
    ]

    operations = [
        migrations.AddField(
            model_name='project',
            name='html_mail',
            field=models.BooleanField(default=True),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,643
Linaro/squad
Linaro_squad/squad/core/migrations/0045_adminsubscription.py
squad.core.migrations.0045_adminsubscription.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0044_project_html_mail'),
    ]

    operations = [
        migrations.CreateModel(
            name='AdminSubscription',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.CharField(max_length=1024, validators=[django.core.validators.EmailValidator()])),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='admin_subscriptions', to='core.Project')),
            ],
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
16
2
14
3
13
0
3
3
2
0
1
0
0
145,644
Linaro/squad
Linaro_squad/squad/core/migrations/0073_auto_20180420_1643.py
squad.core.migrations.0073_auto_20180420_1643.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0072_group_description'),
    ]

    operations = [
        migrations.AlterField(
            model_name='environment',
            name='expected_test_runs',
            field=models.IntegerField(blank=True, default=None, null=True),
        ),
        migrations.AlterField(
            model_name='environment',
            name='name',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='project',
            name='notification_timeout',
            field=models.IntegerField(blank=True, help_text='Force sending build notifications after this many seconds', null=True),
        ),
        migrations.AlterField(
            model_name='project',
            name='wait_before_notification',
            field=models.IntegerField(blank=True, help_text='Wait this many seconds before sending notifications', null=True),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
28
2
26
3
25
0
3
3
2
0
1
0
0
145,645
Linaro/squad
Linaro_squad/squad/core/migrations/0074_add_indexes.py
squad.core.migrations.0074_add_indexes.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0073_auto_20180420_1643'),
    ]

    operations = [
        migrations.AlterField(
            model_name='environment',
            name='slug',
            field=models.CharField(db_index=True, max_length=100, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]),
        ),
        migrations.AlterField(
            model_name='group',
            name='slug',
            field=models.CharField(db_index=True, max_length=100, unique=True, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]),
        ),
        migrations.AlterField(
            model_name='project',
            name='slug',
            field=models.CharField(db_index=True, max_length=100, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]),
        ),
        migrations.AlterField(
            model_name='suite',
            name='slug',
            field=models.CharField(db_index=True, max_length=256, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9][a-zA-Z0-9_.-]*')]),
        ),
        migrations.AlterField(
            model_name='suitemetadata',
            name='kind',
            field=models.CharField(choices=[('suite', 'Suite'), ('test', 'Test'), ('metric', 'Metric')], db_index=True, max_length=6),
        ),
        migrations.AlterField(
            model_name='suitemetadata',
            name='name',
            field=models.CharField(db_index=True, max_length=256, null=True),
        ),
        migrations.AlterField(
            model_name='suitemetadata',
            name='suite',
            field=models.CharField(db_index=True, max_length=256),
        ),
        migrations.AlterField(
            model_name='test',
            name='name',
            field=models.CharField(db_index=True, max_length=256),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
48
2
46
3
45
0
3
3
2
0
1
0
0
145,646
Linaro/squad
Linaro_squad/squad/core/migrations/0075_update_project_enabled_plugin_list.py
squad.core.migrations.0075_update_project_enabled_plugin_list.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0074_add_indexes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='project',
            name='enabled_plugins_list',
            field=squad.core.plugins.PluginListField(null=True),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
145,647
Linaro/squad
Linaro_squad/squad/core/migrations/0076_patch_builds.py
squad.core.migrations.0076_patch_builds.Migration
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0075_update_project_enabled_plugin_list'),
    ]

    operations = [
        migrations.CreateModel(
            name='PatchSource',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256, unique=True)),
                ('username', models.CharField(max_length=128)),
                ('url', models.URLField()),
                ('token', models.CharField(max_length=1024)),
                ('implementation', squad.core.plugins.PluginField(default='null')),
            ],
        ),
        migrations.AddField(
            model_name='build',
            name='patch_baseline',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Build'),
        ),
        migrations.AddField(
            model_name='build',
            name='patch_id',
            field=models.CharField(blank=True, max_length=1024, null=True),
        ),
        migrations.AddField(
            model_name='build',
            name='patch_source',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.PatchSource'),
        ),
    ]
class Migration(migrations.Migration):
    pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
34
2
32
3
31
0
3
3
2
0
1
0
0