Schema: each example carries four parallel per-line columns, lines, raw_lines, label, and type, with per-example list lengths ranging from 1 to 444.
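For orientation, a minimal sketch of how one might walk a single record, assuming each example exposes the four parallel per-line lists named in the schema above (the sample values are abbreviated from the first record below):

# Minimal sketch; field names follow the schema above, sample values
# are truncated from the first record in this dump.
record = {
    "lines": ["def FUNC_14(self):...\n", "return True\n"],
    "raw_lines": ["def is_active(self):...\n", "return True\n"],
    "label": [0, 0],
    "type": ["FunctionDef'", "Return'"],
}

for obf, raw, lab, tag in zip(record["lines"], record["raw_lines"],
                              record["label"], record["type"]):
    # Each position pairs an obfuscated line with its deobfuscated form,
    # plus a per-line integer label and an AST-node tag.
    print(f"{tag:<14} label={lab}  {obf.rstrip()}  ->  {raw.rstrip()}")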
[ "def FUNC_14(self):...\n", "if self.field_name in self.query_params:\n", "VAR_24 = self.query_params[self.field_name]\n", "if self.value is None:\n", "if type(VAR_24) == list:\n", "return True\n", "return False\n", "VAR_24 = VAR_24[0]\n", "if VAR_24 == self.value:\n", "return True\n" ]
[ "def is_active(self):...\n", "if self.field_name in self.query_params:\n", "selected_value = self.query_params[self.field_name]\n", "if self.value is None:\n", "if type(selected_value) == list:\n", "return True\n", "return False\n", "selected_value = selected_value[0]\n", "if selected_value == self.value:\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Condition", "Return'", "Return'", "Assign'", "Condition", "Return'" ]
[ "@staticmethod...\n", "VAR_0 = Database()\n", "VAR_11 = VAR_0.list_tasks(VAR_2=limit, VAR_3=offset, VAR_30='file',\n not_status=TASK_PENDING)\n", "VAR_12 = VAR_0.list_tasks(VAR_2=limit, VAR_3=offset, VAR_30='url',\n not_status=TASK_PENDING)\n", "VAR_9 = []\n", "if VAR_11:\n", "for VAR_10 in VAR_11:\n", "if VAR_12:\n", "VAR_27 = VAR_10.to_dict()\n", "for VAR_10 in VAR_12:\n", "return VAR_9\n", "VAR_27['sample'] = VAR_0.view_sample(VAR_27['sample_id']).to_dict()\n", "VAR_27 = VAR_10.to_dict()\n", "VAR_28 = os.path.basename(VAR_27['target'])\n", "if VAR_0.view_errors(VAR_10.id):\n", "VAR_27.update({'filename': VAR_28})\n", "VAR_27['errors'] = True\n", "VAR_9.append(VAR_27)\n", "if VAR_0.view_errors(VAR_10.id):\n", "VAR_27['errors'] = True\n", "VAR_9.append(VAR_27)\n" ]
[ "@staticmethod...\n", "db = Database()\n", "tasks_files = db.list_tasks(limit=limit, offset=offset, category='file',\n not_status=TASK_PENDING)\n", "tasks_urls = db.list_tasks(limit=limit, offset=offset, category='url',\n not_status=TASK_PENDING)\n", "data = []\n", "if tasks_files:\n", "for task in tasks_files:\n", "if tasks_urls:\n", "new = task.to_dict()\n", "for task in tasks_urls:\n", "return data\n", "new['sample'] = db.view_sample(new['sample_id']).to_dict()\n", "new = task.to_dict()\n", "filename = os.path.basename(new['target'])\n", "if db.view_errors(task.id):\n", "new.update({'filename': filename})\n", "new['errors'] = True\n", "data.append(new)\n", "if db.view_errors(task.id):\n", "new['errors'] = True\n", "data.append(new)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "For", "Condition", "Assign'", "For", "Return'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_7(self, VAR_17=None):...\n", "if not VAR_17:\n", "VAR_17 = self._error\n", "if VAR_17:\n", "VAR_101.errors.add(VAR_17)\n" ]
[ "def error(self, e=None):...\n", "if not e:\n", "e = self._error\n", "if e:\n", "c.errors.add(e)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_19(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'HIGH': 2}, 'CONFIDENCE': {'HIGH': 2}}\n", "self.check_example('ftplib.py', VAR_2)\n" ]
[ "def test_ftp_usage(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'HIGH': 2}, 'CONFIDENCE': {'HIGH': 2}}\n", "self.check_example('ftplib.py', expect)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_5(self):...\n", "while len(self.targets) == 0:\n", "VAR_14 = self.get_targets()\n", "self.schedule(self.comment_loop)\n", "if VAR_14 == 0:\n", "if len(self.forums) == 0:\n", "self.log.info('No targets found at all, sleeping for 30 seconds')\n", "self.schedule(self.wait_loop)\n", "self.long_sleep(30)\n" ]
[ "def scan_targets_loop(self):...\n", "while len(self.targets) == 0:\n", "c = self.get_targets()\n", "self.schedule(self.comment_loop)\n", "if c == 0:\n", "if len(self.forums) == 0:\n", "self.log.info('No targets found at all, sleeping for 30 seconds')\n", "self.schedule(self.wait_loop)\n", "self.long_sleep(30)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Condition", "For", "Expr'", "Expr'", "Expr'" ]
[ "from __future__ import absolute_import\n", "from __future__ import division\n", "from __future__ import print_function\n", "import binascii\n", "import functools\n", "import hashlib\n", "import inspect\n", "import logging\n", "import numpy as np\n", "import os\n", "import subprocess\n", "import sys\n", "import threading\n", "import time\n", "import uuid\n", "import ray.gcs_utils\n", "import ray.ray_constants as ray_constants\n", "def FUNC_0():...\n", "VAR_32 = hashlib.sha1()\n", "VAR_32.update(uuid.uuid4().bytes)\n", "VAR_33 = VAR_32.digest()\n", "assert len(VAR_33) == ray_constants.ID_SIZE\n", "return VAR_33\n" ]
[ "from __future__ import absolute_import\n", "from __future__ import division\n", "from __future__ import print_function\n", "import binascii\n", "import functools\n", "import hashlib\n", "import inspect\n", "import logging\n", "import numpy as np\n", "import os\n", "import subprocess\n", "import sys\n", "import threading\n", "import time\n", "import uuid\n", "import ray.gcs_utils\n", "import ray.ray_constants as ray_constants\n", "def _random_string():...\n", "id_hash = hashlib.sha1()\n", "id_hash.update(uuid.uuid4().bytes)\n", "id_bytes = id_hash.digest()\n", "assert len(id_bytes) == ray_constants.ID_SIZE\n", "return id_bytes\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assert'", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.get(self.section, 'fticks_format_string')\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.get(self.section, 'fticks_format_string')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def __hash__(self):...\n", "return self._hash\n" ]
[ "def __hash__(self):...\n", "return self._hash\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_18(self, VAR_34, VAR_36, VAR_37):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_34:\n", "return True\n", "return os.lseek(VAR_34, VAR_36, VAR_37)\n" ]
[ "def lseek(self, fd, offset, whence):...\n", "\"\"\"docstring\"\"\"\n", "if not fd:\n", "return True\n", "return os.lseek(fd, offset, whence)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_0.browser.get(VAR_0.live_server_url + '/login/')\n", "VAR_1 = VAR_0.browser.find_element_by_tag_name('body')\n", "VAR_0.assertIn('Please sign in', VAR_1.text)\n", "VAR_2 = VAR_0.browser.find_element_by_name('username')\n", "VAR_2.send_keys('Karyn')\n", "VAR_3 = VAR_0.browser.find_element_by_name('password')\n", "VAR_3.send_keys('specialP@55word')\n", "VAR_0.browser.find_element_by_class_name('btn').click()\n" ]
[ "def log_karyn_in(object):...\n", "\"\"\"docstring\"\"\"\n", "object.browser.get(object.live_server_url + '/login/')\n", "body = object.browser.find_element_by_tag_name('body')\n", "object.assertIn('Please sign in', body.text)\n", "username_input = object.browser.find_element_by_name('username')\n", "username_input.send_keys('Karyn')\n", "password_input = object.browser.find_element_by_name('password')\n", "password_input.send_keys('specialP@55word')\n", "object.browser.find_element_by_class_name('btn').click()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def __init__(VAR_39, VAR_40, VAR_41, VAR_42, VAR_48, VAR_43, VAR_44, VAR_45,...\n", "VAR_39.returncode = None\n", "VAR_39._out_file = VAR_7.path.join(self.root_dir, 'work',\n 'task_runner_out.json')\n", "VAR_6 = [sys.executable, bot_main.THIS_FILE, 'task_runner',\n '--swarming-server', VAR_3, '--in-file', VAR_7.path.join(self.root_dir,\n 'work', 'task_runner_in.json'), '--out-file', VAR_39._out_file,\n '--cost-usd-hour', '3600.0', '--start', '100.0', '--min-free-space',\n str(int((os_utilities.get_min_free_space(bot_main.THIS_FILE) + 250.0) *\n 1024 * 1024))]\n", "self.assertEqual(VAR_6, VAR_40)\n", "self.assertEqual(True, VAR_41)\n", "self.assertEqual(self.bot.base_dir, VAR_42)\n", "self.assertEqual('24', VAR_48['SWARMING_TASK_ID'])\n", "self.assertTrue(VAR_43)\n", "self.assertEqual(subprocess42.STDOUT, VAR_44)\n", "self.assertEqual(subprocess42.PIPE, VAR_45)\n", "self.assertEqual(sys.platform != 'win32', VAR_46)\n" ]
[ "def __init__(self2, cmd, detached, cwd, env, stdout, stderr, stdin, close_fds):...\n", "self2.returncode = None\n", "self2._out_file = os.path.join(self.root_dir, 'work', 'task_runner_out.json')\n", "expected = [sys.executable, bot_main.THIS_FILE, 'task_runner',\n '--swarming-server', url, '--in-file', os.path.join(self.root_dir,\n 'work', 'task_runner_in.json'), '--out-file', self2._out_file,\n '--cost-usd-hour', '3600.0', '--start', '100.0', '--min-free-space',\n str(int((os_utilities.get_min_free_space(bot_main.THIS_FILE) + 250.0) *\n 1024 * 1024))]\n", "self.assertEqual(expected, cmd)\n", "self.assertEqual(True, detached)\n", "self.assertEqual(self.bot.base_dir, cwd)\n", "self.assertEqual('24', env['SWARMING_TASK_ID'])\n", "self.assertTrue(stdout)\n", "self.assertEqual(subprocess42.STDOUT, stderr)\n", "self.assertEqual(subprocess42.PIPE, stdin)\n", "self.assertEqual(sys.platform != 'win32', close_fds)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@tornado.web.asynchronous...\n", "" ]
[ "@tornado.web.asynchronous...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "VAR_4 = VAR_5 = None\n", "website_send_message(VAR_0, VAR_1, VAR_2)\n", "VAR_5 = frappe.db.sql('string'.format(email_id=sender))\n", "if not VAR_5:\n", "VAR_4 = frappe.db.get_value('Lead', dict(email_id=sender))\n", "VAR_6 = frappe.get_doc(dict(doctype='Opportunity', enquiry_from='Customer' if\n customer else 'Lead', VAR_3='Open', title=subject, contact_email=sender,\n to_discuss=message))\n", "if not VAR_4:\n", "if VAR_5:\n", "VAR_8 = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "VAR_6.customer = VAR_5[0][0]\n", "if VAR_4:\n", "VAR_6.insert(ignore_permissions=True)\n", "VAR_6.lead = VAR_4\n", "VAR_6.lead = VAR_8.name\n", "VAR_7 = frappe.get_doc({'doctype': 'Communication', 'subject': VAR_0,\n 'content': VAR_1, 'sender': VAR_2, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': VAR_6.name})\n", "VAR_7.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "lead = customer = None\n", "website_send_message(subject, message, sender)\n", "customer = frappe.db.sql(\n \"\"\"select distinct dl.link_name from `tabDynamic Link` dl\n\t\tleft join `tabContact` c on dl.parent=c.name where dl.link_doctype='Customer'\n\t\tand c.email_id='{email_id}'\"\"\"\n .format(email_id=sender))\n", "if not customer:\n", "lead = frappe.db.get_value('Lead', dict(email_id=sender))\n", "opportunity = frappe.get_doc(dict(doctype='Opportunity', enquiry_from=\n 'Customer' if customer else 'Lead', status='Open', title=subject,\n contact_email=sender, to_discuss=message))\n", "if not lead:\n", "if customer:\n", "new_lead = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "opportunity.customer = customer[0][0]\n", "if lead:\n", "opportunity.insert(ignore_permissions=True)\n", "opportunity.lead = lead\n", "opportunity.lead = new_lead.name\n", "comm = frappe.get_doc({'doctype': 'Communication', 'subject': subject,\n 'content': message, 'sender': sender, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': opportunity.name})\n", "comm.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6(VAR_4):...\n", "VAR_11 = FUNC_4(VAR_4)\n", "VAR_9 = FUNC_0()\n", "VAR_10 = VAR_9.cursor()\n", "if VAR_11 is None:\n", "VAR_11 = VAR_11 + 1\n", "VAR_10.execute(\n \"\"\"\n INSERT INTO people(name,karma,shame) VALUES('{}',1,0)\n \"\"\"\n .format(VAR_4))\n", "VAR_2.error('Execution failed with error: {}'.format(e))\n", "VAR_9.close()\n", "VAR_10.execute(\n \"\"\"\n UPDATE people SET karma = {0} WHERE name = '{1}'\n \"\"\"\n .format(VAR_11, VAR_4))\n", "VAR_2.error('Execution failed with error: {}'.format(e))\n", "VAR_9.commit()\n", "VAR_9.commit()\n", "VAR_2.debug('Inserted into karmadb 1 karma for {}'.format(VAR_4))\n", "VAR_2.debug('Inserted into karmadb {} karma for {}'.format(VAR_11, VAR_4))\n", "return 1\n", "return VAR_11\n" ]
[ "def karma_add(name):...\n", "karma = karma_ask(name)\n", "db = db_connect()\n", "cursor = db.cursor()\n", "if karma is None:\n", "karma = karma + 1\n", "cursor.execute(\n \"\"\"\n INSERT INTO people(name,karma,shame) VALUES('{}',1,0)\n \"\"\"\n .format(name))\n", "logger.error('Execution failed with error: {}'.format(e))\n", "db.close()\n", "cursor.execute(\n \"\"\"\n UPDATE people SET karma = {0} WHERE name = '{1}'\n \"\"\"\n .format(karma, name))\n", "logger.error('Execution failed with error: {}'.format(e))\n", "db.commit()\n", "db.commit()\n", "logger.debug('Inserted into karmadb 1 karma for {}'.format(name))\n", "logger.debug('Inserted into karmadb {} karma for {}'.format(karma, name))\n", "return 1\n", "return karma\n" ]
[ 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 0, 0, 0, 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "import imp\n", "VAR_5 = [VAR_13[0] for VAR_13 in imp.get_suffixes() if VAR_13[-1] == imp.\n C_EXTENSION]\n", "VAR_6 = [('pyodbc%s' % ext) for ext in VAR_5]\n", "VAR_7 = '-%s.%s' % (sys.version_info[0], sys.version_info[1])\n", "VAR_8 = join(dirname(dirname(abspath(__file__))), 'build')\n", "for root, dirs, files in os.walk(VAR_8):\n", "for d in dirs[:]:\n", "print(\n 'Did not find the pyodbc library in the build directory. Will use an installed version.'\n )\n", "if not d.endswith(VAR_7):\n", "for VAR_2 in VAR_6:\n", "dirs.remove(d)\n", "if VAR_2 in files:\n", "sys.path.insert(0, root)\n", "return\n" ]
[ "def add_to_path():...\n", "\"\"\"docstring\"\"\"\n", "import imp\n", "library_exts = [t[0] for t in imp.get_suffixes() if t[-1] == imp.C_EXTENSION]\n", "library_names = [('pyodbc%s' % ext) for ext in library_exts]\n", "dir_suffix = '-%s.%s' % (sys.version_info[0], sys.version_info[1])\n", "build = join(dirname(dirname(abspath(__file__))), 'build')\n", "for root, dirs, files in os.walk(build):\n", "for d in dirs[:]:\n", "print(\n 'Did not find the pyodbc library in the build directory. Will use an installed version.'\n )\n", "if not d.endswith(dir_suffix):\n", "for name in library_names:\n", "dirs.remove(d)\n", "if name in files:\n", "sys.path.insert(0, root)\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "For", "Expr'", "Condition", "For", "Expr'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_7(self, VAR_2, VAR_3, VAR_4, VAR_7={}):...\n", "self.write(VAR_2, VAR_3, VAR_4, {'state': 'cancelled'}, VAR_7=context)\n", "return True\n" ]
[ "def set_cancel(self, cr, uid, ids, context={}):...\n", "self.write(cr, uid, ids, {'state': 'cancelled'}, context=context)\n", "return True\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "@property...\n", "return self.get_object()\n" ]
[ "@property...\n", "return self.get_object()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_5(VAR_2):...\n", "VAR_16 = urlparse(VAR_2)\n", "VAR_17 = HTTPSConnection(VAR_16.netloc)\n", "VAR_17.request('HEAD', VAR_16.path)\n", "VAR_18 = VAR_17.getresponse()\n", "if VAR_18.getheader('location') == None:\n", "return VAR_2\n", "return VAR_18.getheader('location')\n" ]
[ "def expand(url):...\n", "o = urlparse(url)\n", "con = HTTPSConnection(o.netloc)\n", "con.request('HEAD', o.path)\n", "res = con.getresponse()\n", "if res.getheader('location') == None:\n", "return url\n", "return res.getheader('location')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(self, VAR_2, VAR_9):...\n", "VAR_2 = db.get_dataset(VAR_2)\n", "VAR_1 = db.User.select().where(db.User.email == VAR_9).get()\n", "VAR_34 = db.DatasetAccess.select().where(db.DatasetAccess.user == VAR_1, db\n .DatasetAccess.dataset == VAR_2).get()\n", "VAR_34.has_access = True\n", "VAR_34.save()\n", "db.UserAccessLog.create(VAR_1=user, VAR_2=dataset, action='access_granted')\n", "VAR_23 = MIMEMultipart()\n", "VAR_23['to'] = VAR_9\n", "VAR_23['from'] = settings.from_address\n", "VAR_23['subject'] = 'Swefreq access granted to {}'.format(VAR_2.short_name)\n", "VAR_23.add_header('reply-to', settings.reply_to_address)\n", "VAR_24 = 'string'.format(VAR_2.full_name, VAR_2.short_name, VAR_2.study.\n contact_name)\n", "VAR_23.attach(MIMEText(VAR_24, 'plain'))\n", "VAR_25 = smtplib.SMTP(settings.mail_server)\n", "VAR_25.sendmail(VAR_23['from'], [VAR_23['to']], VAR_23.as_string())\n" ]
[ "def post(self, dataset, email):...\n", "dataset = db.get_dataset(dataset)\n", "user = db.User.select().where(db.User.email == email).get()\n", "da = db.DatasetAccess.select().where(db.DatasetAccess.user == user, db.\n DatasetAccess.dataset == dataset).get()\n", "da.has_access = True\n", "da.save()\n", "db.UserAccessLog.create(user=user, dataset=dataset, action='access_granted')\n", "msg = MIMEMultipart()\n", "msg['to'] = email\n", "msg['from'] = settings.from_address\n", "msg['subject'] = 'Swefreq access granted to {}'.format(dataset.short_name)\n", "msg.add_header('reply-to', settings.reply_to_address)\n", "body = (\n \"\"\"You now have access to the {} dataset\n\nPlease visit https://swefreq.nbis.se/dataset/{}/download to download files.\n \"\"\"\n .format(dataset.full_name, dataset.short_name, dataset.study.contact_name))\n", "msg.attach(MIMEText(body, 'plain'))\n", "server = smtplib.SMTP(settings.mail_server)\n", "server.sendmail(msg['from'], [msg['to']], msg.as_string())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_0):...\n", "self.db_conn = VAR_0\n" ]
[ "def __init__(self, db_conn):...\n", "self.db_conn = db_conn\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def __init__(self, VAR_0, VAR_1=False):...\n", "self.settings = VAR_0\n", "self.serial_settings = get_serial_settings(VAR_0)\n", "self.sensors = VAR_0.get('sensors', None)\n", "self.cmd = codecs.decode(self.settings['cmd'], 'unicode-escape')\n", "self.regex = VAR_0.get('regex', None)\n", "self.debug = VAR_1\n", "if self.debug:\n", "print('serial settings:', self.serial_settings)\n", "super().__init__(**self.serial_settings)\n" ]
[ "def __init__(self, settings, debug=False):...\n", "self.settings = settings\n", "self.serial_settings = get_serial_settings(settings)\n", "self.sensors = settings.get('sensors', None)\n", "self.cmd = codecs.decode(self.settings['cmd'], 'unicode-escape')\n", "self.regex = settings.get('regex', None)\n", "self.debug = debug\n", "if self.debug:\n", "print('serial settings:', self.serial_settings)\n", "super().__init__(**self.serial_settings)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "\"\"\"string\"\"\"\n", "import sys\n", "import citest.aws_testing as aws\n", "import citest.json_contract as jc\n", "import citest.service_testing as st\n", "import spinnaker_testing as sk\n", "import spinnaker_testing.kato as kato\n", "\"\"\"string\"\"\"\n", "VAR_0 = ''\n", "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return kato.new_agent(VAR_2)\n" ]
[ "\"\"\"\nTests to see if CloudDriver/Kato can interoperate with Amazon Web Services.\n\nSample Usage:\n Assuming you have created $PASSPHRASE_FILE (which you should chmod 400):\n and $CITEST_ROOT points to the root directory of this repository\n (which is . if you execute this from the root)\n and $AWS_PROFILE is the name of the aws_cli profile for authenticating\n to observe aws resources:\n\n This first command would be used if Spinnaker itself was deployed on GCE.\n The test needs to talk to GCE to get to spinnaker (using the gce_* params)\n then talk to AWS (using the aws_profile with the aws cli program) to\n verify Spinnaker had the right effects on AWS.\n\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/aws_kato_test.py --gce_ssh_passphrase_file=$PASSPHRASE_FILE --gce_project=$PROJECT --gce_zone=$GCE_ZONE --gce_instance=$INSTANCE --test_aws_zone=$AWS_ZONE --aws_profile=$AWS_PROFILE\n\n or\n\n This second command would be used if Spinnaker itself was deployed some\n place reachable through a direct IP connection. It could be, but is not\n necessarily deployed on GCE. It is similar to above except it does not\n need to go through GCE and its firewalls to locate the actual IP endpoints\n rather those are already known and accessible.\n\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/aws_kato_test.py --native_hostname=host-running-kato\n --test_aws_zone=$AWS_ZONE --aws_profile=$AWS_PROFILE\n\n Note that the $AWS_ZONE is not directly used, rather it is a standard\n parameter being used to infer the region. The test is going to pick\n some different availability zones within the region in order to test kato.\n These are currently hardcoded in.\n\"\"\"\n", "import sys\n", "import citest.aws_testing as aws\n", "import citest.json_contract as jc\n", "import citest.service_testing as st\n", "import spinnaker_testing as sk\n", "import spinnaker_testing.kato as kato\n", "\"\"\"Defines the scenario for the test.\n\n This scenario defines the different test operations.\n We're going to:\n Create a Load Balancer\n Delete a Load Balancer\n \"\"\"\n", "__use_lb_name = ''\n", "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return kato.new_agent(bindings)\n" ]
[ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Expr'", "Assign'", "Condition", "Docstring", "Return'" ]
[ "def FUNC_13(self, VAR_14):...\n", "" ]
[ "def is_float(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@VAR_0.route('/search', methods=['GET'])...\n", "print('in rec query')\n", "VAR_7 = {}\n", "VAR_7['pages'] = []\n", "print(request.args.get('query'))\n", "VAR_4 = request.args.get('query')\n", "if not VAR_4:\n", "return jsonify(VAR_7)\n", "VAR_4 = VAR_4.lower()\n", "VAR_8 = FUNC_2(VAR_4)\n", "return jsonify(VAR_8)\n" ]
[ "@app.route('/search', methods=['GET'])...\n", "print('in rec query')\n", "emptyRes = {}\n", "emptyRes['pages'] = []\n", "print(request.args.get('query'))\n", "query = request.args.get('query')\n", "if not query:\n", "return jsonify(emptyRes)\n", "query = query.lower()\n", "rankedList = getRanking(query)\n", "return jsonify(rankedList)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_10(self, VAR_10, VAR_12):...\n", "\"\"\"docstring\"\"\"\n", "self._create_server(VAR_12)\n", "VAR_5 = {}\n", "VAR_5['volumeName'] = VAR_10['name']\n", "VAR_5['serverName'] = VAR_12['host']\n", "self._cliq_run_xml('assignVolumeToServer', VAR_5)\n", "VAR_32 = self._get_iscsi_properties(VAR_10)\n", "return {'driver_volume_type': 'iscsi', 'data': VAR_32}\n" ]
[ "def initialize_connection(self, volume, connector):...\n", "\"\"\"docstring\"\"\"\n", "self._create_server(connector)\n", "cliq_args = {}\n", "cliq_args['volumeName'] = volume['name']\n", "cliq_args['serverName'] = connector['host']\n", "self._cliq_run_xml('assignVolumeToServer', cliq_args)\n", "iscsi_properties = self._get_iscsi_properties(volume)\n", "return {'driver_volume_type': 'iscsi', 'data': iscsi_properties}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "@Endpoint('GET', '/cephx/clients')...\n", "return [client for client in CephX.list_clients()]\n" ]
[ "@Endpoint('GET', '/cephx/clients')...\n", "return [client for client in CephX.list_clients()]\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "\"\"\"\nModule for gathering disk information\n\"\"\"\n", "import logging\n", "import salt.utils\n", "VAR_0 = logging.getLogger(__name__)\n", "def __virtual__():...\n", "\"\"\"docstring\"\"\"\n", "if salt.utils.is_windows():\n", "return False\n", "return 'disk'\n" ]
[ "\"\"\"\nModule for gathering disk information\n\"\"\"\n", "import logging\n", "import salt.utils\n", "log = logging.getLogger(__name__)\n", "def __virtual__():...\n", "\"\"\"docstring\"\"\"\n", "if salt.utils.is_windows():\n", "return False\n", "return 'disk'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Assign'", "FunctionDef'", "Docstring", "Condition", "Return'", "Return'" ]
[ "from django.db.models.signals import post_save, post_delete, m2m_changed\n", "from django.utils import timezone\n", "from lib.cache import CachedAbstract\n", "from ..models import StudentGroup, Enrollment, CourseInstance, Course\n", "from ..renders import render_group_info\n", "VAR_4 = 'topmenu'\n", "def __init__(self, VAR_5):...\n", "self.user = VAR_5\n", "super().__init__(VAR_5)\n", "def FUNC_4(self, VAR_5, VAR_6=None):...\n", "VAR_7 = VAR_5.userprofile if VAR_5 and VAR_5.is_authenticated() else None\n", "return {'courses': self._generate_courses(VAR_7), 'groups': self.\n _generate_groups(VAR_7)}\n" ]
[ "from django.db.models.signals import post_save, post_delete, m2m_changed\n", "from django.utils import timezone\n", "from lib.cache import CachedAbstract\n", "from ..models import StudentGroup, Enrollment, CourseInstance, Course\n", "from ..renders import render_group_info\n", "KEY_PREFIX = 'topmenu'\n", "def __init__(self, user):...\n", "self.user = user\n", "super().__init__(user)\n", "def _generate_data(self, user, data=None):...\n", "profile = user.userprofile if user and user.is_authenticated() else None\n", "return {'courses': self._generate_courses(profile), 'groups': self.\n _generate_groups(profile)}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Return'" ]
[ "@VAR_4.teardown_appcontext...\n", "\"\"\"docstring\"\"\"\n", "if hasattr(g, 'sqlite_db'):\n", "g.sqlite_db.close()\n" ]
[ "@app.teardown_appcontext...\n", "\"\"\"docstring\"\"\"\n", "if hasattr(g, 'sqlite_db'):\n", "g.sqlite_db.close()\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Expr'" ]
[ "def FUNC_4(self, VAR_9, VAR_20):...\n", "VAR_23 = []\n", "for VAR_29 in VAR_20:\n", "VAR_23.append((VAR_9, str(VAR_29['qhash']), str(VAR_29['hash']), str(VAR_29\n ['date']), str(VAR_29['url']), VAR_29['content'], datetime.now()))\n", "VAR_22 = 'string' + ','.join('(%s, %s, %s, %s, %s, %s, %s)' for _ in VAR_23)\n", "VAR_24 = [item for sublist in VAR_23 for item in sublist]\n", "self.cur.execute(VAR_22, VAR_24)\n", "self.conn.commit()\n" ]
[ "def insert_references(self, qid, articles):...\n", "insert_values = []\n", "for article in articles:\n", "insert_values.append((qid, str(article['qhash']), str(article['hash']), str\n (article['date']), str(article['url']), article['content'], datetime.now())\n )\n", "sql = (\n 'INSERT INTO article_reference (id_query, query_hash, article_hash, article_date, article_url, article_content, retrieved_at) VALUES'\n + ','.join('(%s, %s, %s, %s, %s, %s, %s)' for _ in insert_values))\n", "flattened_values = [item for sublist in insert_values for item in sublist]\n", "self.cur.execute(sql, flattened_values)\n", "self.conn.commit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_6(self, VAR_22):...\n", "if VAR_22:\n", "VAR_107 = int(VAR_22, 36)\n", "if self.redirect:\n", "return Meetup._byID(VAR_107, True)\n", "abort(404, 'page not found')\n", "return None\n" ]
[ "def run(self, meetup_id36):...\n", "if meetup_id36:\n", "meetup_id = int(meetup_id36, 36)\n", "if self.redirect:\n", "return Meetup._byID(meetup_id, True)\n", "abort(404, 'page not found')\n", "return None\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_14(VAR_24):...\n", "for user, VAR_20, forum in VAR_26.findall(VAR_24):\n", "if VAR_20 not in VAR_7:\n", "VAR_7[VAR_20] = set()\n", "if len(forum) > 0:\n", "FUNC_13(forum)\n", "VAR_0.info('Appending %s:%s to forums[%s]', user, forum, VAR_20)\n", "VAR_7[VAR_20].add((user, forum))\n" ]
[ "def affu(urls):...\n", "for user, domain, forum in r_udf.findall(urls):\n", "if domain not in forums:\n", "forums[domain] = set()\n", "if len(forum) > 0:\n", "get_forum_id(forum)\n", "logger.info('Appending %s:%s to forums[%s]', user, forum, domain)\n", "forums[domain].add((user, forum))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Assign'", "For", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_6(self):...\n", "\"\"\"docstring\"\"\"\n", "assert self._redis_address is None\n", "self._redis_address, VAR_11, VAR_12 = ray.services.start_redis(self.\n _node_ip_address, port=self._ray_params.redis_port, redis_shard_ports=\n self._ray_params.redis_shard_ports, num_redis_shards=self._ray_params.\n num_redis_shards, redis_max_clients=self._ray_params.redis_max_clients,\n redirect_output=self._ray_params.redirect_output,\n redirect_worker_output=self._ray_params.redirect_worker_output,\n password=self._ray_params.redis_password, redis_max_memory=self.\n _ray_params.redis_max_memory)\n", "assert VAR_13.PROCESS_TYPE_REDIS_SERVER not in self.all_processes\n", "self.all_processes[VAR_13.PROCESS_TYPE_REDIS_SERVER] = VAR_12\n" ]
[ "def start_redis(self):...\n", "\"\"\"docstring\"\"\"\n", "assert self._redis_address is None\n", "self._redis_address, redis_shards, process_infos = ray.services.start_redis(\n self._node_ip_address, port=self._ray_params.redis_port,\n redis_shard_ports=self._ray_params.redis_shard_ports, num_redis_shards=\n self._ray_params.num_redis_shards, redis_max_clients=self._ray_params.\n redis_max_clients, redirect_output=self._ray_params.redirect_output,\n redirect_worker_output=self._ray_params.redirect_worker_output,\n password=self._ray_params.redis_password, redis_max_memory=self.\n _ray_params.redis_max_memory)\n", "assert ray_constants.PROCESS_TYPE_REDIS_SERVER not in self.all_processes\n", "self.all_processes[ray_constants.PROCESS_TYPE_REDIS_SERVER] = process_infos\n" ]
[ 0, 0, 0, 6, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assert'", "Assign'", "Assert'", "Assign'" ]
[ "def __init__(self, VAR_43, VAR_44=None, VAR_1=0):...\n", "if VAR_44 is None:\n", "self.options = CLASS_0()\n", "self.loader = dataloader.DataLoader()\n", "self.options.verbosity = VAR_1\n", "self.variable_manager = vars.VariableManager()\n", "self.inventory = VAR_2.Inventory(loader=self.loader, variable_manager=self.\n variable_manager, host_list='/etc/ansible/hosts')\n", "self.variable_manager.set_inventory(self.inventory)\n", "VAR_51 = os.path.abspath('.')\n", "VAR_52 = '%s/%s' % (VAR_51, VAR_43)\n", "VAR_0.verbosity = self.options.verbosity\n", "self.pbex = playbook_executor.PlaybookExecutor(playbooks=[playbook], VAR_2=\n self.inventory, variable_manager=self.variable_manager, loader=self.\n loader, VAR_44=self.options, passwords={})\n" ]
[ "def __init__(self, playbook, options=None, verbosity=0):...\n", "if options is None:\n", "self.options = Options()\n", "self.loader = dataloader.DataLoader()\n", "self.options.verbosity = verbosity\n", "self.variable_manager = vars.VariableManager()\n", "self.inventory = inventory.Inventory(loader=self.loader, variable_manager=\n self.variable_manager, host_list='/etc/ansible/hosts')\n", "self.variable_manager.set_inventory(self.inventory)\n", "pb_dir = os.path.abspath('.')\n", "playbook_path = '%s/%s' % (pb_dir, playbook)\n", "display.verbosity = self.options.verbosity\n", "self.pbex = playbook_executor.PlaybookExecutor(playbooks=[playbook],\n inventory=self.inventory, variable_manager=self.variable_manager,\n loader=self.loader, options=self.options, passwords={})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_9():...\n", "VAR_10 = get_executing_test()\n", "VAR_17 = getattr(VAR_10, 'webdriver', None)\n", "if VAR_17:\n", "VAR_10.webdriver = None\n", "VAR_17.close()\n" ]
[ "def stop_webdriver():...\n", "test = get_executing_test()\n", "webdriver = getattr(test, 'webdriver', None)\n", "if webdriver:\n", "test.webdriver = None\n", "webdriver.close()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_0(self):...\n", "VAR_0.config.from_object('config.TestConfig')\n", "return VAR_0\n" ]
[ "def create_app(self):...\n", "app.config.from_object('config.TestConfig')\n", "return app\n" ]
[ 0, 4, 4 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_2(VAR_0, VAR_1, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = FUNC_0(VAR_0, VAR_1)\n", "VAR_8 = VAR_7.cursor()\n", "VAR_8.execute(VAR_2)\n", "return FUNC_4(VAR_8.description), VAR_8.fetchall()\n" ]
[ "def execute_query(app, context, query):...\n", "\"\"\"docstring\"\"\"\n", "con = get_db(app, context)\n", "cur = con.cursor()\n", "cur.execute(query)\n", "return extract_schema(cur.description), cur.fetchall()\n" ]
[ 0, 0, 4, 0, 4, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2():...\n", "\"\"\"docstring\"\"\"\n", "FUNC_1('DELETE FROM matches')\n" ]
[ "def deleteMatches():...\n", "\"\"\"docstring\"\"\"\n", "_commit('DELETE FROM matches')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def __init__(self, *VAR_3, **VAR_4):...\n", "VAR_11 = VAR_4.pop('execute', self.san_execute)\n", "super(CLASS_0, self).__init__(*VAR_3, VAR_11=execute, **kwargs)\n", "self.configuration.append_config_values(VAR_1)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "execute = kwargs.pop('execute', self.san_execute)\n", "super(SanDriver, self).__init__(*args, execute=execute, **kwargs)\n", "self.configuration.append_config_values(san_opts)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_7, VAR_20=True, *VAR_15, **VAR_16):...\n", "CLASS_0.__init__(self, VAR_7, *VAR_15, **kw)\n", "self.redirect = VAR_20\n" ]
[ "def __init__(self, param, redirect=True, *a, **kw):...\n", "Validator.__init__(self, param, *a, **kw)\n", "self.redirect = redirect\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_1(self, VAR_1, VAR_2):...\n", "self.assertEqual(self.parser.parse(VAR_1), VAR_2)\n" ]
[ "def __checkCompilation(self, script, result):...\n", "self.assertEqual(self.parser.parse(script), result)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_4(self):...\n", "VAR_6 = {'message': '', 'title': 'Config: Chrome Infra Monitoring Proxy'}\n", "VAR_7 = common.MonAcqData.get_or_insert(common.CONFIG_DATA_KEY)\n", "self.setParams(VAR_6, VAR_7)\n", "VAR_10 = False\n", "VAR_11 = []\n", "for VAR_12, parser in self._parsers.iteritems():\n", "if not self.request.get(VAR_12):\n", "if VAR_11:\n", "setattr(VAR_7, VAR_12, parser(self.request.get(VAR_12)))\n", "VAR_11.append(VAR_12)\n", "VAR_6[VAR_12] = self.request.get(VAR_12)\n", "VAR_6['message'] = 'Failed to update %s. Please try again.' % ', '.join(VAR_11)\n", "if VAR_10:\n", "VAR_10 = True\n", "self.render_response('set_credentials.html', **params)\n", "VAR_7.put()\n", "self.setParams(VAR_6, VAR_7)\n", "VAR_6['message'] = 'Updated configuration.'\n", "logging.info('Updated configuration: %r', VAR_7)\n" ]
[ "def post(self):...\n", "params = {'message': '', 'title': 'Config: Chrome Infra Monitoring Proxy'}\n", "data = common.MonAcqData.get_or_insert(common.CONFIG_DATA_KEY)\n", "self.setParams(params, data)\n", "updated_fields = False\n", "failed_fields = []\n", "for field, parser in self._parsers.iteritems():\n", "if not self.request.get(field):\n", "if failed_fields:\n", "setattr(data, field, parser(self.request.get(field)))\n", "failed_fields.append(field)\n", "params[field] = self.request.get(field)\n", "params['message'] = 'Failed to update %s. Please try again.' % ', '.join(\n failed_fields)\n", "if updated_fields:\n", "updated_fields = True\n", "self.render_response('set_credentials.html', **params)\n", "data.put()\n", "self.setParams(params, data)\n", "params['message'] = 'Updated configuration.'\n", "logging.info('Updated configuration: %r', data)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Condition", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_7(VAR_7):...\n", "\"\"\"docstring\"\"\"\n", "@wraps(VAR_7)...\n", "return VAR_7(*VAR_8, **kwargs)\n", "current_app.logger.warning(e.message, exc_info=True)\n", "return FUNC_16\n", "return FUNC_15(e.remote, e.response, e.code, e.uri, e.description)\n" ]
[ "def oauth_error_handler(f):...\n", "\"\"\"docstring\"\"\"\n", "@wraps(f)...\n", "return f(*args, **kwargs)\n", "current_app.logger.warning(e.message, exc_info=True)\n", "return inner\n", "return oauth2_handle_error(e.remote, e.response, e.code, e.uri, e.description)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Expr'", "Return'", "Return'" ]
[ "@FUNC_0...\n", "VAR_12 = await get_session(VAR_3)\n", "VAR_13 = await VAR_3.post()\n", "VAR_5 = VAR_12['uname']\n", "if 'action' in VAR_13:\n", "if VAR_13['action'] == 'change_password':\n", "return f'Invalid POST request: <i>{VAR_13.items()}</i>'\n", "if set(['cpsw', 'psw', 'psw2']).issubset(VAR_13.keys()):\n", "if VAR_13['psw'] != VAR_13['psw2']:\n", "return \"New passwords doesn't match!\"\n", "VAR_25 = await database.select_user(VAR_3, VAR_5)\n", "if not VAR_25:\n", "return 'Error: Logged in as non-existing user! (what?)'\n", "VAR_27 = VAR_13['cpsw']\n", "if bcrypt.hashpw(VAR_27.encode('UTF-8'), VAR_25[0][2].encode('UTF-8')).decode(\n", "return 'Error: \"Current password\" was incorrect'\n", "VAR_21 = VAR_13['psw']\n", "VAR_26 = bcrypt.hashpw(VAR_21.encode('UTF-8'), bcrypt.gensalt()).decode('UTF-8'\n )\n", "await database.update_user_password(VAR_3, VAR_5, VAR_26)\n", "return \"\"\"Success! Your password has been changed!<br>\n<a href=\"/settings\">Click here to go back.</a>\"\"\"\n" ]
[ "@handle_html...\n", "session = await get_session(request)\n", "data = await request.post()\n", "uname = session['uname']\n", "if 'action' in data:\n", "if data['action'] == 'change_password':\n", "return f'Invalid POST request: <i>{data.items()}</i>'\n", "if set(['cpsw', 'psw', 'psw2']).issubset(data.keys()):\n", "if data['psw'] != data['psw2']:\n", "return \"New passwords doesn't match!\"\n", "entry = await database.select_user(request, uname)\n", "if not entry:\n", "return 'Error: Logged in as non-existing user! (what?)'\n", "cpsw = data['cpsw']\n", "if bcrypt.hashpw(cpsw.encode('UTF-8'), entry[0][2].encode('UTF-8')).decode(\n", "return 'Error: \"Current password\" was incorrect'\n", "psw = data['psw']\n", "bhash = bcrypt.hashpw(psw.encode('UTF-8'), bcrypt.gensalt()).decode('UTF-8')\n", "await database.update_user_password(request, uname, bhash)\n", "return \"\"\"Success! Your password has been changed!<br>\n<a href=\"/settings\">Click here to go back.</a>\"\"\"\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Return'", "Condition", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_14(self, VAR_17):...\n", "self.currentDirectoryPath = VAR_17\n", "VAR_34 = os.listdir(self.currentDirectoryPath)\n", "VAR_35 = []\n", "for fileOrDir in VAR_34:\n", "if not fileOrDir.endswith('.json') and (not self.directoryFilter or self.\n", "self.currentDirectoryCache = sorted(VAR_35)\n", "VAR_35.append(fileOrDir)\n" ]
[ "def changeCurrentDirectory(self, newDirectory):...\n", "self.currentDirectoryPath = newDirectory\n", "dirList = os.listdir(self.currentDirectoryPath)\n", "filteredDirList = []\n", "for fileOrDir in dirList:\n", "if not fileOrDir.endswith('.json') and (not self.directoryFilter or self.\n", "self.currentDirectoryCache = sorted(filteredDirList)\n", "filteredDirList.append(fileOrDir)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Expr'" ]
[ "@inlineCallbacks...\n", "yield super(CLASS_1, self).setUp()\n", "self.events_deferred = Deferred()\n", "self.connection_pool = HTTPConnectionPool(reactor, False)\n", "self.socket_open_deferred = self.tribler_started_deferred.addCallback(self.\n open_events_socket)\n", "self.messages_to_wait_for = 0\n" ]
[ "@inlineCallbacks...\n", "yield super(TestEventsEndpoint, self).setUp()\n", "self.events_deferred = Deferred()\n", "self.connection_pool = HTTPConnectionPool(reactor, False)\n", "self.socket_open_deferred = self.tribler_started_deferred.addCallback(self.\n open_events_socket)\n", "self.messages_to_wait_for = 0\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "VAR_30 = os.environ.get('OS_CONFIG_APPLIER_TEMPLATES', None)\n", "if VAR_30 is None:\n", "VAR_30 = '/opt/stack/os-apply-config/templates'\n", "return VAR_30\n", "if not os.path.isdir(VAR_30):\n", "VAR_30 = '/opt/stack/os-config-applier/templates'\n", "if os.path.isdir(VAR_30) and not os.path.isdir(VAR_0):\n", "logging.warning(\n 'Template directory %s is deprecated. The recommended location for template files is %s'\n , VAR_30, VAR_0)\n", "VAR_30 = VAR_0\n" ]
[ "def templates_dir():...\n", "\"\"\"docstring\"\"\"\n", "templates_dir = os.environ.get('OS_CONFIG_APPLIER_TEMPLATES', None)\n", "if templates_dir is None:\n", "templates_dir = '/opt/stack/os-apply-config/templates'\n", "return templates_dir\n", "if not os.path.isdir(templates_dir):\n", "templates_dir = '/opt/stack/os-config-applier/templates'\n", "if os.path.isdir(templates_dir) and not os.path.isdir(DEFAULT_TEMPLATES_DIR):\n", "logging.warning(\n 'Template directory %s is deprecated. The recommended location for template files is %s'\n , templates_dir, DEFAULT_TEMPLATES_DIR)\n", "templates_dir = DEFAULT_TEMPLATES_DIR\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Condition", "Expr'", "Assign'" ]
[ "def __str__(self):...\n", "VAR_10 = \"\"\"DrSEUs Attributes:\n\tDebugger: \"\"\" + str(self.debugger\n ) + '\\n\\tDUT:\\t' + str(self.debugger.dut).replace('\\n\\t', '\\n\\t\\t')\n", "if self.campaign_data['use_aux']:\n", "VAR_10 += '\\n\\tAUX:\\t' + str(self.debugger.aux).replace('\\n\\t', '\\n\\t\\t')\n", "VAR_10 += \"\"\"\n\tCampaign Information:\n\t\tCampaign Number: \"\"\" + str(self.\n campaign_data['id']) + \"\"\"\n\t\tDUT Command: \\\"\"\"\" + self.campaign_data[\n 'command'] + '\"'\n", "if self.campaign_data['use_aux']:\n", "VAR_10 += \"\"\"\n\t\tAUX Command: \\\"\"\"\" + self.campaign_data['aux_command'] + '\"'\n", "VAR_10 += '\\n\\t\\t' + ('Host ' if self.campaign_data['use_simics'] else ''\n ) + 'Execution Time: ' + str(self.campaign_data['exec_time']) + ' seconds'\n", "if self.campaign_data['use_simics']:\n", "VAR_10 += \"\"\"\n\t\tExecution Cycles: \"\"\" + '{:,}'.format(self.campaign_data[\n 'num_cycles']) + \"\"\" cycles\n\t\tSimulated Time: \"\"\" + str(self.\n campaign_data['sim_time']) + ' seconds'\n", "return VAR_10\n" ]
[ "def __str__(self):...\n", "string = \"\"\"DrSEUs Attributes:\n\tDebugger: \"\"\" + str(self.debugger\n ) + '\\n\\tDUT:\\t' + str(self.debugger.dut).replace('\\n\\t', '\\n\\t\\t')\n", "if self.campaign_data['use_aux']:\n", "string += '\\n\\tAUX:\\t' + str(self.debugger.aux).replace('\\n\\t', '\\n\\t\\t')\n", "string += \"\"\"\n\tCampaign Information:\n\t\tCampaign Number: \"\"\" + str(self.\n campaign_data['id']) + \"\"\"\n\t\tDUT Command: \\\"\"\"\" + self.campaign_data[\n 'command'] + '\"'\n", "if self.campaign_data['use_aux']:\n", "string += \"\"\"\n\t\tAUX Command: \\\"\"\"\" + self.campaign_data['aux_command'] + '\"'\n", "string += '\\n\\t\\t' + ('Host ' if self.campaign_data['use_simics'] else ''\n ) + 'Execution Time: ' + str(self.campaign_data['exec_time']) + ' seconds'\n", "if self.campaign_data['use_simics']:\n", "string += \"\"\"\n\t\tExecution Cycles: \"\"\" + '{:,}'.format(self.campaign_data[\n 'num_cycles']) + \"\"\" cycles\n\t\tSimulated Time: \"\"\" + str(self.\n campaign_data['sim_time']) + ' seconds'\n", "return string\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "AugAssign'", "AugAssign'", "Condition", "AugAssign'", "AugAssign'", "Condition", "AugAssign'", "Return'" ]
[ "def FUNC_1(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_0.inf_file = 'item.inf'\n", "VAR_14 = VAR_0.folderName + '/login.inf'\n", "VAR_15 = [line.strip() for line in open(VAR_14, 'r')]\n", "VAR_0.username = VAR_15[0]\n", "VAR_0.password = VAR_15[1]\n" ]
[ "def get_folder_data(args):...\n", "\"\"\"docstring\"\"\"\n", "args.inf_file = 'item.inf'\n", "cred_file = args.folderName + '/login.inf'\n", "creds = [line.strip() for line in open(cred_file, 'r')]\n", "args.username = creds[0]\n", "args.password = creds[1]\n" ]
[ 0, 0, 0, 5, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_1, VAR_7, VAR_11, VAR_4=None):...\n", "GenericRequest.__init__(self, VAR_1, VAR_4)\n", "self.url = '%stasks/%s/submissions/%s/token' % (self.base_url, VAR_7[1], VAR_11\n )\n", "self.task = VAR_7\n", "self.submission_num = VAR_11\n", "self.data = {}\n" ]
[ "def __init__(self, browser, task, submission_num, base_url=None):...\n", "GenericRequest.__init__(self, browser, base_url)\n", "self.url = '%stasks/%s/submissions/%s/token' % (self.base_url, task[1],\n submission_num)\n", "self.task = task\n", "self.submission_num = submission_num\n", "self.data = {}\n" ]
[ 0, 0, 5, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_0(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_18 = pq.backends.ClassicalSimulator(**self.filter_kwargs_for_backend(\n self.kwargs))\n", "self.eng = pq.MainEngine(VAR_18)\n", "super().reset()\n" ]
[ "def reset(self):...\n", "\"\"\"docstring\"\"\"\n", "backend = pq.backends.ClassicalSimulator(**self.filter_kwargs_for_backend(\n self.kwargs))\n", "self.eng = pq.MainEngine(backend)\n", "super().reset()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_6(self, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = self.bindings['TEST_APP_COMPONENT_NAME']\n", "if not VAR_5:\n", "VAR_7 += '-pub'\n", "VAR_12 = self.agent.make_json_payload_from_kwargs(job=[{'type':\n 'deleteLoadBalancer', 'cloudProvider': 'aws', 'credentials': self.\n bindings['AWS_CREDENTIALS'], 'regions': [self.bindings[\n 'TEST_AWS_REGION']], 'loadBalancerName': load_balancer_name}],\n description='Delete Load Balancer: {0} in {1}:{2}'.format(\n load_balancer_name, self.bindings['AWS_CREDENTIALS'], self.bindings[\n 'TEST_AWS_REGION']), application=self.TEST_APP)\n", "VAR_13 = aws.AwsContractBuilder(self.aws_observer)\n", "VAR_13.new_clause_builder('Load Balancer Removed').collect_resources(aws_module\n ='elb', command='describe-load-balancers', args=[\n '--load-balancer-names', load_balancer_name], no_resources_ok=True\n ).excludes_path_value('LoadBalancerName', VAR_7)\n", "VAR_14 = '_with_vpc' if VAR_5 else '_without_vpc'\n", "return st.OperationContract(self.new_post_operation(title=\n 'delete_load_balancer' + title_decorator, data=payload, path='tasks'),\n VAR_6=builder.build())\n" ]
[ "def delete_load_balancer(self, use_vpc):...\n", "\"\"\"docstring\"\"\"\n", "load_balancer_name = self.bindings['TEST_APP_COMPONENT_NAME']\n", "if not use_vpc:\n", "load_balancer_name += '-pub'\n", "payload = self.agent.make_json_payload_from_kwargs(job=[{'type':\n 'deleteLoadBalancer', 'cloudProvider': 'aws', 'credentials': self.\n bindings['AWS_CREDENTIALS'], 'regions': [self.bindings[\n 'TEST_AWS_REGION']], 'loadBalancerName': load_balancer_name}],\n description='Delete Load Balancer: {0} in {1}:{2}'.format(\n load_balancer_name, self.bindings['AWS_CREDENTIALS'], self.bindings[\n 'TEST_AWS_REGION']), application=self.TEST_APP)\n", "builder = aws.AwsContractBuilder(self.aws_observer)\n", "builder.new_clause_builder('Load Balancer Removed').collect_resources(\n aws_module='elb', command='describe-load-balancers', args=[\n '--load-balancer-names', load_balancer_name], no_resources_ok=True\n ).excludes_path_value('LoadBalancerName', load_balancer_name)\n", "title_decorator = '_with_vpc' if use_vpc else '_without_vpc'\n", "return st.OperationContract(self.new_post_operation(title=\n 'delete_load_balancer' + title_decorator, data=payload, path='tasks'),\n contract=builder.build())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "AugAssign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def __iter__(self) ->Iterator[IRow]:...\n", "VAR_4 = Spy()\n", "yield VAR_4\n", "VAR_5 = construct_select_statement(VAR_4, self.from_object)\n", "print(VAR_5)\n", "yield from self.client.query(VAR_5)\n" ]
[ "def __iter__(self) ->Iterator[IRow]:...\n", "spy = Spy()\n", "yield spy\n", "query_string = construct_select_statement(spy, self.from_object)\n", "print(query_string)\n", "yield from self.client.query(query_string)\n" ]
[ 0, 0, 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_1(VAR_1):...\n", "VAR_7 = {'bluef': '\\x1b[34m', 'boldon': '\\x1b[1m', 'boldoff': '\\x1b[22m',\n 'redf': '\\x1b[31m', 'reset': '\\x1b[0m', 'yellowf': '\\x1b[33m'}\n", "if VAR_1 in VAR_7:\n", "return VAR_7[VAR_1]\n", "return ''\n" ]
[ "def ansi(keyword):...\n", "codes = {'bluef': '\\x1b[34m', 'boldon': '\\x1b[1m', 'boldoff': '\\x1b[22m',\n 'redf': '\\x1b[31m', 'reset': '\\x1b[0m', 'yellowf': '\\x1b[33m'}\n", "if keyword in codes:\n", "return codes[keyword]\n", "return ''\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def __init__(self, VAR_37=None, VAR_38=None, VAR_39=False):...\n", "\"\"\"docstring\"\"\"\n", "list.__init__(self)\n", "self._names = dict()\n", "if VAR_37:\n", "self.extend(map(str, VAR_37) if VAR_39 else VAR_37)\n", "if VAR_38:\n", "if isinstance(VAR_37, CLASS_2):\n", "for VAR_44, item in VAR_38.items():\n", "self.take_names(VAR_37.get_names())\n", "self.append(item)\n", "self.add_name(VAR_44)\n" ]
[ "def __init__(self, toclone=None, fromdict=None, plainstr=False):...\n", "\"\"\"docstring\"\"\"\n", "list.__init__(self)\n", "self._names = dict()\n", "if toclone:\n", "self.extend(map(str, toclone) if plainstr else toclone)\n", "if fromdict:\n", "if isinstance(toclone, Namedlist):\n", "for key, item in fromdict.items():\n", "self.take_names(toclone.get_names())\n", "self.append(item)\n", "self.add_name(key)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Condition", "For", "Expr'", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_19=False, *VAR_20, **VAR_21):...\n", "super(CLASS_2, self).__init__(*VAR_20, VAR_19=csrf_enabled, **kwargs)\n" ]
[ "def __init__(self, csrf_enabled=False, *args, **kwargs):...\n", "super(ChangeOneModelForm, self).__init__(*args, csrf_enabled=csrf_enabled,\n **kwargs)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "\"\"\"string\"\"\"\n", "from collections import MutableSequence\n", "from collections import MutableSet\n", "from everest.attributes import get_attribute_cardinality\n", "from everest.attributes import is_terminal_attribute\n", "from everest.constants import CARDINALITY_CONSTANTS\n", "from everest.constants import RELATION_OPERATIONS\n", "from everest.constants import RESOURCE_ATTRIBUTE_KINDS\n", "from everest.constants import RESOURCE_KINDS\n", "from everest.interfaces import IDataTraversalProxyAdapter\n", "from everest.interfaces import IDataTraversalProxyFactory\n", "from everest.resources.interfaces import IResource\n", "from everest.traversalpath import TraversalPath\n", "from logging import getLogger as get_logger\n", "from pyramid.compat import itervalues_\n", "from pyramid.threadlocal import get_current_registry\n", "from pyramid.traversal import ResourceTreeTraverser\n", "from zope.interface import implementer\n", "__docformat__ = 'reStructuredText en'\n", "__all__ = ['ConvertingDataTraversalProxyMixin',\n 'DataSequenceTraversalProxy', 'DataTraversalProxy',\n 'DataTraversalProxyAdapter', 'DataTraversalProxyFactory',\n 'SourceTargetDataTreeTraverser', 'SuffixResourceTraverser']\n", "\"\"\"string\"\"\"\n", "def __call__(self, VAR_0):...\n", "VAR_31 = ResourceTreeTraverser.__call__(self, VAR_0)\n", "VAR_32 = VAR_31['context']\n", "VAR_33 = VAR_31['view_name']\n", "if IResource.providedBy(VAR_32) and '.' in VAR_33:\n", "VAR_43, VAR_44 = VAR_33.split('.')\n", "return VAR_31\n", "VAR_51 = VAR_32[VAR_43]\n", "if IResource.providedBy(VAR_51):\n", "VAR_31['context'] = VAR_51\n", "VAR_31['view_name'] = VAR_44\n" ]
[ "\"\"\"\nCustom resource object tree traverser.\n\nThis file is part of the everest project.\nSee LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.\n\nCreated on Feb 4, 2011.\n\"\"\"\n", "from collections import MutableSequence\n", "from collections import MutableSet\n", "from everest.attributes import get_attribute_cardinality\n", "from everest.attributes import is_terminal_attribute\n", "from everest.constants import CARDINALITY_CONSTANTS\n", "from everest.constants import RELATION_OPERATIONS\n", "from everest.constants import RESOURCE_ATTRIBUTE_KINDS\n", "from everest.constants import RESOURCE_KINDS\n", "from everest.interfaces import IDataTraversalProxyAdapter\n", "from everest.interfaces import IDataTraversalProxyFactory\n", "from everest.resources.interfaces import IResource\n", "from everest.traversalpath import TraversalPath\n", "from logging import getLogger as get_logger\n", "from pyramid.compat import itervalues_\n", "from pyramid.threadlocal import get_current_registry\n", "from pyramid.traversal import ResourceTreeTraverser\n", "from zope.interface import implementer\n", "__docformat__ = 'reStructuredText en'\n", "__all__ = ['ConvertingDataTraversalProxyMixin',\n 'DataSequenceTraversalProxy', 'DataTraversalProxy',\n 'DataTraversalProxyAdapter', 'DataTraversalProxyFactory',\n 'SourceTargetDataTreeTraverser', 'SuffixResourceTraverser']\n", "\"\"\"\n A custom resource tree traverser that allows us to specify the\n representation for resources with a suffix as in\n `http://everest/myobjects.csv`.\n\n Rather than to reproduce the functionality of the `__call__` method, we\n check if base part of the current view name (`myobjects` in the example)\n can be retrieved as a child resource from the context. If yes, we set the\n context to the resource and the view name to the extension part of the\n current view name (`csv` in the example); if no, nothing is changed.\n \"\"\"\n", "def __call__(self, request):...\n", "system = ResourceTreeTraverser.__call__(self, request)\n", "context = system['context']\n", "view_name = system['view_name']\n", "if IResource.providedBy(context) and '.' in view_name:\n", "rc_name, repr_name = view_name.split('.')\n", "return system\n", "child_rc = context[rc_name]\n", "if IResource.providedBy(child_rc):\n", "system['context'] = child_rc\n", "system['view_name'] = repr_name\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "@staticmethod...\n", "return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', VAR_0)))\n" ]
[ "@staticmethod...\n", "return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', scriptname)))\n" ]
[ 0, 2 ]
[ "Condition", "Return'" ]
[ "def FUNC_2(VAR_5, **VAR_6):...\n", "VAR_7 = load_website_config(VAR_5)\n", "VAR_10 = FUNC_4(VAR_5, **kwargs)\n", "VAR_11 = start_test_http_server(VAR_10, host=config.host, port=config.port)\n", "VAR_11.purpose = VAR_5\n", "return VAR_11\n" ]
[ "def start_test_website(purpose, **kwargs):...\n", "config = load_website_config(purpose)\n", "http_handler = create_website_http_handler(purpose, **kwargs)\n", "http_server = start_test_http_server(http_handler, host=config.host, port=\n config.port)\n", "http_server.purpose = purpose\n", "return http_server\n" ]
[ 0, 0, 5, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_9, VAR_11=None):...\n", "get_and_check_project(VAR_9, VAR_11)\n", "VAR_25 = self.queryset.filter(VAR_1=project_pk)\n", "VAR_25 = filters.OrderingFilter().filter_queryset(self.request, VAR_25, self)\n", "VAR_26 = CLASS_1(VAR_25, many=True)\n", "return Response(VAR_26.data)\n" ]
[ "def list(self, request, project_pk=None):...\n", "get_and_check_project(request, project_pk)\n", "tasks = self.queryset.filter(project=project_pk)\n", "tasks = filters.OrderingFilter().filter_queryset(self.request, tasks, self)\n", "serializer = TaskSerializer(tasks, many=True)\n", "return Response(serializer.data)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = CLASS_1.get_user_by_id(VAR_3)\n", "return MappingLevel(VAR_22.mapping_level)\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "user = UserService.get_user_by_id(user_id)\n", "return MappingLevel(user.mapping_level)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_13(self, VAR_10=None, VAR_14=None, VAR_15=False):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_15:\n", "VAR_10 = sys.argv[1:] if VAR_10 is None else VAR_10\n", "return super().parse_known_args(VAR_10, VAR_14)\n", "VAR_10 = [a for a in VAR_10 if a != '-h' and a != '--help']\n" ]
[ "def parse_known_args(self, args=None, namespace=None, nohelp=False):...\n", "\"\"\"docstring\"\"\"\n", "if nohelp:\n", "args = sys.argv[1:] if args is None else args\n", "return super().parse_known_args(args, namespace)\n", "args = [a for a in args if a != '-h' and a != '--help']\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_22(self, VAR_22):...\n", "\"\"\"docstring\"\"\"\n", "VAR_30 = self.cursor.execute('string' % VAR_22).fetchall()\n", "return VAR_30\n" ]
[ "def list_items_in_order(self, storeind):...\n", "\"\"\"docstring\"\"\"\n", "r = self.cursor.execute(\n 'select shoppingorder.sorder, shoppingorder.storeid, shoppingorder.itemid, store.storeid, store.storename, items.itemid, items.itemname from shoppingorder, store, items where (store.storeid = %s and items.itemid = shoppingorder.itemid and shoppingorder.storeid = store.storeid) order by shoppingorder.sorder'\n % storeind).fetchall()\n", "return r\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_8(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "VAR_5 = {'id': 'fake_uuid'}\n", "VAR_1 = {'name': self.volume_name}\n", "VAR_6 = self.configuration.volume_name_template % VAR_5['id']\n", "self.driver._eql_execute('volume', 'select', VAR_6, 'clone', VAR_1['name']\n ).AndReturn(['iSCSI target name is %s.' % self.fake_iqn])\n", "self.mox.ReplayAll()\n", "VAR_2 = self.driver.create_cloned_volume(VAR_1, VAR_5)\n", "self.assertEqual(VAR_2, self._model_update)\n" ]
[ "def test_create_cloned_volume(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "src_vref = {'id': 'fake_uuid'}\n", "volume = {'name': self.volume_name}\n", "src_volume_name = self.configuration.volume_name_template % src_vref['id']\n", "self.driver._eql_execute('volume', 'select', src_volume_name, 'clone',\n volume['name']).AndReturn(['iSCSI target name is %s.' % self.fake_iqn])\n", "self.mox.ReplayAll()\n", "model_update = self.driver.create_cloned_volume(volume, src_vref)\n", "self.assertEqual(model_update, self._model_update)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_1(VAR_2, VAR_3, VAR_1, VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = get_visible_tournament_or_404(VAR_3, VAR_2.user)\n", "VAR_8 = FUNC_0(VAR_7, VAR_1)\n", "VAR_9 = {'tournament': VAR_7, 'round': VAR_8}\n", "return render(VAR_2, 'rounds/%s.html' % VAR_4, VAR_9)\n" ]
[ "def round_simple(request, tournament_id, round_num, template):...\n", "\"\"\"docstring\"\"\"\n", "t = get_visible_tournament_or_404(tournament_id, request.user)\n", "r = get_round_or_404(t, round_num)\n", "context = {'tournament': t, 'round': r}\n", "return render(request, 'rounds/%s.html' % template, context)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_25(self, VAR_73):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.message = VAR_73\n", "return VAR_101\n" ]
[ "def message(self, message):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.message = message\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2():...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = 8080\n", "VAR_6 = 2 << 16\n", "while FUNC_1(VAR_2) and VAR_2 < VAR_6:\n", "VAR_2 += 1\n", "if VAR_2 == VAR_6:\n", "return VAR_2\n" ]
[ "def find_free_port():...\n", "\"\"\"docstring\"\"\"\n", "port = 8080\n", "max_val = 2 << 16\n", "while test_port(port) and port < max_val:\n", "port += 1\n", "if port == max_val:\n", "return port\n" ]
[ 0, 0, 5, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "AugAssign'", "Condition", "Return'" ]
[ "from difflib import SequenceMatcher\n", "import os\n", "from paramiko import RSAKey\n", "from shutil import copy, rmtree\n", "from subprocess import PIPE, Popen\n", "from termcolor import colored\n", "from threading import Thread\n", "from time import sleep\n", "from error import DrSEUsError\n", "from jtag import bdi_p2020, openocd\n", "from simics import simics\n", "from sql import sql\n", "def __init__(self, VAR_0, VAR_1):...\n", "self.campaign_data = VAR_0\n", "self.options = VAR_1\n", "self.result_data = {'campaign_id': self.campaign_data['id'], 'aux_output':\n '', 'data_diff': None, 'debugger_output': '', 'detected_errors': None,\n 'dut_output': ''}\n", "if os.path.exists('campaign-data/' + str(VAR_0['id']) + '/private.key'):\n", "self.rsakey = RSAKey.from_private_key_file('campaign-data/' + str(VAR_0[\n 'id']) + '/private.key')\n", "self.rsakey = RSAKey.generate(1024)\n", "if self.campaign_data['use_simics']:\n", "self.rsakey.write_private_key_file('campaign-data/' + str(VAR_0['id']) +\n '/private.key')\n", "self.debugger = simics(VAR_0, self.result_data, VAR_1, self.rsakey)\n", "if VAR_0['architecture'] == 'p2020':\n", "if not self.campaign_data['use_simics']:\n", "self.debugger = bdi_p2020(VAR_0, self.result_data, VAR_1, self.rsakey)\n", "if VAR_0['architecture'] == 'a9':\n", "if self.campaign_data['use_aux']:\n", "def __str__(self):...\n", "self.debugger = openocd(VAR_0, self.result_data, VAR_1, self.rsakey)\n", "self.debugger.aux.serial.write('\\x03')\n", "if VAR_1.command == 'new':\n", "VAR_10 = \"\"\"DrSEUs Attributes:\n\tDebugger: \"\"\" + str(self.debugger\n ) + '\\n\\tDUT:\\t' + str(self.debugger.dut).replace('\\n\\t', '\\n\\t\\t')\n", "self.debugger.aux.do_login()\n", "self.debugger.reset_dut()\n", "if self.campaign_data['use_aux']:\n", "if VAR_1.command != 'new':\n", "VAR_10 += '\\n\\tAUX:\\t' + str(self.debugger.aux).replace('\\n\\t', '\\n\\t\\t')\n", "VAR_10 += \"\"\"\n\tCampaign Information:\n\t\tCampaign Number: \"\"\" + str(self.\n campaign_data['id']) + \"\"\"\n\t\tDUT Command: \\\"\"\"\" + self.campaign_data[\n 'command'] + '\"'\n", "self.send_dut_files(VAR_2=True)\n", "if self.campaign_data['use_aux']:\n", "VAR_10 += \"\"\"\n\t\tAUX Command: \\\"\"\"\" + self.campaign_data['aux_command'] + '\"'\n", "VAR_10 += '\\n\\t\\t' + ('Host ' if self.campaign_data['use_simics'] else ''\n ) + 'Execution Time: ' + str(self.campaign_data['exec_time']) + ' seconds'\n", "if self.campaign_data['use_simics']:\n", "VAR_10 += \"\"\"\n\t\tExecution Cycles: \"\"\" + '{:,}'.format(self.campaign_data[\n 'num_cycles']) + \"\"\" cycles\n\t\tSimulated Time: \"\"\" + str(self.\n campaign_data['sim_time']) + ' seconds'\n", "return VAR_10\n" ]
[ "from difflib import SequenceMatcher\n", "import os\n", "from paramiko import RSAKey\n", "from shutil import copy, rmtree\n", "from subprocess import PIPE, Popen\n", "from termcolor import colored\n", "from threading import Thread\n", "from time import sleep\n", "from error import DrSEUsError\n", "from jtag import bdi_p2020, openocd\n", "from simics import simics\n", "from sql import sql\n", "def __init__(self, campaign_data, options):...\n", "self.campaign_data = campaign_data\n", "self.options = options\n", "self.result_data = {'campaign_id': self.campaign_data['id'], 'aux_output':\n '', 'data_diff': None, 'debugger_output': '', 'detected_errors': None,\n 'dut_output': ''}\n", "if os.path.exists('campaign-data/' + str(campaign_data['id']) + '/private.key'\n", "self.rsakey = RSAKey.from_private_key_file('campaign-data/' + str(\n campaign_data['id']) + '/private.key')\n", "self.rsakey = RSAKey.generate(1024)\n", "if self.campaign_data['use_simics']:\n", "self.rsakey.write_private_key_file('campaign-data/' + str(campaign_data[\n 'id']) + '/private.key')\n", "self.debugger = simics(campaign_data, self.result_data, options, self.rsakey)\n", "if campaign_data['architecture'] == 'p2020':\n", "if not self.campaign_data['use_simics']:\n", "self.debugger = bdi_p2020(campaign_data, self.result_data, options, self.rsakey\n )\n", "if campaign_data['architecture'] == 'a9':\n", "if self.campaign_data['use_aux']:\n", "def __str__(self):...\n", "self.debugger = openocd(campaign_data, self.result_data, options, self.rsakey)\n", "self.debugger.aux.serial.write('\\x03')\n", "if options.command == 'new':\n", "string = \"\"\"DrSEUs Attributes:\n\tDebugger: \"\"\" + str(self.debugger\n ) + '\\n\\tDUT:\\t' + str(self.debugger.dut).replace('\\n\\t', '\\n\\t\\t')\n", "self.debugger.aux.do_login()\n", "self.debugger.reset_dut()\n", "if self.campaign_data['use_aux']:\n", "if options.command != 'new':\n", "string += '\\n\\tAUX:\\t' + str(self.debugger.aux).replace('\\n\\t', '\\n\\t\\t')\n", "string += \"\"\"\n\tCampaign Information:\n\t\tCampaign Number: \"\"\" + str(self.\n campaign_data['id']) + \"\"\"\n\t\tDUT Command: \\\"\"\"\" + self.campaign_data[\n 'command'] + '\"'\n", "self.send_dut_files(aux=True)\n", "if self.campaign_data['use_aux']:\n", "string += \"\"\"\n\t\tAUX Command: \\\"\"\"\" + self.campaign_data['aux_command'] + '\"'\n", "string += '\\n\\t\\t' + ('Host ' if self.campaign_data['use_simics'] else ''\n ) + 'Execution Time: ' + str(self.campaign_data['exec_time']) + ' seconds'\n", "if self.campaign_data['use_simics']:\n", "string += \"\"\"\n\t\tExecution Cycles: \"\"\" + '{:,}'.format(self.campaign_data[\n 'num_cycles']) + \"\"\" cycles\n\t\tSimulated Time: \"\"\" + str(self.\n campaign_data['sim_time']) + ' seconds'\n", "return string\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "FunctionDef'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Condition", "Condition", "AugAssign'", "AugAssign'", "Expr'", "Condition", "AugAssign'", "AugAssign'", "Condition", "AugAssign'", "Return'" ]
[ "def FUNC_19(self, VAR_2, VAR_3, VAR_4, *VAR_5):...\n", "self.write_data({'type': 'market_payment_sent', 'event': VAR_5[0]})\n" ]
[ "def on_market_payment_sent(self, subject, changetype, objectID, *args):...\n", "self.write_data({'type': 'market_payment_sent', 'event': args[0]})\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_44(self):...\n", "return self._names\n" ]
[ "def keys(self):...\n", "return self._names\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@api.multi...\n", "\"\"\"docstring\"\"\"\n", "VAR_51 = super(CLASS_0, self).onchange_type(VAR_41)\n", "if VAR_41:\n", "VAR_51['value']['title'] = self.env.ref(\n 'partner_compassion.res_partner_title_friends').id\n", "return VAR_51\n" ]
[ "@api.multi...\n", "\"\"\"docstring\"\"\"\n", "res = super(ResPartner, self).onchange_type(is_company)\n", "if is_company:\n", "res['value']['title'] = self.env.ref(\n 'partner_compassion.res_partner_title_friends').id\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@VAR_0.route('/find_and_replace', methods=['POST'])...\n", "VAR_1 = get_dataset_with_id(request.args.get('dataset_id'))\n", "VAR_2 = request.form['column']\n", "VAR_14 = request.form['find']\n", "VAR_15 = request.form['match-mode']\n", "VAR_16 = request.form['replace']\n", "if VAR_15 == 'full-match':\n", "find_replace(VAR_1.working_copy, VAR_2, VAR_14, VAR_16)\n", "if VAR_15 == 'substring-match':\n", "return redirect(request.referrer)\n", "VAR_22 = request.form['replace-mode']\n", "if VAR_15 == 'regex-match':\n", "if VAR_22 == 'full-replace':\n", "regex_find_replace(VAR_1.working_copy, VAR_2, VAR_14, VAR_16)\n", "substring_find_replace(VAR_1.working_copy, VAR_2, VAR_14, VAR_16, full=True)\n", "if VAR_22 == 'substring-replace':\n", "substring_find_replace(VAR_1.working_copy, VAR_2, VAR_14, VAR_16, full=False)\n" ]
[ "@_transform.route('/find_and_replace', methods=['POST'])...\n", "dataset = get_dataset_with_id(request.args.get('dataset_id'))\n", "col = request.form['column']\n", "find = request.form['find']\n", "match_mode = request.form['match-mode']\n", "replace = request.form['replace']\n", "if match_mode == 'full-match':\n", "find_replace(dataset.working_copy, col, find, replace)\n", "if match_mode == 'substring-match':\n", "return redirect(request.referrer)\n", "replace_mode = request.form['replace-mode']\n", "if match_mode == 'regex-match':\n", "if replace_mode == 'full-replace':\n", "regex_find_replace(dataset.working_copy, col, find, replace)\n", "substring_find_replace(dataset.working_copy, col, find, replace, full=True)\n", "if replace_mode == 'substring-replace':\n", "substring_find_replace(dataset.working_copy, col, find, replace, full=False)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_57(self):...\n", "VAR_2 = {'SEVERITY': {'HIGH': 1}, 'CONFIDENCE': {'MEDIUM': 1}}\n", "self.check_example('flask_debug.py', VAR_2)\n" ]
[ "def test_flask_debug_true(self):...\n", "expect = {'SEVERITY': {'HIGH': 1}, 'CONFIDENCE': {'MEDIUM': 1}}\n", "self.check_example('flask_debug.py', expect)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "import pymysql\n", "from config import create_connection\n", "VAR_0 = 'users'\n", "VAR_1 = 'carts'\n", "VAR_2 = 'products'\n", "VAR_3 = 'invoices'\n", "VAR_4 = 'invoice_products'\n", "def FUNC_0():...\n", "VAR_12 = f'string{VAR_0}\\n '\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_1(VAR_5):...\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"\n SELECT * FROM {VAR_0} WHERE id=%s\n \"\"\"\n", "VAR_15.execute(VAR_12)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_2(VAR_5):...\n", "return VAR_15.fetchall()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = 'string'\n", "VAR_15.execute(VAR_12, VAR_5)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_3(VAR_5):...\n", "VAR_16 = VAR_15.fetchone()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = 'string'\n", "if not VAR_16:\n", "VAR_15.execute(VAR_12, VAR_5)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_4(VAR_6, VAR_7, VAR_8):...\n", "return None\n", "return {'firstName': VAR_16['first_name'], 'lastName': VAR_16['last_name'],\n 'username': VAR_16['username']}\n", "return VAR_15.fetchall()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"\n INSERT INTO {VAR_1} (user_id, product_id, quantity)\n VALUES({VAR_6}, {VAR_7}, {VAR_8['quantity']})\n \"\"\"\n", "VAR_15.execute(VAR_12, VAR_5)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_5(VAR_6):...\n", "return VAR_15.fetchall()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"string{VAR_2} AS p INNER JOIN {VAR_1} AS c ON\n p.id=c.product_id\n WHERE c.user_id={VAR_6}\n \"\"\"\n", "VAR_15.execute(VAR_12)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_6(VAR_6, VAR_7):...\n", "VAR_14.commit()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"\n DELETE FROM {VAR_1} \n WHERE user_id={VAR_6} && product_id={VAR_7}\n \"\"\"\n", "return 'Ok'\n", "VAR_15.execute(VAR_12)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_7(VAR_6):...\n", "return VAR_15.fetchall()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"\n DELETE FROM {VAR_1} \n WHERE user_id={VAR_6}\n \"\"\"\n", "VAR_15.execute(VAR_12)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_8(VAR_6):...\n", "VAR_14.commit()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"\n INSERT INTO {VAR_3} (user_id)\n VALUES ({VAR_6})\n \"\"\"\n", "return 'Ok'\n", "VAR_15.execute(VAR_12)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_9(VAR_6):...\n", "VAR_14.commit()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"\n SELECT id from {VAR_3}\n WHERE user_id={VAR_6}\n ORDER BY transaction_date DESC\n LIMIT 1\n \"\"\"\n", "return 'Ok'\n", "VAR_15.execute(VAR_12)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_10(VAR_6, VAR_7, VAR_9):...\n", "VAR_14.commit()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_12 = f\"\"\"\n UPDATE {VAR_1}\n SET quantity = {VAR_9}\n WHERE user_id = {VAR_6} AND product_id = {VAR_7};\n \"\"\"\n", "return 'Ok'\n", "VAR_15.execute(VAR_12)\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "def FUNC_11(VAR_10, VAR_11):...\n", "return VAR_15.fetchone()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_13 = ''\n", "VAR_15.execute(VAR_12)\n", "for product in VAR_11['products']:\n", "VAR_14.commit()\n", "VAR_13 += '('\n", "VAR_13 = VAR_13[:-1]\n", "return 'Ok'\n", "VAR_13 += str(VAR_10['id'])\n", "return VAR_13\n", "VAR_13 += ','\n", "VAR_13 += str(product['product']['productId'])\n", "VAR_13 += ','\n", "VAR_13 += str(product['product']['quantity'])\n", "VAR_13 += '),'\n" ]
[ "import pymysql\n", "from config import create_connection\n", "USERS_TABLE = 'users'\n", "CARTS_TABLE = 'carts'\n", "PRODUCTS_TABLE = 'products'\n", "INVOICES_TABLE = 'invoices'\n", "INVOICE_PRODUCTS_TABLE = 'invoice_products'\n", "def get_users():...\n", "sql_query = f\"\"\"\n SELECT id, first_name, last_name, username, email, password, INET_NTOA(ip_address)\n as ip_address, registration_date, activated\n FROM {USERS_TABLE}\n \"\"\"\n", "connection = create_connection()\n", "connection.close()\n", "def get_user(id):...\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT * FROM {USERS_TABLE} WHERE id=%s\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def get_invoice(id):...\n", "return cursor.fetchall()\n", "cursor = connection.cursor()\n", "sql_query = (\n 'select t2.id as id_invoice , t2.transaction_date, sum(t3.price * t1.quantity) as montant from invoice_products as t1 inner join invoices as t2 on t2.id = t1.invoice_id inner join products as t3 on t3.id = t1.product_id and t2.user_id= %s group by t2.id, t2.transaction_date order by t2.transaction_date DESC'\n )\n", "cursor.execute(sql_query, id)\n", "connection = create_connection()\n", "connection.close()\n", "def get_invoiceById(id):...\n", "user = cursor.fetchone()\n", "cursor = connection.cursor()\n", "sql_query = (\n 'select i.transaction_date, ip.invoice_id, p.name, ip.quantity, p.price from invoice_products as ip, products as p, invoices as i where invoice_id = %s and ip.product_id = p.id and ip.invoice_id = i.id;'\n )\n", "if not user:\n", "cursor.execute(sql_query, id)\n", "connection = create_connection()\n", "connection.close()\n", "def add_product_to_cart(userId, productId, cart):...\n", "return None\n", "return {'firstName': user['first_name'], 'lastName': user['last_name'],\n 'username': user['username']}\n", "return cursor.fetchall()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n INSERT INTO {CARTS_TABLE} (user_id, product_id, quantity)\n VALUES({userId}, {productId}, {cart['quantity']})\n \"\"\"\n", "cursor.execute(sql_query, id)\n", "connection = create_connection()\n", "connection.close()\n", "def get_cart(userId):...\n", "return cursor.fetchall()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT p.id, p.name, p.company, p.rating, p.image_url, p.price, c.quantity AS quantity\n FROM {PRODUCTS_TABLE} AS p INNER JOIN {CARTS_TABLE} AS c ON\n p.id=c.product_id\n WHERE c.user_id={userId}\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def delete_product_from_cart(userId, productId):...\n", "connection.commit()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n DELETE FROM {CARTS_TABLE} \n WHERE user_id={userId} && product_id={productId}\n \"\"\"\n", "return 'Ok'\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def delete_cart(userId):...\n", "return cursor.fetchall()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n DELETE FROM {CARTS_TABLE} \n WHERE user_id={userId}\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def create_invoice(userId):...\n", "connection.commit()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n INSERT INTO {INVOICES_TABLE} (user_id)\n VALUES ({userId})\n \"\"\"\n", "return 'Ok'\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def 
get_invoice_id(userId):...\n", "connection.commit()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT id from {INVOICES_TABLE}\n WHERE user_id={userId}\n ORDER BY transaction_date DESC\n LIMIT 1\n \"\"\"\n", "return 'Ok'\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def update_cart_quantity(userId, productId, quantity):...\n", "connection.commit()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n UPDATE {CARTS_TABLE}\n SET quantity = {quantity}\n WHERE user_id = {userId} AND product_id = {productId};\n \"\"\"\n", "return 'Ok'\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def create_invoice_products_values_query(invoiceId, products):...\n", "return cursor.fetchone()\n", "cursor = connection.cursor()\n", "invoice_products_values = ''\n", "cursor.execute(sql_query)\n", "for product in products['products']:\n", "connection.commit()\n", "invoice_products_values += '('\n", "invoice_products_values = invoice_products_values[:-1]\n", "return 'Ok'\n", "invoice_products_values += str(invoiceId['id'])\n", "return invoice_products_values\n", "invoice_products_values += ','\n", "invoice_products_values += str(product['product']['productId'])\n", "invoice_products_values += ','\n", "invoice_products_values += str(product['product']['quantity'])\n", "invoice_products_values += '),'\n" ]
[ 0, 0, 4, 4, 4, 4, 4, 0, 4, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Return'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "For", "Expr'", "AugAssign'", "Assign'", "Return'", "AugAssign'", "Return'", "AugAssign'", "AugAssign'", "AugAssign'", "AugAssign'", "AugAssign'" ]
[ "def __init__(self, VAR_7, *VAR_15, **VAR_16):...\n", "CLASS_0.__init__(self, VAR_7, *VAR_15, **kw)\n" ]
[ "def __init__(self, param, *a, **kw):...\n", "Validator.__init__(self, param, *a, **kw)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@classmethod...\n", "return super(CLASS_0, VAR_12).implementation_version() + [(\n 'BaseZincCompile', 7)]\n" ]
[ "@classmethod...\n", "return super(BaseZincCompile, cls).implementation_version() + [(\n 'BaseZincCompile', 7)]\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_8 = 'test_foo.py:FooTest'\n", "VAR_7 = BokChoyTestSuite('', test_spec=spec)\n", "VAR_1 = 'tests/{}'.format(VAR_8)\n", "self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name))\n" ]
[ "def test_class_spec(self):...\n", "spec = 'test_foo.py:FooTest'\n", "suite = BokChoyTestSuite('', test_spec=spec)\n", "name = 'tests/{}'.format(spec)\n", "self.assertEqual(suite.cmd, self._expected_command(name=name))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_0(self):...\n", "VAR_7 = []\n", "for parent in self.parent_groups:\n", "VAR_7.append(parent.serialize())\n", "self._hosts = None\n", "VAR_8 = dict(VAR_0=self.name, vars=self.vars.copy(), VAR_7=parent_groups,\n depth=self.depth, VAR_9=self.hosts)\n", "return VAR_8\n" ]
[ "def serialize(self):...\n", "parent_groups = []\n", "for parent in self.parent_groups:\n", "parent_groups.append(parent.serialize())\n", "self._hosts = None\n", "result = dict(name=self.name, vars=self.vars.copy(), parent_groups=\n parent_groups, depth=self.depth, hosts=self.hosts)\n", "return result\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(VAR_3=datetime.datetime(2000, 1, 1)):...\n", "VAR_14 = crawl.get_sslowdown_data()\n", "VAR_15 = []\n", "VAR_16 = {}\n", "for entry_key, entry_data in VAR_14.items():\n", "VAR_2 = FUNC_3(entry_data['date'])\n", "return VAR_15\n", "if VAR_2 > VAR_3:\n", "VAR_16['author'] = entry_data['author']\n", "VAR_16['date'] = VAR_2\n", "VAR_16['image'] = entry_data['image']\n", "VAR_16['summary'] = entry_data['summary']\n", "VAR_16['title'] = entry_data['title']\n", "VAR_16['text'] = entry_data['text']\n", "VAR_16['slug'] = slugify.slugify(entry_data['summary'][:30])\n", "VAR_15.append(deepcopy(VAR_16))\n" ]
[ "def build_json_from_raw_data(ch_date=datetime.datetime(2000, 1, 1)):...\n", "raw_data = crawl.get_sslowdown_data()\n", "result = []\n", "entry = {}\n", "for entry_key, entry_data in raw_data.items():\n", "date = get_datetime(entry_data['date'])\n", "return result\n", "if date > ch_date:\n", "entry['author'] = entry_data['author']\n", "entry['date'] = date\n", "entry['image'] = entry_data['image']\n", "entry['summary'] = entry_data['summary']\n", "entry['title'] = entry_data['title']\n", "entry['text'] = entry_data['text']\n", "entry['slug'] = slugify.slugify(entry_data['summary'][:30])\n", "result.append(deepcopy(entry))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "import sqlite3, hashlib, random, string, uuid\n", "VAR_0 = 32\n", "VAR_1 = 'db/data.db'\n", "def FUNC_0(VAR_2, VAR_3):...\n", "VAR_5 = ''.join(random.choice(string.ascii_letters + string.digits) for _ in\n range(VAR_0))\n", "VAR_9 = FUNC_5(VAR_3, VAR_5)\n", "VAR_10 = sqlite3.connect(VAR_1)\n", "VAR_11 = VAR_10.cursor()\n", "VAR_11.execute(\n \"\"\"INSERT INTO UserData(username, password_hash, salt) \n VALUES (?, ?, ?)\"\"\"\n , (VAR_2, VAR_9, VAR_5))\n", "VAR_10.commit()\n", "VAR_10.close()\n", "def FUNC_1(VAR_2, VAR_3):...\n", "VAR_10 = sqlite3.connect(VAR_1)\n", "VAR_11 = VAR_10.cursor()\n", "VAR_11.execute(\n 'SELECT user_id, password_hash, salt FROM UserData WHERE username = ?',\n [VAR_2])\n", "VAR_12 = VAR_11.fetchone()\n", "if not VAR_12:\n", "return None\n", "VAR_13 = VAR_12[0]\n", "VAR_9 = VAR_12[1]\n", "VAR_5 = VAR_12[2]\n", "VAR_4 = None\n", "if FUNC_5(VAR_3, VAR_5) == VAR_9:\n", "VAR_4 = str(VAR_6.uuid4())\n", "VAR_10.close()\n", "VAR_11.execute('UPDATE UserData SET session_id = ? WHERE user_id = ?', (\n VAR_4, VAR_13))\n", "return VAR_4, VAR_18\n", "print('SID: ' + VAR_4)\n", "VAR_10.commit()\n", "VAR_11.execute('SELECT secure_name, uuid_filename FROM Notes WHERE user_id = ?'\n , [VAR_13])\n", "VAR_18 = []\n", "VAR_19 = VAR_11.fetchall()\n", "for VAR_17 in VAR_19:\n", "VAR_18.append({'file_id': VAR_17[1].split('.')[0], 'name': VAR_17[0]})\n" ]
[ "import sqlite3, hashlib, random, string, uuid\n", "SALT_LENGTH = 32\n", "DATABASE_PATH = 'db/data.db'\n", "def add_user(username, password):...\n", "salt = ''.join(random.choice(string.ascii_letters + string.digits) for _ in\n range(SALT_LENGTH))\n", "password_hash = multiple_hash_password(password, salt)\n", "connection = sqlite3.connect(DATABASE_PATH)\n", "cursor = connection.cursor()\n", "cursor.execute(\n \"\"\"INSERT INTO UserData(username, password_hash, salt) \n VALUES (?, ?, ?)\"\"\"\n , (username, password_hash, salt))\n", "connection.commit()\n", "connection.close()\n", "def login(username, password):...\n", "connection = sqlite3.connect(DATABASE_PATH)\n", "cursor = connection.cursor()\n", "cursor.execute(\n 'SELECT user_id, password_hash, salt FROM UserData WHERE username = ?',\n [username])\n", "data = cursor.fetchone()\n", "if not data:\n", "return None\n", "user_id = data[0]\n", "password_hash = data[1]\n", "salt = data[2]\n", "session_id = None\n", "if multiple_hash_password(password, salt) == password_hash:\n", "session_id = str(uuid.uuid4())\n", "connection.close()\n", "cursor.execute('UPDATE UserData SET session_id = ? WHERE user_id = ?', (\n session_id, user_id))\n", "return session_id, notes\n", "print('SID: ' + session_id)\n", "connection.commit()\n", "cursor.execute('SELECT secure_name, uuid_filename FROM Notes WHERE user_id = ?'\n , [user_id])\n", "notes = []\n", "rows = cursor.fetchall()\n", "for row in rows:\n", "notes.append({'file_id': row[1].split('.')[0], 'name': row[0]})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0 ]
[ "Import'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Return'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "For", "Expr'" ]
[ "def FUNC_14(VAR_7):...\n", "def FUNC_13(self, *VAR_16, **VAR_10):...\n", "if not self.is_group_user(VAR_8):\n", "return VAR_7(self, *VAR_16, **kwargs)\n" ]
[ "def decorator(method):...\n", "def wrapper(self, *args, **kwargs):...\n", "if not self.is_group_user(group):\n", "return method(self, *args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Condition", "Return'" ]
[ "@eqlx.with_timeout...\n", "time.sleep(1)\n" ]
[ "@eqlx.with_timeout...\n", "time.sleep(1)\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "@click.command(help='Login to newrelic')...\n", "VAR_5 = VAR_0.obj['EMAIL']\n", "if not VAR_5:\n", "VAR_5 = click.prompt('Email')\n", "VAR_19 = VAR_0.obj['PASSWORD']\n", "if not VAR_19:\n", "VAR_19 = click.prompt('Password', hide_input=True)\n", "newrelic.login(VAR_5, VAR_19)\n", "print(click.style(u'OK', fg='green', bold=True))\n" ]
[ "@click.command(help='Login to newrelic')...\n", "email = ctx.obj['EMAIL']\n", "if not email:\n", "email = click.prompt('Email')\n", "password = ctx.obj['PASSWORD']\n", "if not password:\n", "password = click.prompt('Password', hide_input=True)\n", "newrelic.login(email, password)\n", "print(click.style(u'OK', fg='green', bold=True))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = psycopg2.connect('dbname=forum')\n", "VAR_2 = VAR_1.cursor()\n", "VAR_2.execute('select content, time from posts order by time desc')\n", "VAR_3 = VAR_2.fetchall()\n", "VAR_1.close()\n", "return VAR_3\n" ]
[ "def get_posts():...\n", "\"\"\"docstring\"\"\"\n", "conn = psycopg2.connect('dbname=forum')\n", "cursor = conn.cursor()\n", "cursor.execute('select content, time from posts order by time desc')\n", "all_posts = cursor.fetchall()\n", "conn.close()\n", "return all_posts\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3):...\n", "self.connection = VAR_0\n", "self.logger = VAR_2\n", "self.verb = VAR_3\n", "self.path = VAR_1.pop()\n", "self.pipe_command = VAR_1.pop() if VAR_1 else None\n" ]
[ "def __init__(self, connection, args, logger, verb):...\n", "self.connection = connection\n", "self.logger = logger\n", "self.verb = verb\n", "self.path = args.pop()\n", "self.pipe_command = args.pop() if args else None\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_15(self, VAR_7, VAR_8=False, VAR_9=True, VAR_10=False):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = self.all_processes[VAR_7]\n", "if VAR_7 != VAR_13.PROCESS_TYPE_REDIS_SERVER:\n", "assert len(VAR_12) == 1\n", "for VAR_16 in VAR_12:\n", "VAR_20 = VAR_16.process\n", "if VAR_20.poll() is not None:\n", "if VAR_9:\n", "if VAR_16.use_valgrind:\n", "VAR_20.terminate()\n", "if VAR_16.use_valgrind_profiler:\n", "VAR_20.wait()\n", "os.kill(VAR_20.pid, signal.SIGINT)\n", "if VAR_8:\n", "if VAR_20.returncode != 0:\n", "time.sleep(0.1)\n", "VAR_20.terminate()\n", "VAR_20.kill()\n", "VAR_22 = ('Valgrind detected some errors in process of type {}. Error code {}.'\n .format(VAR_7, VAR_20.returncode))\n", "VAR_21 = threading.Timer(1, lambda VAR_20: VAR_20.kill(), [VAR_20])\n", "if VAR_10:\n", "if VAR_16.stdout_file is not None:\n", "VAR_21.start()\n", "VAR_21.cancel()\n", "if VAR_20.poll() is not None:\n", "VAR_20.wait()\n", "VAR_22 += \"\"\"\nPROCESS STDOUT:\n\"\"\" + f.read()\n", "if VAR_16.stderr_file is not None:\n", "VAR_20.wait()\n", "VAR_22 += \"\"\"\nPROCESS STDERR:\n\"\"\" + f.read()\n" ]
[ "def _kill_process_type(self, process_type, allow_graceful=False,...\n", "\"\"\"docstring\"\"\"\n", "process_infos = self.all_processes[process_type]\n", "if process_type != ray_constants.PROCESS_TYPE_REDIS_SERVER:\n", "assert len(process_infos) == 1\n", "for process_info in process_infos:\n", "process = process_info.process\n", "if process.poll() is not None:\n", "if check_alive:\n", "if process_info.use_valgrind:\n", "process.terminate()\n", "if process_info.use_valgrind_profiler:\n", "process.wait()\n", "os.kill(process.pid, signal.SIGINT)\n", "if allow_graceful:\n", "if process.returncode != 0:\n", "time.sleep(0.1)\n", "process.terminate()\n", "process.kill()\n", "message = (\n 'Valgrind detected some errors in process of type {}. Error code {}.'.\n format(process_type, process.returncode))\n", "timer = threading.Timer(1, lambda process: process.kill(), [process])\n", "if wait:\n", "if process_info.stdout_file is not None:\n", "timer.start()\n", "timer.cancel()\n", "if process.poll() is not None:\n", "process.wait()\n", "message += \"\"\"\nPROCESS STDOUT:\n\"\"\" + f.read()\n", "if process_info.stderr_file is not None:\n", "process.wait()\n", "message += \"\"\"\nPROCESS STDERR:\n\"\"\" + f.read()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assert'", "For", "Assign'", "Condition", "Condition", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "AugAssign'", "Condition", "Expr'", "AugAssign'" ]
[ "def FUNC_8(self):...\n", "VAR_3 = DatabaseQuery('DocField').execute(filters={'parent': 'DocType'},\n fields=['fieldname', 'fieldtype'], or_filters=[{'fieldtype': 'Table'},\n {'fieldtype': 'Select'}])\n", "self.assertTrue({'fieldtype': 'Table', 'fieldname': 'fields'} in VAR_3)\n", "self.assertTrue({'fieldtype': 'Select', 'fieldname': 'document_type'} in VAR_3)\n", "self.assertFalse({'fieldtype': 'Check', 'fieldname': 'issingle'} in VAR_3)\n" ]
[ "def test_or_filters(self):...\n", "data = DatabaseQuery('DocField').execute(filters={'parent': 'DocType'},\n fields=['fieldname', 'fieldtype'], or_filters=[{'fieldtype': 'Table'},\n {'fieldtype': 'Select'}])\n", "self.assertTrue({'fieldtype': 'Table', 'fieldname': 'fields'} in data)\n", "self.assertTrue({'fieldtype': 'Select', 'fieldname': 'document_type'} in data)\n", "self.assertFalse({'fieldtype': 'Check', 'fieldname': 'issingle'} in data)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_26(self):...\n", "FUNC_4()\n" ]
[ "def setUp(self):...\n", "create_dummy_data()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(self, VAR_0):...\n", "VAR_0 = VAR_0.strip()\n", "VAR_16, VAR_10 = self.parse_line(VAR_0)\n", "self.terminal.write('Syntax error: %s\\n' % e.message)\n", "VAR_4 = defer.maybeDeferred(VAR_16, *VAR_10)\n", "self.print_prompt()\n", "@VAR_4...\n", "return\n", "self.print_prompt()\n", "@VAR_4...\n", "if not VAR_6.check(cmdline.ArgumentParsingError):\n", "VAR_6.raiseException()\n", "self.print_prompt()\n", "VAR_5 = defer.Deferred()\n", "VAR_4.addBoth(VAR_5.callback)\n", "return VAR_5\n" ]
[ "def lineReceived(self, line):...\n", "line = line.strip()\n", "command, cmd_args = self.parse_line(line)\n", "self.terminal.write('Syntax error: %s\\n' % e.message)\n", "deferred = defer.maybeDeferred(command, *cmd_args)\n", "self.print_prompt()\n", "@deferred...\n", "return\n", "self.print_prompt()\n", "@deferred...\n", "if not f.check(cmdline.ArgumentParsingError):\n", "f.raiseException()\n", "self.print_prompt()\n", "ret = defer.Deferred()\n", "deferred.addBoth(ret.callback)\n", "return ret\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Return'", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "@property...\n", "return self.report.decrypt_record(self.storage.passphrase)\n" ]
[ "@property...\n", "return self.report.decrypt_record(self.storage.passphrase)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_7(VAR_17, VAR_18, VAR_9, VAR_10):...\n", "VAR_40 = 'string'\n", "VAR_47 = pd.read_sql(VAR_40.format(table_name=matched_hmis_table, start=\n 'client_location_start_date', exit='client_location_end_date'), con=db.\n engine, params={'start_time': start_time, 'end_time': end_time})\n", "VAR_48 = pd.read_sql(VAR_40.format(table_name=matched_bookings_table, start\n ='jail_entry_date', exit='jail_exit_date'), con=db.engine, params={\n 'start_time': start_time, 'end_time': end_time})\n", "VAR_2 = VAR_47[VAR_47.matched_id.isin(VAR_48.matched_id)].matched_id.unique()\n", "if len(VAR_2) == 0:\n", "logger.warning('No matched between two services')\n", "VAR_49 = {'jailDurationBarData': FUNC_0(VAR_48, FUNC_3, VAR_2, 'Jail'),\n 'homelessDurationBarData': FUNC_0(VAR_47, FUNC_3, VAR_2, 'Homeless'),\n 'jailContactBarData': FUNC_0(VAR_48, FUNC_2, VAR_2, 'Jail'),\n 'homelessContactBarData': FUNC_0(VAR_47, FUNC_2, VAR_2, 'Homeless')}\n", "return VAR_49\n" ]
[ "def retrieve_bar_data(matched_hmis_table, matched_bookings_table,...\n", "query = \"\"\"\n SELECT\n *,\n DATE_PART('day', {exit}::timestamp - {start}::timestamp) as days\n FROM {table_name}\n WHERE\n not ({start} < %(start_time)s AND {exit} < %(start_time)s) and\n not ({start} > %(end_time)s AND {exit} > %(end_time)s)\n \"\"\"\n", "filtered_hmis = pd.read_sql(query.format(table_name=matched_hmis_table,\n start='client_location_start_date', exit='client_location_end_date'),\n con=db.engine, params={'start_time': start_time, 'end_time': end_time})\n", "filtered_bookings = pd.read_sql(query.format(table_name=\n matched_bookings_table, start='jail_entry_date', exit='jail_exit_date'),\n con=db.engine, params={'start_time': start_time, 'end_time': end_time})\n", "shared_ids = filtered_hmis[filtered_hmis.matched_id.isin(filtered_bookings.\n matched_id)].matched_id.unique()\n", "if len(shared_ids) == 0:\n", "logger.warning('No matched between two services')\n", "bar_data = {'jailDurationBarData': get_histogram_bar_chart_data(\n filtered_bookings, get_days_distribution, shared_ids, 'Jail'),\n 'homelessDurationBarData': get_histogram_bar_chart_data(filtered_hmis,\n get_days_distribution, shared_ids, 'Homeless'), 'jailContactBarData':\n get_histogram_bar_chart_data(filtered_bookings, get_contact_dist,\n shared_ids, 'Jail'), 'homelessContactBarData':\n get_histogram_bar_chart_data(filtered_hmis, get_contact_dist,\n shared_ids, 'Homeless')}\n", "return bar_data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_58(self):...\n", "self.cursor.execute('create table t1(a blob)')\n", "VAR_36 = 'x' * 100 * 1024\n", "self.cursor.execute('update t1 set a=? where 1=0', (VAR_36,))\n" ]
[ "def test_large_update_nodata(self):...\n", "self.cursor.execute('create table t1(a blob)')\n", "hundredkb = 'x' * 100 * 1024\n", "self.cursor.execute('update t1 set a=? where 1=0', (hundredkb,))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_0(VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "\"\"\" OUPUT: a list of elements in the selected row \"\"\"\n", "VAR_4 = text(str(VAR_3))\n", "return VAR_2.execute(VAR_4).fetchall()\n" ]
[ "def select_row_from_mysql_command(command_str):...\n", "\"\"\"docstring\"\"\"\n", "\"\"\" OUPUT: a list of elements in the selected row \"\"\"\n", "sql = text(str(command_str))\n", "return s.execute(sql).fetchall()\n" ]
[ 0, 0, 2, 2, 2 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_0(self):...\n", "VAR_0 = 'string'\n", "VAR_1 = VAR_0.split('#')[-1]\n", "VAR_2 = Board(VAR_1)\n" ]
[ "def setUp(self):...\n", "test_link = (\n 'https://www.chiark.greenend.org.uk/~sgtatham/puzzles/js/undead.html#4x4:5,2,4,cRdRLbLbR,2,3,1,3,3,3,1,0,0,1,4,0,0,2,3,1'\n )\n", "board_txt = test_link.split('#')[-1]\n", "board = Board(board_txt)\n" ]
[ 0, 0, 0, 1 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'" ]
[ "__author__ = 'zkqiang'\n", "__zhihu__ = 'https://www.zhihu.com/people/z-kqiang'\n", "__github__ = 'https://github.com/zkqiang/Zhihu-Login'\n", "import requests\n", "import time\n", "import re\n", "import base64\n", "import hmac\n", "import hashlib\n", "import json\n", "import matplotlib.pyplot as plt\n", "from http import cookiejar\n", "from PIL import Image\n", "VAR_0 = {'Connection': 'keep-alive', 'Host': 'www.zhihu.com', 'Referer':\n 'https://www.zhihu.com/', 'User-Agent': 'string'}\n", "VAR_1 = 'https://www.zhihu.com/signup'\n", "VAR_2 = 'https://www.zhihu.com/api/v3/oauth/sign_in'\n", "VAR_3 = {'client_id': 'c3cef7c66a1843f8b3a9e6a1e3160e20', 'grant_type':\n 'password', 'source': 'com.zhihu.web', 'username': '', 'password': '',\n 'lang': 'en', 'ref_source': 'homepage'}\n", "def __init__(self):...\n", "self.login_url = VAR_1\n", "self.login_api = VAR_2\n", "self.login_data = VAR_3.copy()\n", "self.session = requests.session()\n", "self.session.headers = VAR_0.copy()\n", "self.session.cookies = cookiejar.LWPCookieJar(filename='./cookies.txt')\n", "def FUNC_0(self, VAR_4=None, VAR_5=None, VAR_6=True):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_6 and self.load_cookies():\n", "if self.check_login():\n", "VAR_7 = self.session.headers.copy()\n", "return True\n", "VAR_7.update({'authorization': 'oauth c3cef7c66a1843f8b3a9e6a1e3160e20',\n 'X-Xsrftoken': self._get_token()})\n", "VAR_4, VAR_5 = self._check_user_pass(VAR_4, VAR_5)\n", "self.login_data.update({'username': VAR_4, 'password': VAR_5})\n", "VAR_8 = str(int(time.time() * 1000))\n", "self.login_data.update({'captcha': self._get_captcha(VAR_7), 'timestamp':\n VAR_8, 'signature': self._get_signature(VAR_8)})\n", "VAR_10 = self.session.post(self.login_api, data=self.login_data, VAR_7=headers)\n", "if 'error' in VAR_10.text:\n", "print(re.findall('\"message\":\"(.+?)\"', VAR_10.text)[0])\n", "if self.check_login():\n", "print('登录失败')\n", "return True\n", "return False\n" ]
[ "__author__ = 'zkqiang'\n", "__zhihu__ = 'https://www.zhihu.com/people/z-kqiang'\n", "__github__ = 'https://github.com/zkqiang/Zhihu-Login'\n", "import requests\n", "import time\n", "import re\n", "import base64\n", "import hmac\n", "import hashlib\n", "import json\n", "import matplotlib.pyplot as plt\n", "from http import cookiejar\n", "from PIL import Image\n", "HEADERS = {'Connection': 'keep-alive', 'Host': 'www.zhihu.com', 'Referer':\n 'https://www.zhihu.com/', 'User-Agent':\n 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Mobile Safari/537.36'\n }\n", "LOGIN_URL = 'https://www.zhihu.com/signup'\n", "LOGIN_API = 'https://www.zhihu.com/api/v3/oauth/sign_in'\n", "FORM_DATA = {'client_id': 'c3cef7c66a1843f8b3a9e6a1e3160e20', 'grant_type':\n 'password', 'source': 'com.zhihu.web', 'username': '', 'password': '',\n 'lang': 'en', 'ref_source': 'homepage'}\n", "def __init__(self):...\n", "self.login_url = LOGIN_URL\n", "self.login_api = LOGIN_API\n", "self.login_data = FORM_DATA.copy()\n", "self.session = requests.session()\n", "self.session.headers = HEADERS.copy()\n", "self.session.cookies = cookiejar.LWPCookieJar(filename='./cookies.txt')\n", "def login(self, username=None, password=None, load_cookies=True):...\n", "\"\"\"docstring\"\"\"\n", "if load_cookies and self.load_cookies():\n", "if self.check_login():\n", "headers = self.session.headers.copy()\n", "return True\n", "headers.update({'authorization': 'oauth c3cef7c66a1843f8b3a9e6a1e3160e20',\n 'X-Xsrftoken': self._get_token()})\n", "username, password = self._check_user_pass(username, password)\n", "self.login_data.update({'username': username, 'password': password})\n", "timestamp = str(int(time.time() * 1000))\n", "self.login_data.update({'captcha': self._get_captcha(headers), 'timestamp':\n timestamp, 'signature': self._get_signature(timestamp)})\n", "resp = self.session.post(self.login_api, data=self.login_data, headers=headers)\n", "if 'error' in resp.text:\n", "print(re.findall('\"message\":\"(.+?)\"', resp.text)[0])\n", "if self.check_login():\n", "print('登录失败')\n", "return True\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Assign'", "Assign'", "Assign'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Condition", "Condition", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Return'", "Return'" ]
[ "def FUNC_2(self, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "if self.connection:\n", "for line in self.connection.iterdump():\n", "VAR_34.write('%s\\n' % line)\n", "print('db dumped to %s' % VAR_1[0])\n" ]
[ "def dump_database(self, sqlfile):...\n", "\"\"\"docstring\"\"\"\n", "if self.connection:\n", "for line in self.connection.iterdump():\n", "f.write('%s\\n' % line)\n", "print('db dumped to %s' % sqlfile[0])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "For", "Expr'", "Expr'" ]
[ "def FUNC_0(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "VAR_13 = re.compile(self.username)\n", "if self.loggedin:\n", "if VAR_13.search(self.res_data) is None:\n", "if VAR_13.search(self.res_data) is not None:\n", "return False\n", "return True\n", "return False\n" ]
[ "def test_success(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "username_re = re.compile(self.username)\n", "if self.loggedin:\n", "if username_re.search(self.res_data) is None:\n", "if username_re.search(self.res_data) is not None:\n", "return False\n", "return True\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Condition", "Condition", "Return'", "Return'", "Return'" ]
[ "def FUNC_2(*VAR_3, VAR_4=False, VAR_5=False, VAR_1=None, VAR_6=None, VAR_7=...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = []\n", "if isinstance(VAR_1, str):\n", "import glob\n", "for k in VAR_3:\n", "for k in VAR_3:\n", "if os.path.exists(k):\n", "if isinstance(VAR_1, str) and VAR_1 in VAR_12:\n", "if os.path.exists(k):\n", "return VAR_12\n", "if VAR_5:\n", "VAR_12 = [VAR_12[VAR_12.index(VAR_1)]]\n", "if not VAR_4:\n", "VAR_12 += glob.glob(os.path.join(k, VAR_1))\n", "VAR_12 += [os.path.join(k, i) for i in os.listdir(k)]\n", "VAR_12 += os.listdir(k)\n", "VAR_12 = [os.path.splitext(k)[0] for k in VAR_12]\n", "if isinstance(VAR_6, (list, tuple)):\n", "from itertools import product\n", "if VAR_8:\n", "VAR_12 = [k for k, i in product(VAR_12, VAR_6) if i not in k]\n", "VAR_12 = list(set(VAR_12))\n", "if VAR_7:\n", "VAR_12.sort()\n", "return VAR_12\n" ]
[ "def get_files_in_folders(*args, with_ext=False, with_path=False, file=None,...\n", "\"\"\"docstring\"\"\"\n", "files = []\n", "if isinstance(file, str):\n", "import glob\n", "for k in args:\n", "for k in args:\n", "if os.path.exists(k):\n", "if isinstance(file, str) and file in files:\n", "if os.path.exists(k):\n", "return files\n", "if with_path:\n", "files = [files[files.index(file)]]\n", "if not with_ext:\n", "files += glob.glob(os.path.join(k, file))\n", "files += [os.path.join(k, i) for i in os.listdir(k)]\n", "files += os.listdir(k)\n", "files = [os.path.splitext(k)[0] for k in files]\n", "if isinstance(exclude, (list, tuple)):\n", "from itertools import product\n", "if unique:\n", "files = [k for k, i in product(files, exclude) if i not in k]\n", "files = list(set(files))\n", "if sort:\n", "files.sort()\n", "return files\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Import'", "For", "For", "Condition", "Condition", "Condition", "Return'", "Condition", "Assign'", "Condition", "AugAssign'", "AugAssign'", "AugAssign'", "Assign'", "Condition", "ImportFrom'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def __init__(self):...\n", "self.stats = {'requests': 0, 'finished': 0, 'failed': 0, 'bytesRcv': 0}\n" ]
[ "def __init__(self):...\n", "self.stats = {'requests': 0, 'finished': 0, 'failed': 0, 'bytesRcv': 0}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_30(VAR_16, VAR_28):...\n", "VAR_53 = \"SELECT DISTINCT scene FROM ranks WHERE player='{}'\".format(VAR_28)\n", "VAR_79 = VAR_16.exec(VAR_53)\n", "VAR_79 = [VAR_93[0] for VAR_93 in VAR_79]\n", "VAR_80 = [FUNC_25(VAR_16, VAR_93, VAR_28) for VAR_93 in VAR_79]\n", "VAR_81 = [FUNC_26(VAR_16, VAR_93, VAR_28) for VAR_93 in VAR_79]\n", "VAR_82 = min(VAR_80)\n", "VAR_83 = max(VAR_81)\n", "VAR_84 = FUNC_27(VAR_82, VAR_83, VAR_27=True)\n", "VAR_85 = FUNC_29(VAR_16, 'austin', 'christmasmike')\n", "VAR_86 = {VAR_93: FUNC_29(VAR_16, VAR_93, VAR_28) for VAR_93 in VAR_79}\n", "VAR_87 = {VAR_93: [] for VAR_93 in VAR_79}\n", "for month in VAR_84:\n", "for VAR_93 in VAR_79:\n", "return VAR_87, VAR_84\n", "VAR_122 = VAR_86[VAR_93]\n", "if month in VAR_122:\n", "VAR_87[VAR_93].append([month, VAR_122[month]])\n" ]
[ "def get_ranking_graph_data(db, tag):...\n", "sql = \"SELECT DISTINCT scene FROM ranks WHERE player='{}'\".format(tag)\n", "scenes = db.exec(sql)\n", "scenes = [s[0] for s in scenes]\n", "first_months = [get_first_ranked_month(db, s, tag) for s in scenes]\n", "last_months = [get_last_ranked_month(db, s, tag) for s in scenes]\n", "first_month = min(first_months)\n", "last_month = max(last_months)\n", "iterated_months = iter_months(first_month, last_month, include_last=True)\n", "arank = get_monthly_ranks_for_scene(db, 'austin', 'christmasmike')\n", "monthly_ranks_per_scene = {s: get_monthly_ranks_for_scene(db, s, tag) for s in\n scenes}\n", "ranks_per_scene = {s: [] for s in scenes}\n", "for month in iterated_months:\n", "for s in scenes:\n", "return ranks_per_scene, iterated_months\n", "scene_ranks = monthly_ranks_per_scene[s]\n", "if month in scene_ranks:\n", "ranks_per_scene[s].append([month, scene_ranks[month]])\n" ]
[ 0, 4, 4, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "For", "Return'", "Assign'", "Condition", "Expr'" ]