lines sequencelengths 1 444 | raw_lines sequencelengths 1 444 | label sequencelengths 1 444 | type sequencelengths 1 444 |
|---|---|---|---|
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'VENIZIA', VAR_7='1')\n",
"FUNC_3(VAR_2, 'VENEZIA', VAR_7='2')\n",
"FUNC_3(VAR_2, 'VANSEA', VAR_7='3')\n",
"FUNC_3(VAR_2, 'WENSO', VAR_7='4')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='VENIZIA', VAR_10=[{'name': '----VENIZIA'}, {\n 'name': 'VE... | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'VENIZIA', id='1')\n",
"seed_database_with(solr, 'VENEZIA', id='2')\n",
"seed_database_with(solr, 'VANSEA', id='3')\n",
"seed_database_with(solr, 'WENSO', id='4')\n",
"verify_results(client, jwt, query='VENIZIA', expect... | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(self, VAR_64):...\n",
"self._onerror = VAR_64\n"
] | [
"def onerror(self, func):...\n",
"self._onerror = func\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_9(self, VAR_6):...\n",
"VAR_22 = \"DELETE FROM article_reference WHERE query_hash = '%s'\" % VAR_6\n",
"self.cur.execute(VAR_22)\n",
"self.conn.commit()\n"
] | [
"def del_reference_by_qhash(self, qhash):...\n",
"sql = \"DELETE FROM article_reference WHERE query_hash = '%s'\" % qhash\n",
"self.cur.execute(sql)\n",
"self.conn.commit()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@scenario('../features/pods_alive.feature', 'Exec in Pods')...\n",
""
] | [
"@scenario('../features/pods_alive.feature', 'Exec in Pods')...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_1(self):...\n",
"VAR_7 = request.form['Name']\n",
"VAR_8 = request.form['Password']\n",
"VAR_3 = VAR_2.execute(\"SELECT PASSWORD FROM USERS WHERE NAME = '\" + VAR_7 + \"'\"\n )\n",
"VAR_9 = ''\n",
"for row in VAR_3:\n",
"VAR_9 = row[0]\n",
"if VAR_9 is '':\n",
"print(VAR_9)\n",
"retur... | [
"def post(self):...\n",
"name = request.form['Name']\n",
"password = request.form['Password']\n",
"query = conn.execute(\"SELECT PASSWORD FROM USERS WHERE NAME = '\" + name + \"'\")\n",
"realPassword = ''\n",
"for row in query:\n",
"realPassword = row[0]\n",
"if realPassword is '':\n",
"print(realPa... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_43(self, VAR_41, VAR_43):...\n",
"self[VAR_41:VAR_41 + 1] = VAR_43\n",
"VAR_55 = len(VAR_43) - 1\n",
"for VAR_40, (i, j) in self._names.items():\n",
"if i > VAR_41:\n",
"self._names[VAR_40] = i + VAR_55, j + VAR_55\n",
"if i == VAR_41:\n",
"self.set_name(VAR_40, i, VAR_42=i + len(items))\n"
... | [
"def insert_items(self, index, items):...\n",
"self[index:index + 1] = items\n",
"add = len(items) - 1\n",
"for name, (i, j) in self._names.items():\n",
"if i > index:\n",
"self._names[name] = i + add, j + add\n",
"if i == index:\n",
"self.set_name(name, i, end=i + len(items))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.eng is not None and self.backend == 'Simulator' or self.backend == 'IBMBackend':\n",
"pq.ops.All(pq.ops.Measure) | self.reg\n"
] | [
"def _deallocate(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.eng is not None and self.backend == 'Simulator' or self.backend == 'IBMBackend':\n",
"pq.ops.All(pq.ops.Measure) | self.reg\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'"
] |
[
"def FUNC_13(VAR_7, VAR_1, VAR_12=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(VAR_7, six.text_type):\n",
"VAR_7 = import_string(VAR_7)\n",
"@wraps(VAR_7)...\n",
"if VAR_12:\n",
"return VAR_7(VAR_8[0], VAR_1, *VAR_8[1:], **kwargs)\n",
"return VAR_7(VAR_1, *VAR_8, **kwargs)\n"
] | [
"def make_handler(f, remote, with_response=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(f, six.text_type):\n",
"f = import_string(f)\n",
"@wraps(f)...\n",
"if with_response:\n",
"return f(args[0], remote, *args[1:], **kwargs)\n",
"return f(remote, *args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6):...\n",
"self._attributes = VAR_2\n",
"self._base_dir = VAR_5\n",
"self._remote = VAR_1\n",
"self._server = VAR_3\n",
"self._server_version = VAR_4\n",
"self._shutdown_hook = VAR_6\n",
"self._timers = []\n",
"self._timers_dying = False\n"... | [
"def __init__(self, remote, attributes, server, server_version, base_dir,...\n",
"self._attributes = attributes\n",
"self._base_dir = base_dir\n",
"self._remote = remote\n",
"self._server = server\n",
"self._server_version = server_version\n",
"self._shutdown_hook = shutdown_hook\n",
"self._timers = [... | [
0,
0,
0,
5,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(self, VAR_10):...\n",
"self.window.set_headerbar()\n",
"self.window.close_tab(self.parent_widget)\n"
] | [
"def on_headerbar_back_button_clicked(self, widget):...\n",
"self.window.set_headerbar()\n",
"self.window.close_tab(self.parent_widget)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"from flask import Blueprint, request\n",
"from modules.MessagesManager.api.functions import db_getMessage, db_sendMessage\n",
"VAR_0 = Blueprint('messages', __name__)\n",
"@VAR_0.route('/message/send')...\n",
"if request.method == 'PUT':\n",
"return db_sendMessage(request.get_json())\n",
"@VAR_0.route(... | [
"from flask import Blueprint, request\n",
"from modules.MessagesManager.api.functions import db_getMessage, db_sendMessage\n",
"messages_module = Blueprint('messages', __name__)\n",
"@messages_module.route('/message/send')...\n",
"if request.method == 'PUT':\n",
"return db_sendMessage(request.get_json())\... | [
4,
0,
0,
4,
0,
4,
4,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Condition",
"Condition",
"Return'",
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_6(self, VAR_53):...\n",
"if VAR_53:\n",
"self.error()\n",
"VAR_23 = _force_utf8(VAR_53)\n",
"return self.error(errors.USER_DOESNT_EXIST)\n",
"return Account._by_name(VAR_23)\n"
] | [
"def run(self, username):...\n",
"if username:\n",
"self.error()\n",
"name = _force_utf8(username)\n",
"return self.error(errors.USER_DOESNT_EXIST)\n",
"return Account._by_name(name)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Return'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.delete_snapshot(VAR_8)\n",
"self.common.client_logout()\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.delete_snapshot(snapshot)\n",
"self.common.client_logout()\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(VAR_5, VAR_6, VAR_7, VAR_8, VAR_3, VAR_9):...\n",
"VAR_0, VAR_1 = GetCLInfo(VAR_8)\n",
"VAR_2 = build_util.CreateBuildId(VAR_5, VAR_6, VAR_7)\n",
"VAR_18 = FUNC_0(VAR_0, VAR_1, VAR_2, VAR_3) and FUNC_1(VAR_5, VAR_6, VAR_7,\n VAR_0, VAR_1, VAR_3)\n",
"if VAR_18:\n",
"FUNC_2(VAR_5, VAR_6, VAR... | [
"def _UpdateSuspectedCLAndAnalysis(master_name, builder_name, build_number,...\n",
"repo_name, revision = GetCLInfo(cl_info)\n",
"build_key = build_util.CreateBuildId(master_name, builder_name, build_number)\n",
"success = _UpdateSuspectedCL(repo_name, revision, build_key, cl_status\n ) and _UpdateAnalysis... | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_1(VAR_2):...\n",
"ensure_server()\n",
"logging.disable('INFO')\n",
"PixelatedSite.disable_csp_requests()\n",
"VAR_0 = AppTestClient()\n",
"FUNC_0(VAR_0, UserAgentMode(is_single_user=True))\n",
"VAR_0.listenTCP()\n",
"VAR_6 = Proxy(proxy_port='8889', app_port='4567')\n",
"FeaturesResource.D... | [
"def before_all(context):...\n",
"ensure_server()\n",
"logging.disable('INFO')\n",
"PixelatedSite.disable_csp_requests()\n",
"client = AppTestClient()\n",
"start_app_test_client(client, UserAgentMode(is_single_user=True))\n",
"client.listenTCP()\n",
"proxy = Proxy(proxy_port='8889', app_port='4567')\n... | [
0,
0,
0,
0,
0,
0,
5,
5,
0,
0,
5,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"def FUNC_5(VAR_8):...\n",
"VAR_32 = ['< 1 day', '1 day']\n",
"for VAR_24 in VAR_8[2:-1]:\n",
"VAR_32.append(\n f'{VAR_24.left}-{VAR_24.right - 1 if VAR_24.open_right else VAR_24.right} days'\n )\n",
"VAR_32 = VAR_32 + ['90+ days']\n",
"return VAR_32\n"
] | [
"def days_interval_to_text(interval_list):...\n",
"result = ['< 1 day', '1 day']\n",
"for i in interval_list[2:-1]:\n",
"result.append(f'{i.left}-{i.right - 1 if i.open_right else i.right} days')\n",
"result = result + ['90+ days']\n",
"return result\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"VAR_15 = self._server_popen.poll()\n",
"return VAR_15 is None\n"
] | [
"def _IsServerAlive(self):...\n",
"returncode = self._server_popen.poll()\n",
"return returncode is None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"if VAR_0 is None and VAR_1 is None:\n",
"return None, None\n",
"VAR_0 = VAR_0 if VAR_0 is not None else VAR_1\n",
"VAR_1 = VAR_1 if VAR_1 is not None else VAR_0\n",
"return min(VAR_0, VAR_1), max(VAR_0, VAR_1)\n"
] | [
"def _GetLowerAndUpperBoundCommitPositions(lower_bound, upper_bound):...\n",
"if lower_bound is None and upper_bound is None:\n",
"return None, None\n",
"lower_bound = lower_bound if lower_bound is not None else upper_bound\n",
"upper_bound = upper_bound if upper_bound is not None else lower_bound\n",
"re... | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(VAR_3, VAR_7, VAR_8=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = {'currentProvider': None, 'providers': [], 'secondaryProviders': [],\n 'finishAuthUrl': None, 'errorMessage': None,\n 'registerFormSubmitButtonText': _('Create Account')}\n",
"if third_party_auth.is_enabled():\n",
"VAR_2... | [
"def _third_party_auth_context(request, redirect_to, tpa_hint=None):...\n",
"\"\"\"docstring\"\"\"\n",
"context = {'currentProvider': None, 'providers': [], 'secondaryProviders':\n [], 'finishAuthUrl': None, 'errorMessage': None,\n 'registerFormSubmitButtonText': _('Create Account')}\n",
"if third_party... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@staticmethod...\n",
"return FUNC_1(CLASS_0._TalkToHandlerAsync('', VAR_7, 'GET', VAR_10))\n"
] | [
"@staticmethod...\n",
"return JsonFromFuture(BaseRequest._TalkToHandlerAsync('', handler, 'GET',\n timeout))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_2(VAR_7=None, VAR_8=0, VAR_9=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_7 is None:\n",
"return True\n",
"return False\n",
"VAR_21 = re.match('^([^;]+)(; length=([0-9]+))?$', VAR_7, re.IGNORECASE)\n",
"VAR_22 = parse_http_date(VAR_21.group(1))\n",
"VAR_23 = VAR_21.group(3)\n",
"if VAR_23... | [
"def was_modified_since(header=None, mtime=0, size=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if header is None:\n",
"return True\n",
"return False\n",
"matches = re.match('^([^;]+)(; length=([0-9]+))?$', header, re.IGNORECASE)\n",
"header_mtime = parse_http_date(matches.group(1))\n",
"header_len = matche... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition"
] |
[
"def FUNC_6(self, VAR_47):...\n",
"if VAR_101.user_is_admin:\n",
"return True\n",
"if VAR_101.user_is_loggedin:\n",
"VAR_18 = Thing._by_fullname(VAR_47, data=True)\n",
"abort(403, 'forbidden')\n",
"VAR_109 = VAR_18.subreddit_slow\n",
"if VAR_109.is_special(VAR_101.user):\n",
"return True\n"
] | [
"def run(self, thing_name):...\n",
"if c.user_is_admin:\n",
"return True\n",
"if c.user_is_loggedin:\n",
"item = Thing._by_fullname(thing_name, data=True)\n",
"abort(403, 'forbidden')\n",
"subreddit = item.subreddit_slow\n",
"if subreddit.is_special(c.user):\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"await self.bot.say('https://www.nintendo.co.jp/netinfo/en_US/index.html')\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"await self.bot.say('https://www.nintendo.co.jp/netinfo/en_US/index.html')\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n",
"VAR_7 = []\n",
"VAR_8 = self.pool.get('ir.model.data')\n",
"VAR_9 = self.pool.get('account.bank.statement')\n",
"VAR_10 = self.pool.get('acc... | [
"def close_statement(self, cr, uid, ids, context):...\n",
"\"\"\"docstring\"\"\"\n",
"company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n",
"list_statement = []\n",
"mod_obj = self.pool.get('ir.model.data')\n",
"statement_obj = self.pool.get('account.bank.statement')\n",
"journa... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"... |
[
"def __init__(self, VAR_16, *VAR_17, **VAR_10):...\n",
"super().__init__(*VAR_17, **kwargs)\n",
"self.key_name = VAR_16\n"
] | [
"def __init__(self, key_name, *args, **kwargs):...\n",
"super().__init__(*args, **kwargs)\n",
"self.key_name = key_name\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def __init__(self, VAR_0):...\n",
"super(CLASS_9, self).__init__(self._init_helper(VAR_0.strip().split()) + '\\n')\n"
] | [
"def __init__(self, txt):...\n",
"super(ScissorEnv, self).__init__(self._init_helper(txt.strip().split()) + '\\n')\n"
] | [
0,
2
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(VAR_15):...\n",
"if VAR_15 == 1:\n",
"return '0.6 AND 1'\n",
"return '0.0 AND 0.4'\n"
] | [
"def get_taste_condition(value):...\n",
"if value == 1:\n",
"return '0.6 AND 1'\n",
"return '0.0 AND 0.4'\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_8(self, VAR_7, VAR_9, VAR_10=False):...\n",
"if isinstance(VAR_9, list) and not VAR_10:\n",
"self.__dict__[VAR_7] = []\n",
"self.__dict__[VAR_7] = VAR_9\n",
"self.extend(VAR_7, VAR_9)\n"
] | [
"def set(self, key, value, as_value=False):...\n",
"if isinstance(value, list) and not as_value:\n",
"self.__dict__[key] = []\n",
"self.__dict__[key] = value\n",
"self.extend(key, value)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = '/api/keys'\n",
"VAR_5 = {'id': 'mykey@box.local', 'public': VAR_3}\n",
"VAR_6 = self.client.post(VAR_4, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_6.status_code, 201)\n",
"VAR_6 = self.client.p... | [
"def _check_duplicate_key(self, pubkey):...\n",
"\"\"\"docstring\"\"\"\n",
"url = '/api/keys'\n",
"body = {'id': 'mykey@box.local', 'public': pubkey}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"respo... | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_4, VAR_5=False, VAR_6=False):...\n",
"self.serial = VAR_4\n",
"self.fail_br = VAR_5\n",
"self.fail_br_before_N = VAR_6\n"
] | [
"def __init__(self, serial, fail_br=False, fail_br_before_N=False):...\n",
"self.serial = serial\n",
"self.fail_br = fail_br\n",
"self.fail_br_before_N = fail_br_before_N\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(self, VAR_7):...\n",
"return 'FROM {}'.format(VAR_7)\n"
] | [
"def create_from(self, table_or_view):...\n",
"return 'FROM {}'.format(table_or_view)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, *VAR_2, **VAR_3):...\n",
"super(CLASS_0, self).__init__(*VAR_2, **kwargs)\n",
"self.cluster_vip = None\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(HpSanISCSIDriver, self).__init__(*args, **kwargs)\n",
"self.cluster_vip = None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_29(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.stat(VAR_21, VAR_23=False)\n"
] | [
"def lstat(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.stat(path, follow_symlinks=False)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_19():...\n",
"populate_test_database()\n",
"FUNC_1('first playlist')\n",
"VAR_11 = VAR_3.post('/videos/1/title/thumbnail')\n",
"assert VAR_11.json['status'] == 'OK'\n",
"VAR_11 = VAR_3.delete('/videos/1/2')\n",
"assert VAR_11.json['status'] == 'NOK'\n",
"assert VAR_11.json['message'] != None... | [
"def test_should_return_a_not_ok_status_when_deleting_a_video_from_an_unknown_playlist_id(...\n",
"populate_test_database()\n",
"create_playlist('first playlist')\n",
"response = test_app.post('/videos/1/title/thumbnail')\n",
"assert response.json['status'] == 'OK'\n",
"response = test_app.delete('/videos... | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"import urllib.parse as urlparse\n",
"import pytest\n",
"import sqlalchemy as sa\n",
"from pymash import cfg\n",
"from pymash import main\n",
"from pymash import tables\n",
"@pytest.fixture(scope='session')...\n",
"return FUNC_2(VAR_0, 'postgres')\n"
] | [
"import urllib.parse as urlparse\n",
"import pytest\n",
"import sqlalchemy as sa\n",
"from pymash import cfg\n",
"from pymash import main\n",
"from pymash import tables\n",
"@pytest.fixture(scope='session')...\n",
"return _get_engine(request, 'postgres')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Condition",
"Return'"
] |
[
"@VAR_2.errorhandler(404)...\n",
"return {'status': 404, 'msg': str(VAR_15)}\n"
] | [
"@app.errorhandler(404)...\n",
"return {'status': 404, 'msg': str(error)}\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_24(self, VAR_7, VAR_17):...\n",
"self.common.extend_volume(VAR_7, VAR_17)\n"
] | [
"def extend_volume(self, volume, new_size):...\n",
"self.common.extend_volume(volume, new_size)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __call__(self):...\n",
"return self\n"
] | [
"def __call__(self):...\n",
"return self\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6(VAR_2, VAR_5, VAR_4, VAR_7):...\n",
"VAR_8 = VAR_4.connect()\n",
"VAR_9 = VAR_8.cursor()\n",
"VAR_10 = \"SELECT userId FROM Users WHERE userEmail = '{0}'\".format(VAR_2)\n",
"VAR_9.execute(VAR_10)\n",
"VAR_11 = VAR_9.fetchone()\n",
"if VAR_11 is None:\n",
"return False\n",
"VAR_17 = (\n ... | [
"def updateAvailability(username, calendarId, sqlInstance, timeList):...\n",
"conn = sqlInstance.connect()\n",
"cursor = conn.cursor()\n",
"userCheckQuery = \"SELECT userId FROM Users WHERE userEmail = '{0}'\".format(\n username)\n",
"cursor.execute(userCheckQuery)\n",
"userResult = cursor.fetchone()\n... | [
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"return self.get_serializer_class().setup_eager_loading(State.objects.all())\n"
] | [
"def get_queryset(self):...\n",
"return self.get_serializer_class().setup_eager_loading(State.objects.all())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_39(VAR_101):...\n",
"VAR_101.threads = VAR_75\n",
"return VAR_101\n"
] | [
"def decorate(ruleinfo):...\n",
"ruleinfo.threads = threads\n",
"return ruleinfo\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"config.set(xsrf_token_key='abcdef')\n",
"VAR_1 = utils.XsrfTool()\n",
"VAR_3 = utils.get_timestamp(CLASS_0.TEST_NOW)\n",
"self.assertFalse(VAR_1.verify_token('NotTheRightDigest/%f' % VAR_3, 12345,\n 'test_action'))\n"
] | [
"def test_rejects_invalid_token(self):...\n",
"\"\"\"docstring\"\"\"\n",
"config.set(xsrf_token_key='abcdef')\n",
"tool = utils.XsrfTool()\n",
"timestamp = utils.get_timestamp(XsrfToolTests.TEST_NOW)\n",
"self.assertFalse(tool.verify_token('NotTheRightDigest/%f' % timestamp, \n 12345, 'test_action'))\n... | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_0, VAR_1):...\n",
"self.campaign_data = VAR_0\n",
"self.options = VAR_1\n",
"self.result_data = {'campaign_id': self.campaign_data['id'], 'aux_output':\n '', 'data_diff': None, 'debugger_output': '', 'detected_errors': None,\n 'dut_output': ''}\n",
"if os.path.exists('campaign-... | [
"def __init__(self, campaign_data, options):...\n",
"self.campaign_data = campaign_data\n",
"self.options = options\n",
"self.result_data = {'campaign_id': self.campaign_data['id'], 'aux_output':\n '', 'data_diff': None, 'debugger_output': '', 'detected_errors': None,\n 'dut_output': ''}\n",
"if os.pa... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_1(VAR_3):...\n",
"if VAR_3 in ('friends', 'all', ' reddit.com'):\n",
"return False\n",
"return VAR_100(VAR_3) if VAR_3 and VAR_2.match(VAR_3) else None\n",
"return None\n"
] | [
"def chksrname(x):...\n",
"if x in ('friends', 'all', ' reddit.com'):\n",
"return False\n",
"return str(x) if x and subreddit_rx.match(x) else None\n",
"return None\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_20(VAR_18, VAR_20, VAR_10, VAR_26, VAR_27, VAR_28):...\n",
"VAR_48 = VAR_20, VAR_4[VAR_20]['name']\n",
"VAR_34 = FUNC_1(VAR_10)\n",
"VAR_49 = SubmitUserTestRequest(VAR_34, VAR_48, base_url=CWS_BASE_URL,\n VAR_26=submission_format, VAR_27=filenames)\n",
"VAR_49.execute()\n",
"VAR_31 = VAR_49.g... | [
"def cws_submit_user_test(contest_id, task_id, user_id, submission_format,...\n",
"task = task_id, created_tasks[task_id]['name']\n",
"browser = get_cws_browser(user_id)\n",
"sr = SubmitUserTestRequest(browser, task, base_url=CWS_BASE_URL,\n submission_format=submission_format, filenames=filenames)\n",
"... | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'"
] |
[
"@property...\n",
"return self.data_group.group_type.code in ['CP', 'HH', 'CO']\n"
] | [
"@property...\n",
"return self.data_group.group_type.code in ['CP', 'HH', 'CO']\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_1(VAR_0):...\n",
"return 'Meetup : %s' % VAR_0.title\n"
] | [
"def meetup_article_title(meetup):...\n",
"return 'Meetup : %s' % meetup.title\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = api.network.server_security_groups(VAR_1, VAR_3)\n",
"return {'items': [s.to_dict() for s in VAR_14]}\n"
] | [
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"groups = api.network.server_security_groups(request, server_id)\n",
"return {'items': [s.to_dict() for s in groups]}\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self, VAR_3):...\n",
"VAR_2 = self.frames[VAR_3]\n",
"VAR_2.tkraise()\n"
] | [
"def show_frame(self, cont):...\n",
"frame = self.frames[cont]\n",
"frame.tkraise()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_10(self):...\n",
"VAR_12 = 'result/' + self.cate_str + '_scope.json'\n",
"json.dump(self.scope, f)\n"
] | [
"def saveScope(self):...\n",
"file_name = 'result/' + self.cate_str + '_scope.json'\n",
"json.dump(self.scope, f)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_10):...\n",
"VAR_10 = VAR_10.strip()\n",
"debug('Txt picked up by col:', VAR_10)\n",
"VAR_30 = VAR_10.find('\\n')\n",
"VAR_36 = VAR_10[1:VAR_30].strip()\n",
"VAR_10 = VAR_10[VAR_30 + 1:]\n",
"self.percentage = self.units = 0.0\n",
"self.unspecified = 0\n",
"if len(VAR_36) == ... | [
"def __init__(self, txt):...\n",
"txt = txt.strip()\n",
"debug('Txt picked up by col:', txt)\n",
"i = txt.find('\\n')\n",
"head = txt[1:i].strip()\n",
"txt = txt[i + 1:]\n",
"self.percentage = self.units = 0.0\n",
"self.unspecified = 0\n",
"if len(head) == 0:\n",
"self.unspecified = 1\n",
"if he... | [
0,
0,
2,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = discord.Embed(VAR_2='Why you should not use video guides', VAR_3=\n discord.Color.dark_orange())\n",
"VAR_7.description = 'string'\n",
"VAR_7.add_field(name='Recommended', value=\n \"The recommended thing to do is to use [Plailect's writt... | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"embed = discord.Embed(title='Why you should not use video guides', color=\n discord.Color.dark_orange())\n",
"embed.description = \"\"\"\"Video guides\" for custom firmware and arm9loaderhax/boot9strap are not recommended for use. Their contents gener... | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_24=None):...\n",
"self.logger = logging.getLogger(__name__)\n",
"self.logger.setLevel(logging.DEBUG)\n",
"self.configfile = VAR_24\n",
"self.nodes = {}\n",
"self.server = []\n",
"self.host_list = []\n",
"if VAR_24:\n",
"self.load_config(VAR_24)\n",
"self.config = None\n",
... | [
"def __init__(self, configfile=None):...\n",
"self.logger = logging.getLogger(__name__)\n",
"self.logger.setLevel(logging.DEBUG)\n",
"self.configfile = configfile\n",
"self.nodes = {}\n",
"self.server = []\n",
"self.host_list = []\n",
"if configfile:\n",
"self.load_config(configfile)\n",
"self.con... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe import _\n",
"from frappe.website.website_generator import WebsiteGenerator\n",
"from frappe.website.render import clear_cache\n",
"from frappe.utils import today, cint, global_date_format, get_fullname, strip_html_tags, markdown... | [
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe import _\n",
"from frappe.website.website_generator import WebsiteGenerator\n",
"from frappe.website.render import clear_cache\n",
"from frappe.utils import today, cint, global_date_format, get_fullname, strip_html_tags, markdown... | [
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_21, **VAR_1):...\n",
"super().__init__(**kwargs)\n",
"if VAR_44 is None:\n",
"logging.warn('Timezone support disabled, install pytz to enable.')\n",
"self._timezone = VAR_44.timezone(VAR_21)\n",
"self._timezone = None\n"
] | [
"def __init__(self, timezone, **kwargs):...\n",
"super().__init__(**kwargs)\n",
"if pytz is None:\n",
"logging.warn('Timezone support disabled, install pytz to enable.')\n",
"self._timezone = pytz.timezone(timezone)\n",
"self._timezone = None\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(self):...\n",
"VAR_2 = FUNC_0(self.window)\n",
"VAR_3 = FUNC_1(self.window)\n",
"VAR_16 = []\n",
"for VAR_6 in VAR_3:\n",
"VAR_20 = []\n",
"self.sorted_menu = sorted(VAR_16, key=lambda item: item[1], reverse=True)\n",
"VAR_12 = Urtext.meta.NodeMetadata(os.path.join(VAR_2, VAR_6))\n",
"se... | [
"def run(self):...\n",
"path = get_path(self.window)\n",
"files = get_all_files(self.window)\n",
"menu = []\n",
"for filename in files:\n",
"item = []\n",
"self.sorted_menu = sorted(menu, key=lambda item: item[1], reverse=True)\n",
"metadata = Urtext.meta.NodeMetadata(os.path.join(path, filename))\n",... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
1,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(VAR_0):...\n",
"VAR_2 = []\n",
"VAR_5 = {}\n",
"VAR_6 = 's.transaction_date' if VAR_0['based_on'\n ] == 'Sales Order' else 's.posting_date'\n",
"VAR_7 = frappe.db.sql('string'.format(VAR_6=date_field, doctype=filters[\n 'based_on']), as_dict=1)\n",
"for d in VAR_7:\n",
"VAR_5.setdefaul... | [
"def get_sales_details(filters):...\n",
"data = []\n",
"item_details_map = {}\n",
"date_field = 's.transaction_date' if filters['based_on'\n ] == 'Sales Order' else 's.posting_date'\n",
"sales_data = frappe.db.sql(\n \"\"\"\n\t\tselect s.territory, s.customer, si.item_group, si.item_name, si.qty, {dat... | [
0,
0,
0,
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"import requests\n",
"import sqlite3\n",
"import os\n",
"from bs4 import BeautifulSoup\n",
"def FUNC_0(VAR_0):...\n",
"if VAR_0 == '':\n",
"return True\n",
"if len(VAR_0.split()) > 1:\n",
"return True\n",
"VAR_2 = requests.get('http://codeforces.com/submissions/' + VAR_0)\n",
"VAR_3 = BeautifulS... | [
"import requests\n",
"import sqlite3\n",
"import os\n",
"from bs4 import BeautifulSoup\n",
"def check_username(username):...\n",
"if username == '':\n",
"return True\n",
"if len(username.split()) > 1:\n",
"return True\n",
"r = requests.get('http://codeforces.com/submissions/' + username)\n",
"so... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_1(**VAR_12) ->typing.Callable[[typing.Any], commands.Group]:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12.setdefault('cls', CLASS_2)\n",
"return commands.command(**kwargs)\n"
] | [
"def group(**kwargs) ->typing.Callable[[typing.Any], commands.Group]:...\n",
"\"\"\"docstring\"\"\"\n",
"kwargs.setdefault('cls', NekoGroup)\n",
"return commands.command(**kwargs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
"def check_for_setup_error(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"@functools.wraps(VAR_0)...\n",
"if not self.request.headers.get('X-XSRF-Token-Request'):\n",
"return VAR_0(self, *VAR_5, **kwargs)\n"
] | [
"@functools.wraps(f)...\n",
"if not self.request.headers.get('X-XSRF-Token-Request'):\n",
"return f(self, *args, **kwargs)\n"
] | [
0,
0,
0
] | [
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"return ''.join(VAR_8 + '/\\n' if os.path.isdir(os.path.join(self.filename,\n VAR_8)) else VAR_8 + '\\n' for VAR_8 in os.listdir(self.filename))\n"
] | [
"def index(self):...\n",
"return ''.join(filename + '/\\n' if os.path.isdir(os.path.join(self.filename,\n filename)) else filename + '\\n' for filename in os.listdir(self.filename))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_25(self):...\n",
"self.compilation_ko(\n \"\"\"\nif header :is \"Sender\" \"me@example.com\" \n discard;\n}\n\"\"\")\n"
] | [
"def test_nonopened_block(self):...\n",
"self.compilation_ko(\n \"\"\"\nif header :is \"Sender\" \"me@example.com\" \n discard;\n}\n\"\"\")\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_8(self, VAR_12, VAR_13, VAR_14=None):...\n",
""
] | [
"def insert(self, table, values, updater=None):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_29(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.config['tmp_dir_created']:\n",
"self.delete_tmp_dir()\n",
"for f in os.listdir(self.config['tmp_dir']):\n",
"if re.search('*sosreport-*tar*', f):\n",
"os.remove(os.path.join(self.config['tmp_dir'], f))\n"
] | [
"def cleanup(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.config['tmp_dir_created']:\n",
"self.delete_tmp_dir()\n",
"for f in os.listdir(self.config['tmp_dir']):\n",
"if re.search('*sosreport-*tar*', f):\n",
"os.remove(os.path.join(self.config['tmp_dir'], f))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"For",
"Condition",
"Expr'"
] |
[
"def FUNC_0(VAR_0, VAR_1, VAR_2):...\n",
""
] | [
"def SaveDocumentCollection(dc, filenameedges, filenamedata):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_1(self, VAR_13, VAR_11, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.gives_corrected:\n",
"return self._process_corrected(VAR_13, VAR_11, VAR_12)\n",
"return self._process_issues(VAR_13, VAR_11)\n"
] | [
"def process_output(self, output, filename, file):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.gives_corrected:\n",
"return self._process_corrected(output, filename, file)\n",
"return self._process_issues(output, filename)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"@app.route('/api/indi_service', methods=['GET'])...\n",
"return controller.indi_service.to_map()\n"
] | [
"@app.route('/api/indi_service', methods=['GET'])...\n",
"return controller.indi_service.to_map()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_6(self, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_24 = VAR_16['errata_list']\n",
"VAR_24 = filter(None, VAR_24)\n",
"VAR_25 = {}\n",
"if not VAR_24:\n",
"return VAR_25\n",
"VAR_26 = 'SELECT errata.id, errata.name, synopsis, severity.name, description,'\n",
"VAR_26 += ' solution, issu... | [
"def process_list(self, data):...\n",
"\"\"\"docstring\"\"\"\n",
"errata_to_process = data['errata_list']\n",
"errata_to_process = filter(None, errata_to_process)\n",
"answer = {}\n",
"if not errata_to_process:\n",
"return answer\n",
"errata_query = (\n 'SELECT errata.id, errata.name, synopsis, sev... | [
0,
0,
0,
0,
0,
0,
0,
4,
4,
4,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
... |
[
"def FUNC_2(self, VAR_3, VAR_4):...\n",
"api.nova.keypair_delete(VAR_3, VAR_4)\n"
] | [
"def delete(self, request, obj_id):...\n",
"api.nova.keypair_delete(request, obj_id)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_8(VAR_5=20):...\n",
"\"\"\"docstring\"\"\"\n",
"for i in range(VAR_5):\n",
"VAR_1.info('Waiting for JupyterHub to come up ({}/{} tries)'.format(i + 1,\n VAR_5))\n",
"if h.code in [404, 502, 503]:\n",
"urlopen('http://127.0.0.1')\n",
"time.sleep(1)\n",
"if isinstance(e.reason, ConnectionRe... | [
"def ensure_jupyterhub_running(times=20):...\n",
"\"\"\"docstring\"\"\"\n",
"for i in range(times):\n",
"logger.info('Waiting for JupyterHub to come up ({}/{} tries)'.format(i + 1,\n times))\n",
"if h.code in [404, 502, 503]:\n",
"urlopen('http://127.0.0.1')\n",
"time.sleep(1)\n",
"if isinstance(e.... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'"
] |
[
"@VAR_2.route('/web')...\n",
"if VAR_0 == None:\n",
"FUNC_16()\n",
"return json.dumps(get_web(VAR_5, VAR_0=db))\n"
] | [
"@endpoints.route('/web')...\n",
"if db == None:\n",
"init()\n",
"return json.dumps(get_web(tag, db=db))\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_27, VAR_28, VAR_19=None):...\n",
"super(CLASS_4, self).__init__(VAR_27, VAR_19)\n",
"self._result_getter = VAR_28\n"
] | [
"def __init__(self, name, result_getter, event=None):...\n",
"super(AwaitableEvent, self).__init__(name, event)\n",
"self._result_getter = result_getter\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_15(self, VAR_14):...\n",
""
] | [
"def is_date(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_11(VAR_34):...\n",
"if VAR_34.startswith('!'):\n",
"return VAR_34[1:]\n",
"return VAR_34.format(VAR_11=url, VAR_36=dest.name)\n"
] | [
"def formatCommand(e):...\n",
"if e.startswith('!'):\n",
"return e[1:]\n",
"return e.format(url=url, dest=dest.name)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"VAR_3 = {'name': 'fakesnap', 'volume_name': 'fakevolume_name'}\n",
"self.driver._eql_execute('volume', 'select', VAR_3['volume_name'],\n 'snapshot', 'delete', VAR_3['name'])\n",
"self.mox.ReplayAll()\... | [
"def test_delete_snapshot(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"snapshot = {'name': 'fakesnap', 'volume_name': 'fakevolume_name'}\n",
"self.driver._eql_execute('volume', 'select', snapshot['volume_name'],\n 'snapshot', 'delete', snapshot['name'])\n",
... | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"self.assertFalse(DatabaseQuery('DocType').execute(filters={'name': ['in',\n None]}))\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['not in', None]}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self... | [
"def test_in_not_in_filters(self):...\n",
"self.assertFalse(DatabaseQuery('DocType').execute(filters={'name': ['in',\n None]}))\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['not in', None]}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField... | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"For",
"Expr'",
"For",
"Expr'"
] |
[
"def __init__(self, VAR_2, VAR_3, VAR_4='member', VAR_5=100, VAR_6=0):...\n",
"self.id = VAR_2\n",
"self.nickname = VAR_3\n",
"self.rank = VAR_4\n",
"self.balance = VAR_5\n",
"self.events_attd = VAR_6\n"
] | [
"def __init__(self, id, nickname, rank='member', balance=100, events_attd=0):...\n",
"self.id = id\n",
"self.nickname = nickname\n",
"self.rank = rank\n",
"self.balance = balance\n",
"self.events_attd = events_attd\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@VAR_0.route('/add', methods=['POST'])...\n",
""
] | [
"@app.route('/add', methods=['POST'])...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe.utils import getdate, add_days, today, cint\n",
"from frappe import _\n",
"def FUNC_0(VAR_0=None):...\n",
"VAR_1 = FUNC_1()\n",
"VAR_2 = FUNC_2(VAR_0)\n",
"return VAR_1, VAR_2\n"
] | [
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe.utils import getdate, add_days, today, cint\n",
"from frappe import _\n",
"def execute(filters=None):...\n",
"columns = get_columns()\n",
"data = get_data(filters)\n",
"return columns, data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"if self.test_server:\n",
"if hasattr(self.test_server, 'kill'):\n",
"self.test_server.kill()\n",
"os.kill(self.test_server.pid, signal.SIGKILL)\n",
"self.test_server = None\n",
"self.port = None\n",
"self.url = None\n",
"if self.tmp_db:\n",
"os.remove(self.tmp_db)\n",
... | [
"def stop_server(self):...\n",
"if self.test_server:\n",
"if hasattr(self.test_server, 'kill'):\n",
"self.test_server.kill()\n",
"os.kill(self.test_server.pid, signal.SIGKILL)\n",
"self.test_server = None\n",
"self.port = None\n",
"self.url = None\n",
"if self.tmp_db:\n",
"os.remove(self.tmp_db)\n... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'"
] |
[
"@VAR_0.route('/submit/', methods=['POST'])...\n",
"VAR_8 = getConnexion()\n",
"if flask.request.method == 'POST':\n",
"VAR_20 = flask.request.json['general']['observateur']\n",
"return Response(flask.json.dumps('success'), mimetype='application/json')\n",
"VAR_21 = flask.request.json['general']['taxon'][... | [
"@addObs.route('/submit/', methods=['POST'])...\n",
"db = getConnexion()\n",
"if flask.request.method == 'POST':\n",
"observateur = flask.request.json['general']['observateur']\n",
"return Response(flask.json.dumps('success'), mimetype='application/json')\n",
"cd_nom = flask.request.json['general']['taxon... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
... |
[
"import os\n",
"import re\n",
"import operator\n",
"from functools import partial\n",
"import pyramid.events\n",
"import pyramid.request\n",
"import pyramid.config\n",
"from pyramid.session import SignedCookieSessionFactory\n",
"from pyramid.i18n import get_localizer, TranslationStringFactory\n",
... | [
"import os\n",
"import re\n",
"import operator\n",
"from functools import partial\n",
"import pyramid.events\n",
"import pyramid.request\n",
"import pyramid.config\n",
"from pyramid.session import SignedCookieSessionFactory\n",
"from pyramid.i18n import get_localizer, TranslationStringFactory\n",
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0... | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'... |
[
"def FUNC_5(self, VAR_5, VAR_6, VAR_18, VAR_13=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_55 = sorted(self.get_flat_tree(VAR_5, VAR_6, VAR_18), key=itemgetter(\n 'sequence'))\n",
"VAR_15 = [VAR_14['id'] for VAR_14 in VAR_55]\n",
"return self.browse(VAR_5, VAR_6, VAR_15, VAR_13=context)\n"
] | [
"def get_sorted_list(self, cr, uid, root_id, context=None):...\n",
"\"\"\"docstring\"\"\"\n",
"flat_tree = sorted(self.get_flat_tree(cr, uid, root_id), key=itemgetter(\n 'sequence'))\n",
"item_ids = [item['id'] for item in flat_tree]\n",
"return self.browse(cr, uid, item_ids, context=context)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_44(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'LOW': 1, 'MEDIUM': 2}, 'CONFIDENCE': {'MEDIUM': 3}}\n",
"self.check_example('secret-config-option.py', VAR_2)\n"
] | [
"def test_secret_config_option(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'LOW': 1, 'MEDIUM': 2}, 'CONFIDENCE': {'MEDIUM': 3}}\n",
"self.check_example('secret-config-option.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(VAR_1):...\n",
"return frappe.db.get_value('Blog Category', {'name': VAR_1}, 'title') or VAR_1\n"
] | [
"def get_blog_category(route):...\n",
"return frappe.db.get_value('Blog Category', {'name': route}, 'title') or route\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@login_required...\n",
"VAR_15 = get_object_or_404(Candidate, id=candidate_id)\n",
"if VAR_15.person.netid != VAR_2.user.username:\n",
"if not VAR_2.user.has_perm('etd_app.change_candidate'):\n",
"if not VAR_15.thesis.current_file_name:\n",
"return HttpResponseForbidden(\n \"You don't have permission ... | [
"@login_required...\n",
"candidate = get_object_or_404(Candidate, id=candidate_id)\n",
"if candidate.person.netid != request.user.username:\n",
"if not request.user.has_perm('etd_app.change_candidate'):\n",
"if not candidate.thesis.current_file_name:\n",
"return HttpResponseForbidden(\n \"You don't hav... | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_0, VAR_7):...\n",
"self.name = VAR_0\n",
"self.href = VAR_7\n"
] | [
"def __init__(self, name, href):...\n",
"self.name = name\n",
"self.href = href\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(self, VAR_3):...\n",
"VAR_5 = 'select userid from comment_like where commentid=%d' % VAR_3\n",
"VAR_6 = sql.queryDB(self.conn, VAR_5)\n",
"return VAR_6\n"
] | [
"def getCommentsLike(self, commentid):...\n",
"sqlText = 'select userid from comment_like where commentid=%d' % commentid\n",
"result = sql.queryDB(self.conn, sqlText)\n",
"return result\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_1):...\n",
"return CLASS_3(VAR_1, VAR_2=True)\n"
] | [
"def r_strict(item):...\n",
"return Roamer(item, _raise=True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2(VAR_3):...\n",
"@wraps(VAR_3)...\n",
"VAR_6 = VAR_5.headers.get(VAR_1)\n",
"VAR_11 = VAR_5.session.get_csrf_token()\n",
"if VAR_6 == VAR_11:\n",
"return VAR_3(VAR_9, VAR_5)\n",
"return FUNC_5\n"
] | [
"def csrf(fn):...\n",
"@wraps(fn)...\n",
"token = request.headers.get(HEADER_NAME)\n",
"session_token = request.session.get_csrf_token()\n",
"if token == session_token:\n",
"return fn(context, request)\n",
"return wrapper\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_27(VAR_13):...\n",
"self.assertEqual('/request', VAR_13.path)\n",
"VAR_12.append('not_applicable')\n",
"return None\n"
] | [
"def not_applicable(request):...\n",
"self.assertEqual('/request', request.path)\n",
"calls.append('not_applicable')\n",
"return None\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"self.runner.policy.strict_check = True\n",
"self.runall(self.checks)\n",
"VAR_15 = self.runner.stats\n",
"self.assertEqual(7, VAR_15.num_cases())\n",
"self.assertEqual(5, len(VAR_15.failures()))\n",
"self.assertEqual(2, self._num_failures_stage('setup'))\n",
"self.assertEqu... | [
"def test_strict_performance_check(self):...\n",
"self.runner.policy.strict_check = True\n",
"self.runall(self.checks)\n",
"stats = self.runner.stats\n",
"self.assertEqual(7, stats.num_cases())\n",
"self.assertEqual(5, len(stats.failures()))\n",
"self.assertEqual(2, self._num_failures_stage('setup'))\n"... | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(self):...\n",
"if self._hosts_cache is None:\n",
"self._hosts_cache = self._get_hosts()\n",
"return self._hosts_cache\n"
] | [
"def get_hosts(self):...\n",
"if self._hosts_cache is None:\n",
"self._hosts_cache = self._get_hosts()\n",
"return self._hosts_cache\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_17(self):...\n",
"self._test_strtype('varchar', u'')\n"
] | [
"def test_text_upperlatin(self):...\n",
"self._test_strtype('varchar', u'')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_0):...\n",
"VAR_7 = psycopg2.connect(dbname=pg_connection['database'], user=\n pg_connection['user'], password=pg_connection['password'], host=\n pg_connection['host'])\n",
"VAR_8 = VAR_7.cursor()\n",
"VAR_8.execute('string')\n",
"VAR_7.commit()\n",
"VAR_8.close()\n",
"VAR_7.close(... | [
"def create_tables(pg_connection):...\n",
"conn = psycopg2.connect(dbname=pg_connection['database'], user=\n pg_connection['user'], password=pg_connection['password'], host=\n pg_connection['host'])\n",
"cur = conn.cursor()\n",
"cur.execute(\n \"\"\"\n CREATE TABLE IF NOT EXISTS quests\n (id SE... | [
0,
4,
4,
4,
0,
4,
4
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not site_settings.OPTIONAL_PATH_PREFIX:\n",
"return False\n",
"VAR_14 = self.request.path[1:]\n",
"if VAR_14 == site_settings.OPTIONAL_PATH_PREFIX:\n",
"return True\n",
"return VAR_14.startswith('%s/' % site_settings.OPTIONAL_PATH_PREFIX)\n"
] | [
"def _request_is_for_prefixed_path(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not site_settings.OPTIONAL_PATH_PREFIX:\n",
"return False\n",
"req_path = self.request.path[1:]\n",
"if req_path == site_settings.OPTIONAL_PATH_PREFIX:\n",
"return True\n",
"return req_path.startswith('%s/' % site_settings... | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"from django.views.generic import TemplateView, FormView, DetailView\n",
"from django.urls import reverse\n",
"from .entryform import EntryForm, entry_form_config, build_question_flag\n",
"from .models import LifeCondition, Benefit, BenefitRequirement\n",
"VAR_0 = 'core/benefit_overview.html'\n",
"def FUN... | [
"from django.views.generic import TemplateView, FormView, DetailView\n",
"from django.urls import reverse\n",
"from .entryform import EntryForm, entry_form_config, build_question_flag\n",
"from .models import LifeCondition, Benefit, BenefitRequirement\n",
"template_name = 'core/benefit_overview.html'\n",
... | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@api.depends('transitions_to', 'automaton')...\n",
"for record in self:\n",
"if len(record.transitions_to) == 0 or record.transitions_to is False:\n",
"record.is_start_state = True\n",
"record.is_start_state = False\n"
] | [
"@api.depends('transitions_to', 'automaton')...\n",
"for record in self:\n",
"if len(record.transitions_to) == 0 or record.transitions_to is False:\n",
"record.is_start_state = True\n",
"record.is_start_state = False\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"For",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_3(self, VAR_2, VAR_4, VAR_5):...\n",
"VAR_17 = VAR_2.user\n",
"return not isinstance(VAR_5, self.model\n ) or VAR_17.is_staff or VAR_17.is_superuser or self.is_object_visible(VAR_2\n , VAR_4, VAR_5)\n"
] | [
"def has_object_permission(self, request, view, obj):...\n",
"user = request.user\n",
"return not isinstance(obj, self.model\n ) or user.is_staff or user.is_superuser or self.is_object_visible(request,\n view, obj)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
End of preview. Expand in Data Studio.
Dataset Card for "vul_lines"
Original Paper: https://www.sciencedirect.com/science/article/abs/pii/S0167739X24004680
bibtex
@article{TRAN2024107504,
title = {DetectVul: A statement-level code vulnerability detection for Python},
journal = {Future Generation Computer Systems},
pages = {107504},
year = {2024},
issn = {0167-739X},
doi = {https://doi.org/10.1016/j.future.2024.107504},
url = {https://www.sciencedirect.com/science/article/pii/S0167739X24004680},
author = {Hoai-Chau Tran and Anh-Duy Tran and Kim-Hung Le},
keywords = {Source code vulnerability detection, Deep learning, Natural language processing},
abstract = {Detecting vulnerabilities in source code using graph neural networks (GNN) has gained significant attention in recent years. However, the detection performance of these approaches relies highly on the graph structure, and constructing meaningful graphs is expensive. Moreover, they often operate at a coarse level of granularity (such as function-level), which limits their applicability to other scripting languages like Python and their effectiveness in identifying vulnerabilities. To address these limitations, we propose DetectVul, a new approach that accurately detects vulnerable patterns in Python source code at the statement level. DetectVul applies self-attention to directly learn patterns and interactions between statements in a raw Python function; thus, it eliminates the complicated graph extraction process without sacrificing model performance. In addition, the information about each type of statement is also leveraged to enhance the model’s detection accuracy. In our experiments, we used two datasets, CVEFixes and Vudenc, with 211,317 Python statements in 21,571 functions from real-world projects on GitHub, covering seven vulnerability types. Our experiments show that DetectVul outperforms GNN-based models using control flow graphs, achieving the best F1 score of 74.47%, which is 25.45% and 18.05% higher than the best GCN and GAT models, respectively.}
}
- Downloads last month
- 66