diff --git "a/pytest-dev__pytest-8365/docstore.json" "b/pytest-dev__pytest-8365/docstore.json" new file mode 100644--- /dev/null +++ "b/pytest-dev__pytest-8365/docstore.json" @@ -0,0 +1 @@ +{"docstore/metadata": {"/tmp/repos/swe-bench_pytest-dev__pytest/bench/bench.py__": {"doc_hash": "9ec01c1fc46b1d1dbaa44a6378107db82f200c2839a7f5f22e7ef45972e5d615"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/bench_argcomplete.py__10000_iterations_just__": {"doc_hash": "a723742a1d288e3f4993bde367528cda6a0f188b456333a84953eb6e2d8ee9e7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/empty.py__": {"doc_hash": "6ed6bc069c64510abd6d65a2f92bb8f1b03f27dc24a8d8cc7b9b9d034a3101ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/manyparam.py__": {"doc_hash": "23437d09db06329c7bad13fc05562636a801802829f334a28ca66d14498d83cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/skip.py__": {"doc_hash": "c759e6c53ec0b1d7a26727fb8e2df891b3d907e8b92d2aff1ff01801e8a7011c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conftest.py__": {"doc_hash": "78f878a023ce5824b4e5caf19869dfe59b2701f86bfc976df31bf9d700c0e602"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestSpecialisedExplanations_TestSpecialisedExplanations.test_eq_attrs.assert_left_right": {"doc_hash": "43ec8efcb5cbfaa4a669fcdd4823fa41676c5b88e87a7657cd448e6f01167326"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_test_attribute_globf.return.x_1": {"doc_hash": "3a0f3ca29d829728056136c554c67998fac49ae939a3a3081b35acce82ce621f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestMoreErrors_TestMoreErrors.test_try_finally.try_.finally_.x.0": {"doc_hash": "00a1bc28e70c85de8cc892590ce6bb399414572ee27b9b2659984c0a2036d8e6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestCustomAssertMsg_": {"doc_hash": "d2f37b36f9492de3d9dca8a80c416571f4cb103bbf75175389e4a8c55b2ecedc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/global_testmodule_config/conftest.py__": {"doc_hash": "3683bcbb241dca86652d7262f5a76037a32c7985a3bfd217c9ffb19fd1fdf56a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/global_testmodule_config/test_hello_world.py__": {"doc_hash": "f17bd63e212798effda0670f24afe667195e773cf1c1a104b2d63e5d7c797e19"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/test_failures.py__": {"doc_hash": "5c22241bda4039d064978e526c5afd685d8a81af4d743a6f7beb43b2a5a028ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/test_setup_flow_example.py_setup_module_": {"doc_hash": "08bd44f73348d1ec0a1a3c9139d1e094d79a25fda4250f54469413d90c589750"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/conftest.py__": {"doc_hash": "07d8046da363766ce73d6453fd04ebfbc318e2cd895cf13db5e3f52c58a802b1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py___python2.return.Python_request_param_pyt": {"doc_hash": "576f4cf775d5b6581f5fc11e765234bbca63d6b1928fbe58bb2238a87196e1eb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py_Python_Python.dumps.subprocess_check_call_se": {"doc_hash": "23a83a6658862f7e28684ed4c998b96a3e5245a56848328cb9eb08b469d5d52e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py_Python.load_and_is_true_": {"doc_hash": "61109b877c8de1e43ad9bf5c3da2e730fdfbe63426689e314ab3c12d7446be70"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/pythoncollection.py__": {"doc_hash": 
"f63bb4ceb6d8b2ef0b2f309c9b9f61ce1186914ccd6d78f9bda4832ff158400d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/xfail_demo.py_pytest_": {"doc_hash": "f32c0dfc058f1ab86c3b6c104d38d26351c698953e8ca0c96b691d38f7ba5185"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_json_get_issues.while_1_.if_not_another_page_.return.issues": {"doc_hash": "5b39dc7982dee717621c367c3e054be59af4f0699917a1c937314a9cd9ade39a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_main__get_kind.return._issue_": {"doc_hash": "bf74ca164b1ad2d6a33b5aabf0fbe46f4d7d2da04acdfec194dcc049e2df858a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_report_": {"doc_hash": "4aace0a44a6d90f2bf0c6d462ddfe4f81890ba826ed4546fb36d9d934456e227"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/setup-py.test/setup.py__": {"doc_hash": "7da8ce8050aecfff84ced3c59cbd7bcd8629a4f1ce97b7cf1e67c41250cfaf9c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/__init__.py__": {"doc_hash": "bf6203ffa029a7fad23b4ef334bf1750eae101c8157d4767b6da2daf40153e5e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_argcomplete.py_FastFilesCompleter_": {"doc_hash": "c4eb250f05ef446063baf1548d8e950a1ba96c2a8d26892ae55bfe83fbed3eac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Frame_Frame.getargs.return.retval": {"doc_hash": "a9868e376decd8ac57e57833935785b8e9ba627b9d348f519e7dd104aa302a67"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.cut_Traceback.cut.return.self": {"doc_hash": "5d2deb519bd4b105f56039e547ff5028211300f8bbfa0d4f283902455d7950e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.getrepr_ExceptionInfo.getrepr.return.fmt_repr_excinfo_self_": {"doc_hash": "0bd89e8db98fdd12d53cea14c13138458f59da5478ea1c5472a9e323c4fa126a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo._makepath_FormattedExcinfo.repr_traceback.return.ReprTraceback_entries_ex": {"doc_hash": "4cc9f1370f6a23b318d52f8431d358eee591cb595be78e0e013d5b9402ba4c52"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo._truncate_recursive_traceback_FormattedExcinfo._truncate_recursive_traceback.return.traceback_extraline": {"doc_hash": "b3bc61d47d2a0ac4281dbe97665d12db82ab85a0a4d9cc75d83bdcc74e9d7642"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprFuncArgs_ReprFuncArgs.toterminal.if_self_args_.tw_line_": {"doc_hash": "0f46c43897a7eb2d655b27fe48ce891aafae3ea7552279e9cc0c3384c0ae069b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_filter_traceback_": {"doc_hash": "99e25e78194f7ab6d5bfc8e34fb43b13fa87365809ce665f836031be91dad6ab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_get_statement_startend2_get_statement_startend2.return.start_end": {"doc_hash": "29a93b590d4a7d0767de52ae0173e1cf4477bb52efecc2f04378d54cbe7e27da"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_getstatementrange_ast_": {"doc_hash": "faed10d369c0f6dbc9e156b398abe72564d452f1cf2c14243814990e12174206"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_register_assert_rewrite_register_assert_rewrite.importhook_mark_rewrite_": {"doc_hash": "3c0647e299256128f303309c9b680cb0537156f649bcd5368f57d2e891d6d7be"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_DummyRewriteHook_pytest_collection.if_assertstate_.if_assertstate_hook_is_no.assertstate_hook_set_sess": 
{"doc_hash": "fe619a680f56228c2eabc9a01951a367573ff56d321835c080376aa5afff8be0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._early_rewrite_bailout_AssertionRewritingHook._early_rewrite_bailout.return.True": {"doc_hash": "aa340da241186ad3950cc4c0fbac9dd6c8994739bd0edb5f0730c67828985272"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._should_rewrite_AssertionRewritingHook._should_rewrite.return.self__is_marked_for_rewri": {"doc_hash": "e96d722052cc5f8530efa78f8c7a837f4a6429bbc44e42e50297210b615fd92f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__read_pyc__read_pyc.with_fp_.return.co": {"doc_hash": "e2fa5f5244b9904fd3d059dedd5f9f8602f6a4665cf81b0aaed12e71ae9c5a23"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__format_assertmsg__format_assertmsg.return.obj": {"doc_hash": "99110a8e96a15664ece29473a695598aab79ea6321f0309ca6177dde2313d43f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.run_AssertionRewriter.run.while_nodes_.for_name_field_in_ast_it.if_isinstance_field_list.elif_.nodes_append_field_": {"doc_hash": "7de2e243852d6ddd1d1b8cd8c9da9644be0867a373c8d3d4cd76261ed12a7031"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.is_rewrite_disabled_AssertionRewriter.builtin.return.ast_Attribute_builtin_nam": {"doc_hash": "7473a0b3835a90e0a52b8e08d7ac91720e7b13ccc6d61f79747d0c08e162dfbd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.explanation_param_AssertionRewriter.explanation_param.return._specifier_s_": {"doc_hash": "744e29ee04be237533855b5ccd447c4e9043b3a83ba09cfc9aedc9df13ff4cac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.push_format_context_AssertionRewriter.push_format_context.self_stack_append_self_ex": {"doc_hash": "1f9b238263701dd4da92b8a7cd1cd911485b0e68ad0bb4d543ddf011703e3773"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.pop_format_context_AssertionRewriter.generic_visit.return.res_self_explanation_par": {"doc_hash": "db557056a34e139fb68643fb4a74c58c8388d204c5a450a74dc94c9d7f154b7a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Name_AssertionRewriter.visit_Name.return.name_self_explanation_pa": {"doc_hash": "9d98d12d3f3120338e87907e0b60469e06a604788810c422b0175f81fce0eb52"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_BoolOp_AssertionRewriter.visit_BoolOp.return.ast_Name_res_var_ast_Loa": {"doc_hash": "e6e25a39158a9ff7d7de0dd27e99d4e046603ea1cd9faa6a1d7bba84a04c9215"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_UnaryOp_AssertionRewriter.visit_BinOp.return.res_explanation": {"doc_hash": "7d784f751041290f7b9a696978e95623d747b9d24b27008a3fc31b3a4f88c48d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__truncate_explanation__truncate_explanation.return.truncated_explanation": {"doc_hash": "37edfb6b34a458cafdaf8a0dbcd6ce12695760f4d3ce2603cbe31379b22a8ca1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__truncate_by_char_count_": {"doc_hash": "e3350dcf6b4e94ee59439be6d52b8ee56549b5049a9392c8fe5578a6a07fe7f6"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__split_explanation__split_explanation.return.lines": {"doc_hash": "2d188d712aeb1a034186a652e8f64a9b8bf5510b224952e8399c88faae63447f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__format_lines__format_lines.return.result": {"doc_hash": "b814d1e5af4595508fc42a95957d6cc5011a9e9852d69004b4a6536902e4529d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_assertrepr_compare_assertrepr_compare.return._summary_explanation": {"doc_hash": "362056a3a0dcfa690e55d44a2e36743c601cc957bf69f8350774516a419ca176"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_cls__compare_eq_cls.return.explanation": {"doc_hash": "1b557329628b75778929ae9b3ec2a65b530a3251a4baa1cf1e5364579258f5a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__notin_text_": {"doc_hash": "94511e4202b0585f2897706eeef12f200c80a37b9a29d8e83ad9f6972635025c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.makedir_Cache.makedir.return.py_path_local_res_": {"doc_hash": "da94c33a274d6e972cb886e1c3007144bc33df26040b1e6eac6da17d095fa129"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache._ensure_supporting_files_Cache._ensure_supporting_files.cachedir_tag_path_write_b": {"doc_hash": "5609f322b87c11cb63728408bb45b35cd4e2c1020936df7e2fbc2aa534cdf442"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPlugin_LFPlugin.pytest_collectreport.if_passed_.else_.self_lastfailed_report_no": {"doc_hash": "4aabb06fbc9cff0aeadaea4820e5f75f2ac625ee2d57e9d2c7ea8c13441354d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPlugin.pytest_collection_modifyitems_LFPlugin.pytest_sessionfinish.if_saved_lastfailed_se.config_cache_set_cache_l": {"doc_hash": "17b1ce8aaa2e64aeea1d314f5f54123d5d574d2077a06a4a6de65d4644a653bb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_addoption_pytest_addoption.None_6": {"doc_hash": "9510313a573c0632fe63df4df9320affcb51c912865f7fd8d4f56ea4e4616005"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_cacheshow_": {"doc_hash": "9c9585809966eb7521f44414a8ff36c0edf80a249134da091e264183b1de6542"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_pytest_load_initial_conftests_pytest_load_initial_conftests.if_outcome_excinfo_is_not.sys_stderr_write_err_": {"doc_hash": "d6d989e89719c4733201a22207b50d728416392bef88b0defb46e1f57bd3aaf4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture_CaptureFixture.close.if_self__capture_is_not_N.self._capture.None": {"doc_hash": "d9c01bca3adef51096f3502e29608974235718df4afa125b1afe56622e603643"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_getfuncargnames_getfuncargnames.return.arg_names": {"doc_hash": "64d07949b194e6750e15417a959d6fd6f10321fa54d3c6256f6aa1799f406745"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__prepareconfig__prepareconfig.try_.except_BaseException_.raise": {"doc_hash": "f95f2bd6e33737e7840231abfb28e544e5b23a4ad470bc9e4524eb7a8c12ab01"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.parse_hookimpl_opts_PytestPluginManager.parse_hookimpl_opts.return.opts": {"doc_hash": "c7734df07fe36fe9d1497d11aaedb475e67bb2b5461e05cbf5172d4b58176ff7"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.parse_hookspec_opts_PytestPluginManager.parse_hookspec_opts.return.opts": {"doc_hash": "e172eeecad1b312fbc7eec9b1841e6f1086add0272382caaa415ebb89072b089"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._set_initial_conftests_PytestPluginManager._set_initial_conftests.if_not_foundanchor_.self__try_load_conftest_c": {"doc_hash": "bc4b7196cfeaaf618c15f63d8f115f5aa61b448b1c0ad4aba1291a7d6e50c2e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.import_plugin_PytestPluginManager.import_plugin.try_.else_.self_register_mod_modnam": {"doc_hash": "e6f973b561c0da4e39d9ac2996bce9ecbeec1f064b848988c3d0eb571fbd08ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.pytest_cmdline_parse_Config.pytest_cmdline_parse.return.self": {"doc_hash": "e6a783b522b1e731662ceb1f008d67af211b37ca85b34e37e169e666571904e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.notify_exception_Config.notify_exception.if_not_any_res_.for_line_in_str_excrepr_.sys_stderr_flush_": {"doc_hash": "0f047a41dd59b49a894f3343b2b51f59e5ff381387c9c0bdb76475283eee6127"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.cwd_relative_nodeid_Config.pytest_load_initial_conftests.self_pluginmanager__set_i": {"doc_hash": "2fa50c82eff2e75e9889170991837c7184a74877af55ad6e0257f5fdecec84b8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._initini_Config._initini.self._override_ini.ns_override_ini_or_": {"doc_hash": "fae333cb02af5d64b8dc3ab8ec510d80d8ea86eb47ce11d0b8c386dd38922a69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._mark_plugins_for_rewrite_Config._validate_args.return.args": {"doc_hash": "2e78544b5340a52be9ce341747b7804596427ac3148697ea00c3d36ecf49603a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.addinivalue_line_Config.getini.try_.except_KeyError_.return.val": {"doc_hash": "b60508e609deb7f14c5d80821ce3040a85b613e434025e203f9d3b360a619786"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._getini_Config._getini.if_type_pathlist_.else_.return.value": {"doc_hash": "6393664136e08822a6917b43cdc926eacc7e33581f9351329b43ecef831e0f16"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._getconftest_pathlist_Config._getconftest_pathlist.return.values": {"doc_hash": "d7ccb0c59a866dec374fb7fd671f21e690603780c31123d52255c7f88d14ea65"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._get_override_ini_value_Config._get_override_ini_value.return.value": {"doc_hash": "a6fac2d7ed7e3ed8bb279e419a559f4382afc78ac12e15ccaa73ca91bd6a3b91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.getgroup_Parser.getgroup.return.group": {"doc_hash": "bb219f0461d76a790540ff186c07c257d21dea3839ed9244d663b68bd68e969a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.addini_ArgumentError.__str__.if_self_option_id_.else_.return.self_msg": {"doc_hash": "6d0abdb51aa866be5a88135a5c15f8008f7340c880f93091461a2efd82f48c8b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument_Argument.names.return.self__short_opts_self__": {"doc_hash": "82f779d2bf887063e6795810c8f00f0a47cc4a2b2d402b6c9f3b2ef3ce6dcf9e"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument.attrs_Argument.attrs.return.self__attrs": {"doc_hash": "b8469af05393309e84e66a0f2479301e9cf9dbe67fe96e8c850aba5c29813aa0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument._set_opt_strings_Argument._set_opt_strings.for_opt_in_opts_.if_len_opt_2_.else_.self__long_opts_append_op": {"doc_hash": "8918478e6fad0cfe354025f50566afb6a90ad311584545ef62d5fec17ee458e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument.__repr___Argument.__repr__.return._Argument_format_": {"doc_hash": "1a1e1cf33950eea607e7349e8f808b757979bd1f3013562b3ab3d9db3bff0ef3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/exceptions.py__": {"doc_hash": "8fb5d20eb8579de4589d7ec74256c64ab83da8755eb8fc1151e831ccdf4d64ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_get_common_ancestor_get_common_ancestor.return.common_ancestor": {"doc_hash": "7fd3c8b86a4bb765c8092f91ab14f7dd622a1d8b57cb3a6d9b80a4d7267cb566"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_determine_setup_": {"doc_hash": "0afff5ad9bc6e984fd4138105eef167198e92ad3f039e08170180f2e5b88e884"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytest_addoption_pytest_addoption.None_2": {"doc_hash": "ace2c75321a94f02382c83346d6d03cd37eedf1d4415e1c4474873817fd983bf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytest_configure_pytest_configure.config__cleanup_append_fi": {"doc_hash": "3ee06be2bbdad0e59b7317dc79b729e5c5a333cc1bf6052c00c31f30abe2c30d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_PdbInvoke_PdbTrace.pytest_pyfunc_call.yield": {"doc_hash": "ed2aab0461dcce0afa2408aa1fe72c8266af5be2453ac4e3f9018df06de02c88"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__enter_pdb__enter_pdb.return.rep": {"doc_hash": "23acb97a64dd5b3debf2ef0aed431d397b4dfd3d329c5146b791027399f79415"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__postmortem_traceback_": {"doc_hash": "36b4671657ca7d48c3a229d62b73ef537f2d5d5b35760744a5e82dce0dd155aa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem.repr_failure_DoctestItem.reportinfo.return.self_fspath_self_dtest_l": {"doc_hash": "5beb2050af0301cc2b4c219fcb9388ab4080c4ed7b25222ccead97471ec79714"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_flag_lookup__get_flag_lookup.return.dict_": {"doc_hash": "4c27d67e8e5b7f88a3f162a62dd5a46a4049c338fd710b71f0cc43733b29cbfd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_get_optionflags__get_continue_on_failure.return.continue_on_failure": {"doc_hash": "3dddb9e7d8aeea66727571b9644d100c50b0f159fde17057c182a7d4950820e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__check_all_skipped__is_mocked.return._": {"doc_hash": "263e73e1e83ec05505e98b813bcc6a3c0016e8f02716d170b093b5860f8a1fb8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestModule_DoctestModule.collect.MockAwareDocTestFinder._find.with__patch_unwrap_mock_a.doctest_DocTestFinder__fi": {"doc_hash": "cb83c48765c47fe94013bd93e8f55674132e45fb75dfe42563ddc74e497998ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__setup_fixtures__setup_fixtures.return.fixture_request": {"doc_hash": "82c0408a13ff28c9f76e5f7c57c711525c7c280cd9768e29c703e384da309e89"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_reorder_items_atscope_reorder_items_atscope.return.items_done": {"doc_hash": "06ea4352c6e962ee91968990f1c6a4a8fd6ac176e108fd04256bf83e7ac269e2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FuncFixtureInfo_FuncFixtureInfo.prune_dependency_tree.self_names_closure_s": {"doc_hash": "7e6a6aa71f411709399e475b54b6d21f948978448f2796ec63db6362ac7512c8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest_FixtureRequest.node.return.self__getscopeitem_self_s": {"doc_hash": "dfb65825af28172a42c3799ae6c563384895a2b4051f09e06cbb1754f7f21809"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._getnextfixturedef_FixtureRequest._getnextfixturedef.return.fixturedefs_index_": {"doc_hash": "ccd4a7159dbf2ae613c225e605d901db5b3cfa4fdfce4e25abd84930a99c6e07"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._compute_fixture_value_FixtureRequest._compute_fixture_value.None_1.finally_.self__schedule_finalizers": {"doc_hash": "881a02224e812431b433183c11ab9b0b2a8252c096c7bb51c304481ddc241691"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureLookupError_FixtureLookupError.formatrepr.return.FixtureLookupErrorRepr_fs": {"doc_hash": "40165d518743ffaaaf06dc47ee6f9627aa0a2077bc7e5a6e808310171e0c4280"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef_FixtureDef.addfinalizer.self__finalizers_append_f": {"doc_hash": "44ba65c3776aead87e2977dec72c757ba51a877b30fec65b2cc500bf6f5e6d54"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef.finish_FixtureDef.finish.try_.finally_.self._finalizers._": {"doc_hash": "fd1a975210efb45794d1b342356f2cfa9da477aba95c422cb7601fbb2251b6f9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_resolve_fixture_function_resolve_fixture_function.return.fixturefunc": {"doc_hash": "3ca030b9da9f2c872a96e4436649651153e53afc0ff209abd45f88f2d064b3a8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager_FixtureManager.__init__.session_config_pluginmana": {"doc_hash": "977e3190e408892ab9e6e44a4c41bf1de3bdedb921462e9d4025879c579221b0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixtureinfo_FixtureManager.getfixtureinfo.return.FuncFixtureInfo_argnames_": {"doc_hash": "1a27421630e79facf3d233e5e49c983224e474a1f52c20771a8fe34437c78bf9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixtureclosure_FixtureManager.getfixtureclosure.return.initialnames_fixturename": {"doc_hash": "d16c5f76c527e3d605602c30fe8bb4bdccfdd6370557b6b7d7ea18ab953bd41e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.pytest_generate_tests_FixtureManager.pytest_collection_modifyitems.items_reorder_items_": {"doc_hash": "d586875c1297932f5e5e027cc44dc8544fa9f3be1e354f96dcb71a5f83367bf7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixturedefs_": {"doc_hash": "074e20c0d0239b23dc8c9e27360059a1d90d7e208e8d19bb8fe848c5987c6297"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_addoption_pytest_addoption.None_5": {"doc_hash": "4506558dee706ed08efe6ef9eaf5a3e3a5fa2148ee3101ec4beb514084aa1170"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_cmdline_parse_pytest_cmdline_parse.if_config_option_debug_.config_add_cleanup_unset_": {"doc_hash": 
"6c1c94a47a2f662c923c084b9b397631f0d0b319768a279aa23716b84f2d8da2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_showhelp_showhelp.return": {"doc_hash": "a0cc82e7e9167bec36105e7d7d85723634bbd242b0c2226ea7200400c36f1c51"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_cmdline_preparse_pytest_cmdline_preparse._Deprecated_modif": {"doc_hash": "5a59402186c79487e7c962307a87c75fde2bc46be3bf3e8be1a3463745300f6c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__warn_incompatibility_with_xunit2__warn_incompatibility_with_xunit2.if_xml_is_not_None_and_xm.request_node_warn_": {"doc_hash": "d8afaf7c2652d9f371e21b2346770ac11025509fa6383b64f6fc716d66d6a01c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_property_record_property.return.append_property": {"doc_hash": "08004f7b8bfbe0e4d48c45f2ee2061e05dacc2b10e209179cd9cfd3159a29d5d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_configure_pytest_configure.if_xmlpath_and_not_hasatt.config_pluginmanager_regi": {"doc_hash": "4c9f8086aa57988dfd1e920896241f34eacfc5867c92eddcfb15b5720c6502f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_unconfigure_mangle_test_address.return.names": {"doc_hash": "77b0cf87aabc49b7f2b917b3562aa6e627af912400e676f8480dc464473997ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML_LogXML.finalize.if_reporter_is_not_None_.reporter_finalize_": {"doc_hash": "c51e22e085a5d4f4a6657439cd32b492ee2416c8a5835aeed4adf0ea628b1402"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.node_reporter_LogXML._opentestcase.return.reporter": {"doc_hash": "21801cddb429102a4abbb9d3860ef1db6b278a1734b9be7a4165960f42aa5a69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_runtest_logreport_LogXML.pytest_runtest_logreport.if_report_when_teardo.if_close_report_.self_open_reports_remove_": {"doc_hash": "9f3d2f0c3a65259f09dbd7d08dec86db957ff904e843d3d8925343f72c77a9b0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_sessionfinish_LogXML.pytest_sessionfinish.logfile_close_": {"doc_hash": "f4c58a69d642bc03c09d63ff22675fbda06716eddc6f913e1944e2801e31e7d2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_terminal_summary_": {"doc_hash": "07e977517aba5c39b159e9670d80aa9035196e4080a1dac81f90993293cecb4c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.text_LogCaptureFixture.record_tuples.return._r_name_r_levelno_r_ge": {"doc_hash": "99ac7ec18a2be3273c89aed6ff611f7d0312e32396f1b5943cb4f95b37e58241"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_caplog_caplog.result__finalize_": {"doc_hash": "40d49cbc74ccccd0f9d852381a46fed4670fb9c77c4f93759ebdb86ca482a3bc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__LiveLoggingStreamHandler.emit_": {"doc_hash": "e7765eb70d53b8c8ac9e0fc819196e33fa38d9e49e11f2791a6b8a0b3af0d67a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_wrap_session_wrap_session.return.session_exitstatus": {"doc_hash": "d552d15a551af5c1ced1927596656a7394825e17b6be6b3ef24f0190c18ceaba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_addoption_pytest_addoption.parser_addini_EMPTY_PARAM": {"doc_hash": "b68f2d22756cc61fa1c0613b5e458d6d7c429c0a46c366442f9649ba41298a77"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet._for_parametrize_ParameterSet._for_parametrize.return.argnames_parameters": {"doc_hash": "125508891d2163bdef7dc286ec95c174cf4e6de44bb66eb14dc26d4237ab09df"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_resolve_resolve.return.found": {"doc_hash": "dce9ac1d7b1f9a6febf1e8e7185dd7d668ee69ecd5fef8c22b2a7b866f898414"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_annotated_getattr_notset.Notset_": {"doc_hash": "e4c637215875d541478dcc7f8d3837d64f06149765941ec2d14c58031a680d0b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.delattr_MonkeyPatch.delattr.if_not_hasattr_target_na.else_.delattr_target_name_": {"doc_hash": "c5ea644962cc22719525c5300eacbc803463edcd9a42c73378c59fd28427baa1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.undo_": {"doc_hash": "eb9daced4f0c96d303ceb9ba012f3663d7a00beff28629707ee7251554df867a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.warn_Node.warn.warnings_warn_explicit_": {"doc_hash": "732a3843e0c2fe0881776d8e97ff45eaf5bda47ae0c267c74b7c2b3b4908f5c2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.add_marker_Node.add_marker.if_append_.else_.self_own_markers_insert_0": {"doc_hash": "8f8effcb3893d1b7049c79da800e5c3ca68bd0386309d4c7bef1cc4a04614724"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Collector_Collector._prunetraceback.if_hasattr_self_fspath_.excinfo.traceback.ntraceback_filter_": {"doc_hash": "e5e6b287fe2e4eecac2acf2f9a93c54dc627c405b990989fba2d505419e10dc1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_terminal_summary_": {"doc_hash": "b63dccbfffc5ece10ce645ada2e75ee95794389fd93779e143acd4f2b0fd9425"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py__force_symlink__force_symlink.None_1.except_Exception_.pass": {"doc_hash": "a7db698839534127beb8379f375621d3c076d54ad99e7b8f283feb4bc8a1f412"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder_HookRecorder.getcalls.return._call_for_call_in_self_ca": {"doc_hash": "0ee35c441194240c16aac0f01c49ac0d5ea94b8295d9e170ef675d67ef188c80"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.matchreport_HookRecorder.matchreport.return.values_0_": {"doc_hash": "a117a9e7cdb3f17aa141cb1e040b8a9b2a25ae3d6e6f512ef20af744ac516724"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_CwdSnapshot_SysPathsSnapshot.restore.sys_path_sys_meta_pat": {"doc_hash": "628619c8e8608973c576a6782e2b76edfd8a61258e6a0788796055adc35c8515"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher_LineMatcher.re_match_lines_random.self__match_lines_random_": {"doc_hash": "7f57d2795bf2f637ba52df8e2f21399386596f0fa6c489900db481141e6f405d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin.reportinfo_PyobjMixin.reportinfo.return.fspath_lineno_modpath": {"doc_hash": "733959f9cf9b17311f9b8b64f6d15352850c59bf2002f5915c6ef77d6bfdab06"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector._matches_prefix_or_glob_option_PyCollector._matches_prefix_or_glob_option.return.False": {"doc_hash": "6ab87230407d9ac284edb3fb43118d0b9c0527d8241e68933b9330878abde1e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module_Module._inject_setup_module_fixture.self.obj.__pytest_setup_module.xunit_setup_module_fixtur": 
{"doc_hash": "b629de0492edc7f150890a2dacf857eed726d5ef92fdc2bc82931edc03773f77"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._importtestmodule_Module._importtestmodule.return.mod": {"doc_hash": "0b216194844f093b7ae4109cf892cc531b83a452f34be5a5ab333d716b5ddcba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._resolve_arg_value_types_Metafunc._resolve_arg_value_types.return.valtypes": {"doc_hash": "2033f6d6efcea5a1af555bd04dcd604423971360063f8338328866339fad1279"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._validate_if_using_arg_names_Metafunc._validate_if_using_arg_names.for_arg_in_argnames_.if_arg_not_in_self_fixtur.if_arg_in_default_arg_nam.else_.fail_": {"doc_hash": "560140329aa2bb48826a4730c5370df6ab12bc6c2a24c0aa29d64a03ac740ec3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__idval__idval.return.str_argname_str_idx_": {"doc_hash": "03802e0bc80190d20944ced9f4e96fd8e4bc9f4038f452f0f401d74db452fb83"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__show_fixtures_per_test.write_item_showfixtures.return.wrap_session_config__sho": {"doc_hash": "f6a8e3629b9925a5e8b39a8e6d80b0c78db3b224b847fecf4e68b223840c2a95"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxNumpy._yield_comparisons_ApproxNumpy._yield_comparisons.if_np_isscalar_actual_.else_.for_i_in_np_ndindex_self_.yield_actual_i_item_s": {"doc_hash": "98f10bed1b8b380c8a9b8e9b1ea57a92cc5e6da945b91c8539826d5a92c0adf7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsChecker_WarningsChecker.__init__.self.match_expr.match_expr": {"doc_hash": "88ef4140a2ff67bdcb6fd64003d22a1d7418b11281de77e9afbfdd7512d66a15"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsChecker.__exit___": {"doc_hash": "08671f20037812a114ff4477be3cf24ce89e881da602736f99e35c8da052c9a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_unserialization_failure__report_unserialization_failure.raise_RuntimeError_stream": {"doc_hash": "0ca3d2a9165e4dc983e8ad719a8df7817b4ff04007a5799efbe2453d578c7786"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_TestReport.from_item_and_call_TestReport.from_item_and_call.return.cls_": {"doc_hash": "034f39d05fc2675d0e73cbbaacc097cad3ff8d23a65186a37769fe79fec4b8ac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py__update_current_test_var__update_current_test_var.if_when_.else_.os_environ_pop_var_name_": {"doc_hash": "b93ab798e573655523e55d24be8069deb800e1117ba6c667cb8a8e1726abc009"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py__show_fixture_action_": {"doc_hash": "3d8d92cbcd2a964063754e6e4cff34d279c5f08ffa34ba95d8b5addd54f2f9f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_configure_pytest_configure.None_2": {"doc_hash": "5b853e2c3fd3b350220524c67206d2033e229a5b9612064ecaafc2905749d989"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_runtest_makereport_": {"doc_hash": "c642261180c61f5e5469f2d088976c88ed21b6cd94b77bfdd39d1c2fd5c33404"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_addoption_pytest_addoption.parser_addini_": {"doc_hash": "d825f079b66feb1573d60477008d91e2621fb609de07bfa592783894dd12743c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_configure_getreportopt.return.reportopts": {"doc_hash": "e872a736bd2785a83b27160c829925a5f36996c704f6a4c1edad6d137ebe6747"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.verbosity_TerminalReporter.hasopt.return.char_in_self_reportchars": {"doc_hash": "a0496f459d14b90fe49b8a6038696a39e4b209c900dae8467c7843303478c950"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_ensure_prefix_TerminalReporter.write_line.self__tw_line_line_mar": {"doc_hash": "7cdab5970fe8468739495817adbec084b1cf358f2860802a262ac6f861fd405b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.rewrite_TerminalReporter.rewrite.self__tw_write_r_lin": {"doc_hash": "287b14556a5bfbc9de6f16fcc7be5c8df74f6d842d288cf6aac80d4e5b064c1f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_progress_information_message_TerminalReporter._get_progress_information_message.if_self__show_progress_in.else_.return._100_": {"doc_hash": "d37fffcc0b1da23c1bad481771100b6235977ead24912215bae5eab077f72044"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._write_progress_information_filling_space_TerminalReporter.pytest_collectreport.if_self_isatty_.self_report_collect_": {"doc_hash": "75135593cf68e61f75e7107afcbeca12f6ad0f568482e1970e8dca7b4d8757a6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.report_collect_TerminalReporter.report_collect.if_self_isatty_.else_.self_write_line_line_": {"doc_hash": "2a99f24f2639a614a31b42f4817b7ede8a965a5c3af7241d006ca76181030a71"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._write_report_lines_from_hooks_TerminalReporter.pytest_report_header.return.result": {"doc_hash": "61cde295673e9271dd94d2a25f54bab13fe5cfeef527cd5aea1a3ef829482d2e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_sessionfinish_TerminalReporter.pytest_sessionfinish.self_summary_stats_": {"doc_hash": "f595877058b97cb31492b2c88fbd89e01e132147840eca64f81a2dab5d49007c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._report_keyboardinterrupt_TerminalReporter._report_keyboardinterrupt.if_KeyboardInterrupt_in.if_self_config_option_ful.else_.self__tw_line_": {"doc_hash": "3e09111cdb33a4d9f9f2ba3ed3fa9ac66d40e40c0028aa274014aa4dc2b8bcc2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._locationline_TerminalReporter._locationline.return.res_": {"doc_hash": "ed1336f101c324146d19b52d7f364d9325ca38f6462926f21a4ba37ef18a448c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._getfailureheadline_TerminalReporter.getreports.return.values": {"doc_hash": "18348cd03f338aa1bac0334b7f1aa66be38c4a5f34e5d4b7b5358afe849cde04"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_warnings_TerminalReporter.summary_warnings.if_self_hasopt_w_.self__tw_line_Docs_h": {"doc_hash": "90ac4a01f7905bac9048257dc4a81f9feb5eb0636cf7ebf9ee891a3c07c2fb32"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary.show_skipped_TerminalReporter.short_test_summary.show_skipped.for_num_fspath_lineno_.if_lineno_is_not_None_.else_.lines_append_s_d_s_": {"doc_hash": "b77aa13de54ab12113c9b0e37da8d53964f74fc0b3cef977d64ddfcae790c82c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__folded_skips__folded_skips.return.values": {"doc_hash": "52b8a549646d4e1893e9364f4ec923d351f0a973fb64c02f628387ad98514b49"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__make_xunit_fixture__make_xunit_fixture.return.fixture": {"doc_hash": "d5fcbf5bed1073a2e1f901f04000e70db7da69a4468b3be765546ce4fb03b55e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_PytestExperimentalApiWarning_PytestExperimentalApiWarning.simple.return.cls_": {"doc_hash": "2079fa57dfc3c5d543c41e843a3c8edb7c89b36a2782c0d57147c022739d198d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_hook_error_issue38_1_TestGeneralUsage.test_early_hook_error_issue38_1.None_2": {"doc_hash": "6daa9c830c760eb6a126360bd04e77825e7cfdb489fd54fe338dfef1fc33bd01"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_config_preparse_plugin_option_TestGeneralUsage.test_config_preparse_plugin_option.result_stdout_fnmatch_lin": {"doc_hash": "d8aa7910a22953f6906a5754ec35cada71f1e5fe4c0b6da695593345f03b21c5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_load_setuptools_name_TestGeneralUsage.test_early_load_setuptools_name.if_load_cov_early_.else_.assert_loaded_myplug": {"doc_hash": "bc5966fe80b34838a590db27802a95d140d36b040cc3589e513cd5a601d94bed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_skip_TestGeneralUsage.test_conftest_printing_shows_if_error.assert_should_be_seen_i": {"doc_hash": "ca2ae9f2ccbb5dfa8b8dd532ceaea409e7ecc86533a1f171cc1e2fd05b871081"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_issue109_sibling_conftests_not_loaded_TestGeneralUsage.test_directory_skipped.result_stdout_fnmatch_lin": {"doc_hash": "27dd5d15aa25c540dbd09a077beaccb241eac22b5e0fe7fd43098b8f381288a3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_multiple_items_per_collector_byid_TestGeneralUsage.test_multiple_items_per_collector_byid.result_stdout_fnmatch_lin": {"doc_hash": "7100ba12cb5d4728906fd31d76ac2fa344c24225b5199e8b0cfc6f96269d19b2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_skip_on_generated_funcarg_id_TestGeneralUsage.test_skip_on_generated_funcarg_id.res_stdout_fnmatch_lines_": {"doc_hash": "c4e8c4cb2d468e7f7a4a88b5b0f1e6ac2e88eb36c454027b8f6bd1b7c092cbe6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_issue134_report_error_when_collecting_member_TestGeneralUsage.test_issue134_report_error_when_collecting_member.if_in_lookfor_._usage_error_only_if_ite": {"doc_hash": "f2a6cb705b918ee2472ce80645f192a57244f38623ac052c27a582bef0649938"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_namespace_import_doesnt_confuse_import_hook_TestGeneralUsage.test_namespace_import_doesnt_confuse_import_hook.assert_res_ret_0": {"doc_hash": "da817d87bd38683abcb00fcae204e986b14e9ebaca9407a11394dd1cb8fccd6d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_unknown_option_TestGeneralUsage.test_getsourcelines_error_issue553.res_stdout_fnmatch_lines_": {"doc_hash": "866638881518f731d6f8e0183677a58fb551c5541c44e62f48a229bf1f0277c3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_plugins_given_as_strings_TestGeneralUsage.test_plugins_given_as_strings.assert_pytest_main_args_": {"doc_hash": "c0297606c6f28eb1103289ddb5f3e601d9e78f47c801f122cf6c425340885ace"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_parametrized_with_bytes_regex_TestGeneralUsage.test_parametrized_with_null_bytes.res_assert_outcomes_passe": {"doc_hash": "7f877e4bb6a66ec102c672e375e695e986848f8fad908302752e45ef51da4f43"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_package_not_exists_TestInvocationVariants.test_noclass_discovery_if_not_testcase.reprec_assertoutcome_pass": {"doc_hash": "4ec49fcc6ae245d2cf6bbd4f21a71bd7449b51995cd418189cf8b0eff37d2c0f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_doctest_id_TestInvocationVariants.test_has_plugin.assert_request_config_plu": {"doc_hash": "6866c3e6ad16509cd449f2b863d8f47f9b66266221b9579d3e84a34630a90369"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_import_plugin_unicode_name_test_pytest_plugins_as_module.result_stdout_fnmatch_lin": {"doc_hash": "0af7ddde89b64010d2923f8f596b883ed60e1e1fb5c8acca1ed9f3b24fd90913"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_deferred_hook_checking_test_deferred_hook_checking.result_stdout_fnmatch_lin": {"doc_hash": "43b706484d7a5cf4c108a71ed583a6eafdb74065c0e07737d04553d1ef3c143b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_fixture_values_leak_test_fixture_values_leak.result_stdout_fnmatch_lin": {"doc_hash": "892ac320e046eb0dcbcaac2f2b4c77eee43bfd697a6b02a735a83ac9d1e198ab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_fixture_order_respects_scope_test_fixture_order_respects_scope.assert_result_ret_0": {"doc_hash": "28477a4e88a755b2abc7c1f8efc6b2cc9c5134721ad8a8f08f7ca57d1ecad4bd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_code_getargs_test_code_getargs.assert_c4_getargs_var_Tru": {"doc_hash": "662216db29454b746e3cf9c56c8df0460d0f92dce1468c1ae4feb8a52243e4d7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_frame_getargs_test_frame_getargs.assert_fr4_getargs_var_Tr": {"doc_hash": "4bf01889aa94d1ec875449f8a26241e4030d2d05b5fa222be2bf593e96673301"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_getstatement_test_excinfo_getstatement._xxx": {"doc_hash": "8b1ea768e5040d2c5d54833828b7c0e91ac8b7eac92176cf465d60886a627cf4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py__testchain_for_getentrie_TestTraceback_f_g_h.test_traceback_entry_getsource.assert_s_endswith_raise_": {"doc_hash": "1333ef2373efad7b0ccaadb6bbfc05bcaf9795715bef24b813919c69b82dcac4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_getcrashentry_TestTraceback_f_g_h.test_traceback_getcrashentry_empty.assert_entry_frame_code_n": {"doc_hash": "8d7dba203d719783e2f1b0308ebdd961689d15d1c487f8a2a6aea1ab7b160277"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo_TestFormattedExcinfo.test_repr_source.assert_lines_1_": {"doc_hash": "86136715302c60923b47c82290435025d156c5c69bf7974c22e4d1f57ffccfb3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_TestFormattedExcinfo.test_repr_local.assert_reprlocals_lines_3": {"doc_hash": "be560806f62c2137fd212d57857bf2ad829b34a5095e9fcdc8ec2621f6cff130"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_with_error_TestFormattedExcinfo.test_repr_local_with_error.assert_NotImplementedEr": {"doc_hash": "c8d15743d5a71b19e11300a935efbf14610b2f95026e2a3d199264c664c47f81"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_with_exception_in_class_property_TestFormattedExcinfo.test_repr_local_with_exception_in_class_property.assert_ExceptionWithBro": {"doc_hash": "c9e722e265b186c82c639fbe9db0c387ff6e30aeaae7fffb26060afba62232f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines_TestFormattedExcinfo.test_repr_tracebackentry_lines._assert_loc_message_": {"doc_hash": "f72a1d0cf24c9dd8b77a3227e208ed38a69f03cbe479d31fd1e7bbf270ee21ee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_no_TestFormattedExcinfo.test_repr_tracebackentry_no.assert_not_lines_1_": {"doc_hash": "77b3c0ebcd0b9c66118566b087a6e73cc67887410f54a12b20213fece1f8a474"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_tbfilter_TestFormattedExcinfo.test_repr_traceback_tbfilter.None_1": {"doc_hash": "7dff7f38e3df24ef1434af3bf6367e59f7887451a3bc370b1a8a943102e34e35"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_traceback_short_no_source_TestFormattedExcinfo.test_traceback_short_no_source.assert_last_lines_1_": {"doc_hash": "842895a4573ffe3b2e2ac77b6374bfde7604b32f52a39f06d224cca6be68c576"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_excinfo_addouterr_TestFormattedExcinfo.test_repr_excinfo_reprcrash.assert_str_repr_reprcrash": {"doc_hash": "730e69116c8fe4fa4b88e2c99f65abbabd6454ad2bc15bbe440bef8670f04a1f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_recursion_TestFormattedExcinfo.test_repr_traceback_recursion.for_style_in_short_l.assert_str_reprtb_": {"doc_hash": "01e168af33da68c4036376e28410c593ecaa225d57f651ca71362b9350594de3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_without_traceback_TestFormattedExcinfo.test_exc_chain_repr_without_traceback.matcher_fnmatch_lines_": {"doc_hash": "bdd33ffb2c2724fd9ba1ae4b6602b7f38c1aacd279178eb6069b4785a988455c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_exception_repr_extraction_error_on_recursion_test_exception_repr_extraction_error_on_recursion.matcher_fnmatch_lines_": {"doc_hash": "fcf82f3f1a0233aabd861ab3917ebd434afb276f06ae37fca2809511a7c70079"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_no_recursion_index_on_recursion_error_": {"doc_hash": "33009dc73b96b5eddea79b742373df33e2096d6fa85d30d8604d2f077eb5a649"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_code_of_object_instance_with_call_test_oneline_and_comment.assert_str_source_ra": {"doc_hash": "f74d1ad1ed38caf64ba64c1874fe960c4c4eed4b2ac58516424f3a08a66bb75e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestTry_TestTryFinally.test_finally.assert_str_source_": {"doc_hash": "615c91860271088425140f30175761c5bbf3d05081d348059cc85f1bfb63ff31"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestIf_TestIf.test_else.assert_str_source_": {"doc_hash": "20f548457402fe6656247058ea31c8cc074510b1116b44d66a1ac7dc7b1a6c1b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_semicolon_": {"doc_hash": "fb5b2d2f0c43802634647e72a150e679f6c87a6c0eebf25b246ccac1f72f10cb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/acceptance/fixture_mock_integration.py__": {"doc_hash": "a4e3098ecb503dc8b26f2f2b576421f131df5b94959ba0ef8919b4f96c6f6964"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/collect_init_tests/tests/__init__.py__": {"doc_hash": "e1010179058c35b49cb1350eef9a8137eac31b5b0a9ea9bfdc10a0e80aa9f8f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/collect_init_tests/tests/test_foo.py__": {"doc_hash": "bbcad48462d53c52334f66bf59198a68e4b4db8573509cee47d932798de8c643"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_infinite_recursion/conftest.py__": {"doc_hash": "ce995b3b863c4dc07d536e02513c34b375cbbe4ad59cf40b3a27d7fcb36d29f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py__": {"doc_hash": "125256bc1fd9fcd280149f328ca7b0557a60216ba910b19e9f0adf6c19f6d607"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py__": {"doc_hash": "0391e5d76de9eba041e4bc6abfd4fe8775c1b5b876c21b9b684ec894fcb02e82"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/config/collect_pytest_prefix/conftest.py__": {"doc_hash": "6eff3aa56d8356a1c90588f9ee634b002f69efe413e7fd5369a3d2ba5775631a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/config/collect_pytest_prefix/test_foo.py__": {"doc_hash": "2024a258b6e7a1fc7f06cb86356c11245ac5606cd2c6b6f531b397a3170e2937"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/conftest_usageerror/conftest.py__": {"doc_hash": "e9351af720cd748b5a6200fa40a6c359fa19f790655e0390ef538915c92b0aeb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses.py__": {"doc_hash": "0cab32198c59bf3c413b3e9e3af63753b67dbaf9677df6dec9fc2544debdfc9d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py__": {"doc_hash": "a8110c1afd5054cd4aa2d57f1e690e06f4324d027064fb3edc9404308dc472fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py__": {"doc_hash": "805b58cb4acb6f3a4f85622e62858c962351d612e0656dc3e76026abcf54baa3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/custom_item/conftest.py__": {"doc_hash": "965ccfc3dff6655665e33b80fe1fbc169775a000983567cd22e0806e57d52f60"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/custom_item/foo/test_foo.py__": {"doc_hash": "0a4c892a2c72aa1fbd971fe4cdfe996a0be5201ef00f3d4fb8097342f565d5b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py__": {"doc_hash": "b668203d0d6b4184fd31a18d4be94b23371c11f4ee1b2b57fc685db58a3ffba0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py__": {"doc_hash": 
"9b7af6b8000886125b6b5b2b239934933d05d2852756c64a32927ef876c718e7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py__": {"doc_hash": "fcf604761694f72d7ebd5d9c2357f3f434ca2b564e4a3a06e1c9255e36bdf913"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py__": {"doc_hash": "53a540e364f100a1cfd8bc612dacf49a6e84ac53d787d9c9f294fbb5fd7fb675"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py__": {"doc_hash": "27e6969769094c2d15ca41fd626d4526290d98e65df5e02effb4e7531af7bcd8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py__": {"doc_hash": "367fe16779fbb95e5b97cda56271d3f328a09a5207dc0e5a93b3e9dc0d16999f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py__": {"doc_hash": "b078b4f796651a003942d7456a5694db4ed7ea2036c5e6faa29d233000917215"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py__": {"doc_hash": "82692092d3dbb84d6f6d697f8aa373e6a374c5c70d82eb451371dd38cc222e36"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py__": {"doc_hash": "c46928dd491ccaba0d93e2e3ab0ab336eabffc6fe7191926a563175724e3041e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py__": {"doc_hash": "e6cbd2d6ef113460ab19c142538fc31dbcf786c822ed1696686402158d801db4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py__": {"doc_hash": "bda7f8e9d81b17d5c1dcff866b5c63f7004f6faa954f05d1389940b2056ef19b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py__": {"doc_hash": "0a232aa6e92bd1e3188ab751899589f1a7c40319b1d7930b92754d72b584bc6d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py__": {"doc_hash": "ce0a2c1ef25f856463497b2f32a4a2937bd3cb4c658994d6f2db8847c60554f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py__": {"doc_hash": "b58a7d7a2e4b104cb9b2bfebe318a020154a901c9e3d56de688adf3fcdc91faf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py__": {"doc_hash": "f3a37162e5fd7d22237b7c34f53ad1dc18c1e19bc0ee32b3632e13655746092e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py__": {"doc_hash": "76196373707b9bfb2f449cfcfac6c13d2bdf3944bec3af4e675b29d6ee4444e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue88_initial_file_multinodes/conftest.py__": {"doc_hash": "945815f880a5da4bf822a70f0e35bdcb5a678d6eda86420df6f515c0bc2bf9fe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue88_initial_file_multinodes/test_hello.py__": {"doc_hash": "af3329e0ce140e32d140a87182ed4fd82a88564df3a2b084ead8a0101ac09979"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue_519.py_pprint_checked_order.assert_order_": {"doc_hash": "7075300819de1dcdcbec2727042f707ba7d2ddd58aa0c8967f96fb4392cab2c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue_519.py_fix1_": {"doc_hash": "2eafcd7f2579b7af65603cfbf875cab7ced70ab38db1b5e88b42874391334b03"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py__": {"doc_hash": "75a3ed6696f6d70bb15bf1dfe2dc640fa737dc92ad09213fef1e0546338e0d69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/perf_examples/collect_stats/generate_folders.py_argparse_": {"doc_hash": "f0195d98ff921063db0671d3fdbd273bd88919d4ddafa67320d07fc6e7ee08f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/perf_examples/collect_stats/template_test.py__": {"doc_hash": "603fa8a847aa9745ac76ad96c72d7aadb9113c5ac7c9ae0c55568e17fdd906ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/tmpdir/tmpdir_fixture.py__": {"doc_hash": "077fc8c82d55f7cc45f0844728e74df66b3f2d1fe1c2b82f703fc3f2aa440d24"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_parametrized_fixture_error_message.py__": {"doc_hash": "f80493fb608472930a3604bfb74c894781fa452289129d480f99d4652af51554"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip.py__": {"doc_hash": "a486d07472443acdda3e227de74c318be0fc6f48ce7c30757cb6e798a4d3a5e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip_class.py__": {"doc_hash": "3129cab50a3ed46676dc888e65849726dea95f151fd49dcc784850fd505b69b0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip_module.py__": {"doc_hash": "970ed9a455eafe33328fb1e9846379be34cfee6734dfa36260d1d15e29a4cdf1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message.py__": {"doc_hash": "00245e42bddcddafb97fbfa4e7094b6f668ca12767b80607cd8607206bcf6ade"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/examples/test_issue519.py__": {"doc_hash": "eb56aa3b30d29665eeea10435abf27af8170b0f23a187c57a3008aee355fe6fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/create_executable.py__": {"doc_hash": "351c8619bf3c8d449ea34ecd25f7f9f903e8a481e4fed5dda3bf2cd3d2adf8a7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/runtests_script.py__": {"doc_hash": "f33834fe915e66b0d64845a8d423e1feb85669900e65e32ee06f25e6c989286b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/tests/test_trivial.py__": {"doc_hash": "da9376b21038d08066465107ccb2863ef2fad4193dc2077ed44febc97a4a04dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/tox_run.py__": {"doc_hash": "3a13430a51182549a433c5602a22713fb45f5f4298a67f0e50fd4a963c66fe4b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_with_statement_test_log_access.assert_boo_arg_in_caplo": {"doc_hash": "7731b1030191f34a7b4fcd9f2539b22f034d692a6333edf3f7dd0fb144e54c77"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_messages_test_messages.assert_Exception_not_in": {"doc_hash": "5b56ad2aef3b51899c86deff20b57849de06f407d09bcf168f6eb66112b30ab4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_record_tuples_logging_during_setup_and_teardown.assert_x_message_for_x_i": {"doc_hash": 
"fcd5cce2e7db31cc073a29e8856f27e6533de3939f1ac8737e68b4c9d26b5c24"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_messages_logged_test_messages_logged.None_3": {"doc_hash": "1791d9e90ec088dc338c52cc0aa77bbd8709df224cefe6e4e4e17cc784cf01ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_root_logger_affected_test_root_logger_affected.with_open_log_file_as_rf.assert_error_text_going_": {"doc_hash": "06d0e2e61840348a3112ea852a5778b69263b01e9662e04252b364df5d888521"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_setup_logging_test_setup_logging.result_stdout_fnmatch_lin": {"doc_hash": "55a9e3bb4beaffcb52c7f44b36b7ef804250d1d54d39c7d973d004a20e6385c3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_teardown_logging_test_teardown_logging.result_stdout_fnmatch_lin": {"doc_hash": "6ec17e4ef4b0cf603e630b44aa951bff98b74cafc715ed616585ec743b90e702"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_enabled_disabled_test_log_cli_enabled_disabled.None_1.else_.assert_msg_not_in_result_": {"doc_hash": "74ecaa2e8059db233aa86aadc00968dd14c4f9590a4419f8ec50850a0c2025a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_test_log_cli_default_level.assert_result_ret_0": {"doc_hash": "87743f680d2858d67357a8f0bf71ffe929e6b2ca03d339fbad268fecaea7c928"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_multiple_tests_test_log_cli_default_level_multiple_tests.result_stdout_fnmatch_lin": {"doc_hash": "ac734c44639ccf70eab8bb65f1cf49190df83c64a64e37beb839fc7a68db067d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_sections_test_log_cli_default_level_sections.result_stdout_fnmatch_lin": {"doc_hash": "b77a5ecd4dbfb9ce0a8c8e1051bd5accf76739a053ab7db07c51e4ed6f1e7d8f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_live_logs_unknown_sections_test_live_logs_unknown_sections.result_stdout_fnmatch_lin": {"doc_hash": "638b436e6b019c632f9054e4000b6b0903cc9230be75dba7c7a9b13d8239392d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_sections_single_new_line_after_test_outcome_test_sections_single_new_line_after_test_outcome.None_1": {"doc_hash": "e94c118dfb599fd0c2e407e4a7138ee9cabe8b2cc8df2bfc5b51f524c5f7e900"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_ini_level_test_log_cli_ini_level.assert_result_ret_0": {"doc_hash": "9022f5ada79c336788418402fd9af7275e6ce894899556990c87f67fae942841"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_auto_enable_test_log_cli_auto_enable.if_cli_args_log_cli.else_.assert_WARNING_not_in_s": {"doc_hash": "d4c888240c5e10bb2e41d309b97dee477711d314cc97944f8d95b98e85314ff0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_cli_test_log_file_cli.with_open_log_file_as_rf.None_1": {"doc_hash": "af07d90c1fc2189b14b7f31ddb5bb63cb2e7886871b61d7d80bed129743b69d1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_cli_level_test_log_level_not_changed_by_default.result_stdout_fnmatch_lin": {"doc_hash": "8c45244cd9771edec1987064ec994644a243e7d8fe8a31c37ccb3d1420937757"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_ini_test_log_file_ini.with_open_log_file_as_rf.None_1": {"doc_hash": "156ccd83cc60affdbda2ba091d09488c579315996c380dfd91f85a2970dc5704"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_ini_level_test_log_file_ini_level.with_open_log_file_as_rf.None_1": {"doc_hash": "f8d77ab2c4d18f98751dd8358e291bd80906f74e4841484ed5c1f057efa4cdae"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_unicode_test_log_file_unicode.with_open_log_file_encod.assert_Another_normal_me": {"doc_hash": "5c83ed84ddc01afa8055be5cbcaacb404038dc0828cb746db387ea0ccd314b5c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_in_hooks_test_log_in_hooks.with_open_log_file_as_rf.assert_sessionfinish_in": {"doc_hash": "e43cffd2f9df4ec3dd4a54687dd7502982149077491b0901ed652f09f3ee8831"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_in_runtest_logreport_test_log_in_runtest_logreport.with_open_log_file_as_rf.assert_contents_count_lo": {"doc_hash": "79218cde01f23b339cd0758af1fe4662d6f619e5bd8cbd9183164cc484a60cfb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox_TestApprox.test_repr_string.assert_repr_approx_a_": {"doc_hash": "b1e240add5f830d551066fa05b1696495858d09870b1ff7aeccc09594a24c51a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_repr_nd_array_TestApprox.test_repr_nd_array.assert_repr_approx_np_arr": {"doc_hash": "2d2d1a3b4883e645c788bf333243e118bb2168f3777d530eeb3ec21af8cf70d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_operator_overloading_TestApprox.test_exactly_equal.for_a_x_in_examples_.assert_a_approx_x_": {"doc_hash": "1321e4cb133a7c3a57eb8e72e41053739dd9e003f37a3a2c4c79731f0f34135a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_opposite_sign_TestApprox.test_zero_tolerance.for_a_x_in_within_1e10_.None_5": {"doc_hash": "f23cb787076722fec0896b807959033f6f017df6281d39e198070e2f04203faa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_inf_tolerance_TestApprox.test_inf_tolerance.for_a_x_in_large_diffs_.None_3": {"doc_hash": "ca06f0bd41d166c834a7095381be4a804569f7d5eec5c9892e469212cd4c1989"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_inf_tolerance_expecting_zero_TestApprox.test_reasonable_defaults.assert_0_1_0_2_appro": {"doc_hash": "d4968c3e8476f70dcecba962ec3afdd6b3636085c729b5fff2af8567230a01e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_default_tolerances_TestApprox.test_default_tolerances.for_op_a_x_in_examples_.assert_op_a_approx_x_": {"doc_hash": "bf9d23402282a05154745799c0899483f437b5529af204b63d0c5bedf3aa40a9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_custom_tolerances_TestApprox.test_custom_tolerances.None_11": {"doc_hash": "2e38350c92be988d5b43bd2ffd4407850751521e819bc1e6f4554fc0992ae6a3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_relative_tolerance_TestApprox.test_relative_tolerance.for_a_x_in_within_1e8_re.None_1": {"doc_hash": "64d1aaf7b3978b1ee57379b39e546387d63b282d2752e5d9ef0ad58519c01af9"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_absolute_tolerance_TestApprox.test_absolute_tolerance.for_a_x_in_within_1e8_ab.None_1": {"doc_hash": "b8b546b7e95ab16dc4af20c941babd63120bea26429925346770c8c794aee276"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expecting_zero_TestApprox.test_expecting_zero.for_op_a_x_in_examples_.None_1": {"doc_hash": "88ddd5eb98df5bd235cd51d9b07a957293b0f4747f4b7af20e364d1806d12619"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expecting_inf_TestApprox.test_expecting_nan.for_op_a_x_in_examples_.assert_op_a_approx_x_na": {"doc_hash": "bf6980b661616e0d799a52019d14d8583c34faf10c9b700df1fc95edd4791ccb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_int_TestApprox.test_int.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7": {"doc_hash": "a1b00b5bd74ac47389d7f5a465eee00cf0890af7d124ac16db69809e86ba7b32"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_decimal_TestApprox.test_decimal.for_a_x_in_within_1e6_.None_4": {"doc_hash": "54c896c46b4f3733bc78014d4cb6d461a121042d02fdfea0f77e7b5511718216"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_fraction_TestApprox.test_fraction.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7": {"doc_hash": "13571a84f0cabacd18cde812bbaebeb62d7ac0c4105a53759cbb7854178f1dbe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_complex_TestApprox.test_complex.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7": {"doc_hash": "d31581c9a071d052bc20d23e242a96b37b62e3b2e4a9bb56d14e974a8574dd00"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_list_TestApprox.test_list.None_3": {"doc_hash": "f6515761d9e9fb7db36a4a22e9929eb55353337b09e369d3960fec3b60808b3b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_TestApprox.test_numpy_array.None_7": {"doc_hash": "d180c00770d432c6c15d7bd64007e187242b458eab50ce9203d5796e0c3629ae"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_tolerance_args_TestApprox.test_numpy_tolerance_args.for_op__abs__rel_in_tes.None_5": {"doc_hash": "aa482e1bb8784e03271525c18d73a0dec1558eda39da31c4ceb75c151ce0959a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_expecting_nan_TestApprox.test_numpy_expecting_nan.for_op_a_x_in_examples_.assert_op_a_approx_np_ar": {"doc_hash": "b6f6cfba38a10d8eb9a05368391a58c7ecfcf6e27c09dc50a3035d9b44b59e9e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_with_scalar_TestApprox.test_numpy_array_with_scalar.None_3": {"doc_hash": "049e69906a94f8b4be120f3e00f0825a0e0c02ffb756109bdd0f84306263c836"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_scalar_with_array_": {"doc_hash": "0c72b48ebd99965534662841e32a92050db0ac18d52c193bf3a255bdcf971a99"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_syntax_error_in_module_TestModule.test_invalid_test_module_name.result_stdout_fnmatch_lin": {"doc_hash": "1464d5218d21912f926130661fd49606b3c1cc90ad8e1cf9b9ea564306c6decf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_show_traceback_import_error_unicode_TestModule.test_show_traceback_import_error_unicode.assert_result_ret_2": {"doc_hash": 
"86a428675bb27bbf0fb758dcb5113a666da1ef51746060d6834e36a52bc0d874"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass_TestClass.test_class_subclassobject.result_stdout_fnmatch_lin": {"doc_hash": "b0e605e89f3b56620c221c8f855d24c3f8b035cd500aaf898ce7da865cbd943f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass.test_static_method_TestClass.test_static_method.result_stdout_fnmatch_lin": {"doc_hash": "aac9427bbe358ed7ffd1770b580ef506e0b2b017f5e3530b33350f0ce0c9a4e3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction_TestFunction.test_function_as_object_instance_ignored.result_stdout_fnmatch_lin": {"doc_hash": "2b14ed662626c39d62d75e3b650dcc13c4e32f6e01903ac0bcf5291c5b9a17d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_non_hashable_values_TestFunction.test_parametrize_with_non_hashable_values.rec_assertoutcome_passed_": {"doc_hash": "ca6da3bd1029c7adb67e56237240489f627820e12012d0ebac0a5fcfa8af7ba4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_non_hashable_values_indirect_TestFunction.test_parametrize_with_non_hashable_values_indirect.rec_assertoutcome_passed_": {"doc_hash": "a04cfce7aa5b677f959df9c26be8f685cd8e72a98e742240550d9e5219e5b1cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_fixture_TestFunction.test_parametrize_overrides_fixture.rec_assertoutcome_passed_": {"doc_hash": "44e6f5a5806a7806497f3253849b6b293d0761e26fc24b7ec25535aaf96df1a2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_parametrized_fixture_TestFunction.test_parametrize_overrides_parametrized_fixture.rec_assertoutcome_passed_": {"doc_hash": "8e945d7738219070ac91a55b4cf44fc437182954d423ebdce068817aed0ac89d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_indirect_dependency_fixture_TestFunction.test_parametrize_overrides_indirect_dependency_fixture.rec_assertoutcome_passed_": {"doc_hash": "ad58459b012e5aeded09f96b8046b1ce25d412de9b607a616e7c25c1ee05f9a8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_mark_TestFunction.test_parametrize_with_mark.assert_foo_in_keywords_": {"doc_hash": "cb4d030dbe9aba14bc159eabeefbdc3737f8263bc6642e98f7b50878016baa60"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_multiple_parametrize_TestFunction.test_multiple_parametrize.assert_colitems_3_name_": {"doc_hash": "75adbb6930d6f7aa89a8e15c04ec6f058b2d59c22b79cb7aaaa3b5e91ccd0756"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_issue751_multiple_parametrize_with_ids_TestFunction.test_issue751_multiple_parametrize_with_ids.assert_colitems_3_name_": {"doc_hash": "25667ca04e82b11f48ff49e9ae785c3ea300b5cc2560b3ab3b96b48194285fa8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestSorting_TestSorting.test_check_equality.for_fn_in_fn1_fn2_fn3_.assert_modcol_fn": {"doc_hash": "34c86318fd1aaae6e2ec60fd757396e6dbdb2f8db2d01d0ee9135b4e466dd77e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestSorting.test_allow_sane_sorting_for_decorators_TestSorting.test_allow_sane_sorting_for_decorators.assert_item_name_for_ite": {"doc_hash": 
"71c8865e9674945226f8cf0c095e84d56e91d6da2a554abe8dfcbd64cc764669"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization_TestConftestCustomization.test_pytest_pycollect_module.result_stdout_fnmatch_lin": {"doc_hash": "328e9201156365baf8bee54a439f95d6623189348d46dc09abf314f2a64e0a4c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_customized_pymakemodule_issue205_subdir_TestConftestCustomization.test_customized_pymakemodule_issue205_subdir.reprec_assertoutcome_pass": {"doc_hash": "8c6142fa27d56058051be9f668bfcfc9555e6c32f39d4327d54748033adf2d54"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_customized_pymakeitem_TestConftestCustomization.test_customized_pymakeitem.reprec_assertoutcome_pass": {"doc_hash": "ca977f5e07cb3ddd6a98e439ccf81a08c3c4e88af083961e321069de5508de0a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_issue2369_collect_module_fileext_TestConftestCustomization.test_issue2369_collect_module_fileext.result_stdout_fnmatch_lin": {"doc_hash": "466f472f03a0605c6c5dd4b1c043f41d536fbf760c2ce6ac9af6cd9bd98607a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_setup_only_available_in_subdir_test_setup_only_available_in_subdir.result_assert_outcomes_pa": {"doc_hash": "6e0450a758a73059c8a3a064760f207b56d0d275c006cb53e3ab95f60d9cdb2d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_modulecol_roundtrip_TestTracebackCutting.test_traceback_argsetup.assert_numentries_3": {"doc_hash": "aadf01e9ee065ad40aca971c71d0453470cc16a00d420576e7b25c3322f2a5ab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_traceback_error_during_import_TestTracebackCutting.test_traceback_error_during_import.None_2": {"doc_hash": "c0c3ecc2b39d29e9c57f80bdfe90dcc8184d84acc120fd645cf5e916623923fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_traceback_filter_error_during_fixture_collection_TestTracebackCutting.test_traceback_filter_error_during_fixture_collection.result_stdout_fnmatch_lin": {"doc_hash": "31710939c69b777350b8106e817f6c6475437573eb448f5ef9122db7d8c102ff"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_filter_traceback_generated_code_TestTracebackCutting.test_filter_traceback_generated_code.assert_not_filter_traceba": {"doc_hash": "f6e70d312582d9bc32f1f5120ac6ce8bcb7b9f1a9b5491095fc6e42ead7046d3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_filter_traceback_path_no_longer_valid_TestTracebackCutting.test_filter_traceback_path_no_longer_valid.assert_filter_traceback_t": {"doc_hash": "4df7215b7fdbb70d24ff460be40aaf458b5335908569d4e233fb69aa89c241f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo_TestReportInfo.test_itemreport_reportinfo.assert_item_location_": {"doc_hash": "0712b40890e2b48acea727cd22a59314d6d429575a5e2d06c7c0da46cf101666"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo.test_func_reportinfo_TestReportInfo.test_class_reportinfo.assert_msg_TestClass_": {"doc_hash": "b60c9910f012d69518cc983c2f89740947e02fd51f4b532c2a00605083367d3e"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo.test_reportinfo_with_nasty_getattr_TestReportInfo.test_reportinfo_with_nasty_getattr.fspath_lineno_msg_ins": {"doc_hash": "e47469f7139bc754291bd725f6c91cc8a09d652e9fc1b6d162b8d18b62e64224"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_customized_python_discovery_test_customized_python_discovery.None_3": {"doc_hash": "feb905a33db9f8adc3c18fe53762680ffbbea4b7bd28b27b7b831b89cd132f13"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_dont_collect_non_function_callable_test_dont_collect_non_function_callable.result_stdout_fnmatch_lin": {"doc_hash": "2d9898efb42adfb5ac5dddeb6168ffa7b9fa348cd84ac12d7fd8643eba442367"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_class_injection_does_not_break_collection_test_class_injection_does_not_break_collection.result_stdout_fnmatch_lin": {"doc_hash": "f662ea8da365627da6f61193744baa0ac08c076dd8125782928b803728716404"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_keep_duplicates_test_keep_duplicates.result_stdout_fnmatch_lin": {"doc_hash": "4a69f8da1ab94230d95e611a0993596fd903bb7d22304b272f8ca8928f8ab4c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_collection_infinite_recursion_test_package_collection_init_given_as_argument.result_stdout_fnmatch_lin": {"doc_hash": "073373aa9fe156fef147f81778478e12e9c66824334f81ce81498ed7f1c542a9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_with_modules_test_package_with_modules.None_7": {"doc_hash": "10ba16fbac86fbafaee6aef32a6c3113bd7b42dd39539a6aefade8237d711fed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_ordering_": {"doc_hash": "fbaeb698e6ba4eb0582e35426662084f27f216ca90cc64602ad7182cf60b04af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_extend_fixture_conftest_plugin_TestFillFixtures.test_extend_fixture_conftest_plugin.assert_result_ret_0": {"doc_hash": "f07e188b66adf926f507aaaf9c0e77dafa7ca64f0468a944414e2824fac9051d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_extend_fixture_plugin_plugin_TestFillFixtures.test_extend_fixture_plugin_plugin.assert_result_ret_0": {"doc_hash": "fba883ed9fcb2dfb9098e61b237752e4070de5f512f8aaa5a6b87b1e0d559088"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_conftest_module_TestFillFixtures.test_override_parametrized_fixture_conftest_module.None_2": {"doc_hash": "c1ac0026b7c12cd0d6692414b2e34f2f5da497ecd98c8d5fa13dafa1033dcbb2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_parametrized_fixture_conftest_conftest.None_4": {"doc_hash": "180af6059c12d29420270ac8a14005ef969f828cb17ae1bd475c960020e4758a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_non_parametrized_fixture_conftest_module_TestFillFixtures.test_override_non_parametrized_fixture_conftest_module.None_2": {"doc_hash": "43d9c38fd6b42b86de985d6502efc26d65c943dfaff5a6b9b60e51a371a17139"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_non_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_non_parametrized_fixture_conftest_conftest.None_4": {"doc_hash": "c9511e8fd30213cf1708f6844534de4f27111aef1997a431ddde83cd741d03a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest.None_4": {"doc_hash": "84fc1035a9e6808625efef1eac0b3d4880fac24bd7fa60efa56a27c9c9803ee5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_autouse_fixture_plugin_TestFillFixtures.test_autouse_fixture_plugin.assert_result_ret_0": {"doc_hash": "897afa16f10205db677e1dedaf0431480d96016d98cee66aa828a399a04110f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_fixture_excinfo_leak_TestFillFixtures.test_fixture_excinfo_leak.assert_result_ret_0": {"doc_hash": "eba433e76ab8dab385dac1f40c4ba7526f31482e49bf28cdfd48c761180f7c05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic_TestRequestBasic.test_request_attributes_method.assert_req_instance___cla": {"doc_hash": "e479abb6bcdd8d20700f1a2593b801b6f067efb10b4cecaa8c8c777387e9b0cb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_contains_funcarg_arg2fixturedefs_TestRequestBasic.test_request_contains_funcarg_arg2fixturedefs.assert_arg2fixturedefs_s": {"doc_hash": "30a3a31afcf0c05b40289f1df6f7e68be1cbc1e827fe9a57f68a786ac465c55a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_garbage_TestRequestBasic.test_request_garbage.result_stdout_fnmatch_lin": {"doc_hash": "c9858ab6c6e8f7f215d011b1ba6b6214479e1bc305117fb3090d578c6865b204"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_recursive_TestRequestBasic.test_getfixturevalue_recursive.reprec_assertoutcome_pass": {"doc_hash": "d9607f54e20d73c778bf8e1a34012016f48a8ecc65b3ed8155299c847cde9c08"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_teardown_TestRequestBasic.test_getfixturevalue_teardown.result_stdout_fnmatch_lin": {"doc_hash": "d206411d2968afdb127b27c694c6256d385b8defe4dc0cc3360a2943a867093d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_TestRequestBasic.test_request_addfinalizer.assert_teardownlist_1": {"doc_hash": "4a6eedccf9c3d691bee163bc03c2624e17aef25484bdc05580df1586f3518556"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_failing_setup_TestRequestBasic.test_request_addfinalizer_failing_setup.reprec_assertoutcome_fail": {"doc_hash": "1a6c01b767ddd85229c2f8efd3f1ce7d40ccb20a92f8129ff7b014e6a43961af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_failing_setup_module_TestRequestBasic.test_request_addfinalizer_failing_setup_module.assert_not_mod_values": {"doc_hash": "99d6585dacdc32db80d277c87fb7e9a2dee11e8fdf85495bf5b5d39457bfa075"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_partial_setup_failure_TestRequestBasic.test_request_addfinalizer_partial_setup_failure.result_stdout_fnmatch_lin": {"doc_hash": "69105256271bc17c8f7a77ee1212b97caf892ba2427ef14f9977547ab3a79c46"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_subrequest_addfinalizer_exceptions_TestRequestBasic.test_request_subrequest_addfinalizer_exceptions.result_stdout_fnmatch_lin": {"doc_hash": "d59190c6a5e73fb1ce8301ac96042b70497f647a84bbf9dd8db59e7dffd3a878"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_getmodulepath_TestRequestBasic.test_request_fixturenames.reprec_assertoutcome_pass": {"doc_hash": "4a076879cba1812d6a409b542770b3f26eb2121e8cb1143fbe303f6042225718"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_fixtures_sub_subdir_normalize_sep_TestRequestBasic.test_fixtures_sub_subdir_normalize_sep.result_stdout_fnmatch_lin": {"doc_hash": "71da7e58dfc8fce75ac678588bc0b8fea340098a7855e8189b4e7fa70a37073c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_show_fixtures_color_yes_TestRequestBasic.test_newstyle_with_request.reprec_assertoutcome_pass": {"doc_hash": "3b376acf6f894dd5593042c50bc51d08bb891dcba1f2e07db6e49ed37bc2f237"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_setupcontext_no_param_TestRequestBasic.test_setupcontext_no_param.reprec_assertoutcome_pass": {"doc_hash": "54278ebae19057becdf0ec55c0f711eae674905912022edcc8f03a1b85157608"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages_TestFixtureUsages.test_receives_funcargs.reprec_assertoutcome_pass": {"doc_hash": "231ffb7412aca645f4a70abb2412917db61052a5ed67648a4146502814e619d7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_receives_funcargs_scope_mismatch_TestFixtureUsages.test_receives_funcargs_scope_mismatch.result_stdout_fnmatch_lin": {"doc_hash": "b541d0a8fe45f0ac2418b1eae7fa65b0089a38830963660e9571f6fcb3576f8a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_receives_funcargs_scope_mismatch_issue660_TestFixtureUsages.test_invalid_scope.result_stdout_fnmatch_lin": {"doc_hash": "f7e4b6455cbb755c6290289a9c55d5bf66bb1630e0c01a42ced892f8b6a5601f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_funcarg_parametrized_and_used_twice_TestFixtureUsages.test_funcarg_parametrized_and_used_twice.result_stdout_fnmatch_lin": {"doc_hash": "ab6ae019b32b41c164839eeb21774900f3f03e612bc4d448ab364b6811d29cb0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_marker_TestFixtureUsages.test_usefixtures_marker.reprec_assertoutcome_pass": {"doc_hash": "f704939c22643c5055b60b93f48f6ed3d42e110482c4a57eb3050510a8493c45"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_ini_TestFixtureUsages.test_usefixtures_ini.reprec_assertoutcome_pass": {"doc_hash": "39c60332e08103652d695917fbb34578c60afb494bb3b170bf43e19c210af3d2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_seen_in_showmarkers_TestFixtureUsages.test_request_instance_issue203.reprec_assertoutcome_pass": {"doc_hash": 
"98e7b200d218c0a9a34a9bfe8cd8903fe6585d3762974de0dac39d5073e5292a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_fixture_parametrized_with_iterator_TestFixtureUsages.test_fixture_parametrized_with_iterator.assert_values_1_2_1": {"doc_hash": "dacdb9a7fa9c9e5287f76d09a3f21b04e1378e423bd33db2a9995cf8d1ebb390"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_setup_functions_as_fixtures_TestFixtureUsages.test_setup_functions_as_fixtures.result_stdout_fnmatch_lin": {"doc_hash": "945164dd60841a242127a40e3a76a223c05151bef05714bac6875de3d99e0174"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories_TestFixtureManagerParseFactories.test_parsefactories_evil_objects_issue214.reprec_assertoutcome_pass": {"doc_hash": "b8d87b1cf4241c2ec39ddb94f8ba9438b226f0178e8abf377e95b3fdb83c905e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_conftest_TestFixtureManagerParseFactories.test_parsefactories_conftest.reprec_assertoutcome_pass": {"doc_hash": "f4afae38b8e9427692be204418dca2a96da9673c928b62510cfc00c29fed49e8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_conftest_and_module_and_class_TestFixtureManagerParseFactories.test_parsefactories_conftest_and_module_and_class.reprec_assertoutcome_pass": {"doc_hash": "c24f3ce498346bcbf58980f6d3a2566d90ecfc501cfa3a99667d1d37d8a16954"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_package_xunit_fixture_TestFixtureManagerParseFactories.test_package_xunit_fixture.reprec_assertoutcome_pass": {"doc_hash": "795d7134df4d9dc33b7cc9bd88cb79a5a12de97fdb352f6e7bfb67fb8f0c4769"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_package_fixture_complex_TestFixtureManagerParseFactories.test_collect_custom_items.result_stdout_fnmatch_lin": {"doc_hash": "7baddaa79d2930694101dabdd0721cb4eb78847a37c13d5bed2d5b69ff6fbb91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_parsefactories_conftest_TestAutouseDiscovery.test_parsefactories_conftest.reprec_assertoutcome_pass": {"doc_hash": "d4658ec0be957ede5dfdc9ef750c3c9d4c125b7a6da27ec4b636b26aee1aa7f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_two_classes_separated_autouse_TestAutouseDiscovery.test_two_classes_separated_autouse.reprec_assertoutcome_pass": {"doc_hash": "550d6a2db9fada0c4c59a2c197d28b84a50bfb242acd069868fb21124df87fcb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_setup_at_classlevel_TestAutouseDiscovery.test_setup_at_classlevel.reprec_assertoutcome_pass": {"doc_hash": "a079e3fddac0e2e89ec4417d02e759c70085298f881a08ed31a94ccd6cd9a1ba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_setup_enabled_functionnode_TestAutouseDiscovery.test_setup_enabled_functionnode.reprec_assertoutcome_pass": {"doc_hash": "e9ef36ee74007fa0c3918ea8f58e69491cfb8e8cb52d7ceace5bfafed9ce93e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_callables_nocode_TestAutouseDiscovery.test_callables_nocode.reprec_assertoutcome_fail": {"doc_hash": "c6ff41fb925ca9f042bee0de9f6bf64763073de8dfda1f13967f1bcccac329b1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_autouse_in_conftests_TestAutouseDiscovery.test_autouse_in_conftests.result_stdout_fnmatch_lin": {"doc_hash": "1a1ecb5687d8ee264c266788a62201f378dbb872557d35e39bf3a72396a78ae1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_autouse_in_module_and_two_classes_TestAutouseDiscovery.test_autouse_in_module_and_two_classes.reprec_assertoutcome_pass": {"doc_hash": "6a7da27a0181e09bbf6912055ac213df2309bb9ddfbc208ae2efab94f6937fee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement_TestAutouseManagement.test_autouse_conftest_mid_directory.reprec_assertoutcome_pass": {"doc_hash": "c1bf1e34541cf14436d27604149bcd3bd50b646cf06b55f1dd14377b07d0cfed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_funcarg_and_setup_TestAutouseManagement.test_funcarg_and_setup.reprec_assertoutcome_pass": {"doc_hash": "4297479690c3003ddfd7737800190fd20e529bfb79d63cbeb4a634dbcd30455f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_uses_parametrized_resource_TestAutouseManagement.test_uses_parametrized_resource.reprec_assertoutcome_pass": {"doc_hash": "85827320c07cf985ace349b317f2eb78166f13071c2331332a8bfa276449c788"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_session_parametrized_function_TestAutouseManagement.test_session_parametrized_function.reprec_assertoutcome_pass": {"doc_hash": "54a285fa70e00ac25e7fde613d37eaff0da2dfa576060183b8fc3662810e8db5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_class_function_parametrization_finalization_TestAutouseManagement.test_class_function_parametrization_finalization.assert_values_fin_a1": {"doc_hash": "8aa4f0c24fa6d6874c000bffff1f36a4b4ab165a17465a07435962f664768d25"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_scope_ordering_TestAutouseManagement.test_scope_ordering.reprec_assertoutcome_pass": {"doc_hash": "ba064e98d89d0f3f8c014f7e0dabc8a7644be91bdf67e6dcd4168f5858fe10bd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_parametrization_setup_teardown_ordering_TestAutouseManagement.test_parametrization_setup_teardown_ordering.reprec_assertoutcome_pass": {"doc_hash": "278c30fccd94f877b08c8132aec10eb4c47d4caa691ebe06d4049cc6b72acb68"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_ordering_autouse_before_explicit_TestAutouseManagement.test_ordering_autouse_before_explicit.reprec_assertoutcome_pass": {"doc_hash": "0589452bc4c2b40d94fff16337f5ef680555d3ed4c91218353af2baa53cb9a96"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_ordering_dependencies_torndown_first_TestAutouseManagement.test_ordering_dependencies_torndown_first.reprec_assertoutcome_pass": {"doc_hash": "14ec8cfdba95d4396fee5bd74666f33c452a33c7841c8b345afbad5f96d01276"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_override_parametrized_fixture_issue_979_TestFixtureMarker.test_override_parametrized_fixture_issue_979.reprec_assertoutcome_pass": {"doc_hash": "ec0c01c1a43eaba17873becb54164d6f45704f54c452d2abb0f2076b9d2f66d7"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_TestFixtureMarker.test_scope_session.reprec_assertoutcome_pass": {"doc_hash": "68da3362d2c6d21841c1bdc61519cf6a7ea588c26513d9c74e0efd555abad56e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_exc_TestFixtureMarker.test_scope_session_exc.reprec_assertoutcome_skip": {"doc_hash": "f6ad058bb59d9f6b75250955c9c627868731467855e876f6041b414f10c89927"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_exc_two_fix_TestFixtureMarker.test_scope_session_exc_two_fix.reprec_assertoutcome_skip": {"doc_hash": "3490e00a4ad7052847218a1454af41ab53068fcdbab657041279e2b2672dc8bd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_exc_TestFixtureMarker.test_scope_exc.reprec_assertoutcome_skip": {"doc_hash": "892be6aa682752934b5d216a0d6ef5a61898b8b3e4c2ae7142f5939a35d02a31"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_module_uses_session_TestFixtureMarker.test_scope_module_uses_session.reprec_assertoutcome_pass": {"doc_hash": "1c6bf93e1ccf58bd6647ca13513e535739a3cef478cf555bc86f429a93901e48"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_module_and_finalizer_TestFixtureMarker.test_scope_module_and_finalizer.reprec_assertoutcome_pass": {"doc_hash": "96e948bb3f04e6c3df7647348df3692c4f27e8c31b85dd28524d698c884316bc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_mismatch_various_TestFixtureMarker.test_scope_mismatch_various.result_stdout_fnmatch_lin": {"doc_hash": "2b1fe0aed91f1ca1770cab4ba63a866f6501ec084c29654d773156d09827167d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_and_scope_TestFixtureMarker.test_parametrize_and_scope.assert_c_in_values": {"doc_hash": "55d9979d18a0c2d2df7748797d14092d2f7d45e63ce50b35feaa7761c99055dd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_mismatch_TestFixtureMarker.test_scope_mismatch.result_stdout_fnmatch_lin": {"doc_hash": "5c2428eb9115fb40379c522f22cf0d90dd9a729ba99ed168f9dd48f47bc4abfe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_order_TestFixtureMarker.test_parametrize_separated_order.assert_values_1_1_2": {"doc_hash": "3c323ce50c6e6fd3454a403e906cf2a7cb5be0d0e5033f96e6c64beb8cdbb112"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_module_parametrized_ordering_TestFixtureMarker.test_module_parametrized_ordering.result_stdout_fnmatch_lin": {"doc_hash": "0a6026c5897d0126fa5b01c2e66c740e457659fc6cbd69dd2d0c31b44c9bdc1c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_parametrized_ordering_TestFixtureMarker.test_dynamic_parametrized_ordering.result_stdout_fnmatch_lin": {"doc_hash": "cdec357bf0c83d10c828a334ad7972dbfa80c8ff21c9e3bab658842dbd6a9956"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_ordering_TestFixtureMarker.test_class_ordering.result_stdout_re_match_li": {"doc_hash": "bf8aac00c0b6349c40b28d9ed88927e74876970b796a6da1d44141db486d8683"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_order_higher_scope_first_TestFixtureMarker.test_parametrize_separated_order_higher_scope_first.assert_values_expected": {"doc_hash": "929327dead99ab6b829621eaf1efb655aa320bceb9eac9963bb780aa5e246591"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_fixture_finalizer_TestFixtureMarker.test_fixture_finalizer.for_test_in_test_browse.reprec_stdout_fnmatch_lin": {"doc_hash": "d08e64ca698b0c06c9f8657048a7a733488613047bf843dc81b6ad8cabdd5020"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_scope_with_normal_tests_TestFixtureMarker.test_class_scope_with_normal_tests.for_test_in_test_a_t.assert_reprec_matchreport": {"doc_hash": "b833b868945cc07ba0cce850f7af8082964021e1e4c65359af6fa7900eb3a0ac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_request_is_clean_TestFixtureMarker.test_request_is_clean.assert_values_1_2_": {"doc_hash": "473c443d01bc8348e7ecf52d8ff9f790c9ee49e18017ba1f595817b3d70c3157"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_lifecycle_TestFixtureMarker.test_parametrize_separated_lifecycle.assert_values_5_fin2": {"doc_hash": "38d8650500548aed55114a648a85832b535d2d776624ce62f990298691bf8b91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_function_scoped_finalizers_called_TestFixtureMarker.test_parametrize_function_scoped_finalizers_called.reprec_assertoutcome_pass": {"doc_hash": "458aa30054e4cb8d9059cacbb5b18571664b66822438c02dfc22227404bbe89d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_finalizer_order_on_parametrization_TestFixtureMarker.test_finalizer_order_on_parametrization.reprec_assertoutcome_pass": {"doc_hash": "34a458a6a5e1c0d351f801b7771080d98db4e9397ca3e2b7a36372be21fc9ef9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_scope_parametrization_ordering_TestFixtureMarker.test_class_scope_parametrization_ordering.assert_values_": {"doc_hash": "30343a3378e22e55a3e8eb62885f784c675bf4656c057853c3efb867ca50d034"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_setup_function_TestFixtureMarker.test_parametrize_setup_function.reprec_assertoutcome_pass": {"doc_hash": "6aaf69aadcba4c8df8bb4efc8195b486b3e078c297433d9e53149590fdcabce6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestScopeAccess_TestRequestScopeAccess.test_setup.reprec_assertoutcome_pass": {"doc_hash": "bb9551c82b5b85410752988d0b9ee3af05715ec19411011b52c42ab68def2346"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestScopeAccess.test_funcarg_TestRequestScopeAccess.test_funcarg.reprec_assertoutcome_pass": {"doc_hash": "49dbb8f3e8d5fdb23065f865a7c9c1a7f4dff310832d3419ca4abb7602b88c0e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors_TestErrors.test_subfactory_missing_funcarg.result_stdout_fnmatch_lin": {"doc_hash": "4b3fa1ea5fa69a3bec5b522194a508c00fe36623feca10d1d6902d35505d1213"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors.test_issue498_fixture_finalizer_failing_TestErrors.test_issue498_fixture_finalizer_failing.result_stdout_fnmatch_lin": {"doc_hash": 
"f35a649e498ee12ca7c5fdd713afeb3da656861a9653bb2f9b67ad846dca76a8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors.test_setupfunc_missing_funcarg_TestErrors.test_setupfunc_missing_funcarg.result_stdout_fnmatch_lin": {"doc_hash": "6b1f1fb51a8881c69f361fe1d0e331d8bbe456b65679b9b7c2707ad843a23fe9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures_TestShowFixtures.test_show_fixtures_verbose.result_stdout_fnmatch_lin": {"doc_hash": "371124118737be138cdcee15a5a148d0d0f8dec2f33d2bafe4cdae250d486140"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_conftest_TestShowFixtures.test_show_fixtures_conftest.result_stdout_fnmatch_lin": {"doc_hash": "a903479cb10d6a5faa23f6608c78624646c0ff2380ca1657b04e1070b88d3703"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_trimmed_doc_TestShowFixtures.test_show_fixtures_trimmed_doc.result_stdout_fnmatch_lin": {"doc_hash": "e3f157599191df6e034e84f622b060231c7c9d7ed1c9f1e808d437d5d6457d79"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_doc_TestShowFixtures.test_show_fixtures_indented_doc.result_stdout_fnmatch_lin": {"doc_hash": "07b879855059b92cd4d065a16ef499b65eca57f42d23519e7d42417041ac7bdc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_doc_first_line_unindented_TestShowFixtures.test_show_fixtures_indented_doc_first_line_unindented.result_stdout_fnmatch_lin": {"doc_hash": "3e38d19e2e3da2a22e5b5fdf4f229a3898f32d5c3b873e5433ec90020632a2de"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_in_class_TestShowFixtures.test_show_fixtures_indented_in_class.result_stdout_fnmatch_lin": {"doc_hash": "6ef19017fd6cad04636265789c0f297172c6c926769e1d3bb12fa8c2f2af9f39"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_different_files_TestShowFixtures.test_show_fixtures_different_files.result_stdout_fnmatch_lin": {"doc_hash": "c0c519e18e6ef0934888dd57c87d540d4398d224bea6c56c5dd5fb5d31f77954"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_scoped_TestContextManagerFixtureFuncs.test_scoped.result_stdout_fnmatch_lin": {"doc_hash": "80d6966fae01929c80cbcff0273fd744945356d94d3240ec5cb90ef1053c0486"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_setup_exception_TestContextManagerFixtureFuncs.test_teardown_exception.result_stdout_fnmatch_lin": {"doc_hash": "bb6977a5c5ab1876f78b854584f0f677112937347577d0b5d827d63cbac09ee3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_yields_more_than_one_TestContextManagerFixtureFuncs.test_custom_name.result_stdout_fnmatch_lin": {"doc_hash": "4993e7907a68041d07cb3c44dd2d3eb90db36b6f06c5690582e0ef16284f075a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest_TestParameterizedSubRequest.test_call_from_fixture.result_stdout_fnmatch_lin": {"doc_hash": "64c4980164f09e8f34cbc6f6635e7affeb325c24a9d1689d9e04ed4e4f0a1d25"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_call_from_test_TestParameterizedSubRequest.test_call_from_test.result_stdout_fnmatch_lin": {"doc_hash": "d3939f7e308b3ea847e3677e82ac5a823bbdcced2d0e281d7957425d71ef22d6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_external_fixture_TestParameterizedSubRequest.test_external_fixture.result_stdout_fnmatch_lin": {"doc_hash": "6c6926b54f66f24f59841b3c3966bcdd260dbfae9e25625d47887a0a0bf11696"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_pytest_fixture_setup_and_post_finalizer_hook_test_pytest_fixture_setup_and_post_finalizer_hook.result_stdout_fnmatch_lin": {"doc_hash": "bbb2c9ae9c0e64432d2b24564c8f1b990eba4c45adfa7f86ab6aa7bf760ed6e6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering_TestScopeOrdering.test_func_closure_module_auto.assert_request_fixturenam": {"doc_hash": "82cca28819ba7aebb32dd6c686b3cc1fa58a53e30a7c5441641fbde8c4b74048"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_module_TestScopeOrdering.test_func_closure_module.assert_request_fixturenam": {"doc_hash": "7b58c9fdb3f31309d54f0bb69e579babe068f2e3ed9baa9a055f04704d6c1e86"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_scopes_reordered_TestScopeOrdering.test_func_closure_scopes_reordered.assert_request_fixturenam": {"doc_hash": "e8e8b0e79e6b5f4aa2732f96c0f711ec9bb31b91a3121527492cdce00e1a2e50"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_same_scope_closer_root_first_TestScopeOrdering.test_func_closure_same_scope_closer_root_first.assert_request_fixturenam": {"doc_hash": "ef1b5d58091af4860e64daa344101bcfc4409813e9464be49322de788fea3499"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_all_scopes_complex_TestScopeOrdering.test_func_closure_all_scopes_complex.assert_request_fixturenam": {"doc_hash": "b953b15a4702ea99019ebc205a5fd3e5317748e2016f1123e4dde1e0a4ca1ebb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration_TestMockDecoration.test_unittest_mock.reprec_assertoutcome_pass": {"doc_hash": "53670aa40aa9645b0f2bfd2e877f7db60384bbcdf9354f3e6128236fc292179c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_unittest_mock_and_fixture_TestMockDecoration.test_unittest_mock_and_fixture.reprec_assertoutcome_pass": {"doc_hash": "968f8fbb3898c7726a4613e7a443071f92ed25f8a206a3101732ca20dac1108c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_unittest_mock_and_pypi_mock_TestMockDecoration.test_unittest_mock_and_pypi_mock.reprec_assertoutcome_pass": {"doc_hash": "85e6f5a3e679d140669f1b71ba98d36d01d36dc368389a0120f50fa27b08121d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_TestMockDecoration.test_mock.assert_funcnames_T_t": {"doc_hash": "76b7e25a84e05bf29982daf2a31f3af96d5b427372174175183a557a8b15cbb7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_sorting_TestMockDecoration.test_mock_sorting.assert_names_test_on": {"doc_hash": "6df5af7f8c2c2c6404bab6a5e31f3424ea82b58f7f870f73e7b77a70cab2578a"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_double_patch_issue473_TestMockDecoration.test_mock_double_patch_issue473.reprec_assertoutcome_pass": {"doc_hash": "6334e07dbe8269c8b1c85ded64742c163595a41798eebbb28675ee5110fc6db2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestReRunTests_TestReRunTests.test_rerun.None_3": {"doc_hash": "a8233379ab16c6e3502b49d05536e595707d627867f99ab1bbed662ec0804b1c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_test_pytestconfig_is_session_scoped_TestNoselikeTestAttribute.test_class_and_method.assert_not_calls": {"doc_hash": "f00ab63acd04f78f1984869110b2721e067d0ab80896bbc4569ea426af80cd86"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestNoselikeTestAttribute.test_unittest_class_TestNoselikeTestAttribute.test_unittest_class.assert_call_items_0_cls_": {"doc_hash": "2de332e49272ccee25ec7888ca44835357cdff3a9e9e7bb07400e3f5f0a682af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestNoselikeTestAttribute.test_class_with_nasty_getattr_TestNoselikeTestAttribute.test_class_with_nasty_getattr.assert_not_call_items": {"doc_hash": "573088346738248d085d6c175df0115644aae43105fa6492b6f5eb2cd34fb497"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestParameterize_TestParameterize.test_idfn_marker.res_stdout_fnmatch_lines_": {"doc_hash": "5b7e17c0ac3aeab6dd0dd9e7ff6c7ad4eb253cf9159f6e45fdc6b9f9ef5b0e8c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestParameterize.test_idfn_fixture_": {"doc_hash": "7f1fc27df317f137355c391e09b96049455303f2394c7339a71eeaedf48515af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_find_parametrized_scope_TestMetafunc.test_find_parametrized_scope.None_12": {"doc_hash": "5fda5a40d3322c3a5a6e52e11862112d779973e5c9b1c732c3fde70ef5720f82"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_unicode_idval_TestMetafunc.test_unicode_idval.for_val_expected_in_valu.assert__idval_val_a_6": {"doc_hash": "4b42d004fa68d88ba87a818f06a28a482079ba836327b09994253e832801d0cd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_bytes_idval_TestMetafunc.test_bytes_idval.for_val_expected_in_valu.assert__idval_val_a_6": {"doc_hash": "7d9ed224008db88d472e3cd4b22f256015932d378d8426fd0a408266216698a8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_non_printable_characters_TestMetafunc.test_idmaker_non_printable_characters.assert_result_x00_": {"doc_hash": "6ac7e5627479fc60b18c42c181cfd59adb8d6e8b2f607223d6cc7b600ee308f1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_manual_ids_must_be_printable_TestMetafunc.test_idmaker_enum.assert_result_Foo_on": {"doc_hash": "de12a222474159f9e8d041ee4274f89790d96c1e48cd47ffbddec4fa93b3b328"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_idfn_TestMetafunc.test_idmaker_idfn.assert_result_10_0_I": {"doc_hash": "84621db5241e922048093c0cde07f085fc33ed23d40f5cd2715264289863fa3d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_idfn_unique_names_TestMetafunc.test_idmaker_idfn_unique_names.assert_result_a_a0_": {"doc_hash": "9e5d91f88a76a3aa0642dfbb36de687671ee70e7777cb581084a0664c66c20a9"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_exception_TestMetafunc.test_parametrize_ids_exception.result_stdout_fnmatch_lin": {"doc_hash": "94b5b388ed15a38a272f2e0d4a54db38abdd46a91b8e06139dbf35059811c4c9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_TestMetafunc.test_parametrize_indirect.None_4": {"doc_hash": "c58ee1a7dd07052597e67dafacae59f7daeef2ae20f6ec0117ba2a33983a67f9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_functional_TestMetafunc.test_parametrize_indirect_list_functional.result_stdout_fnmatch_lin": {"doc_hash": "44a68e474957b0e3ad1ddbf94d700ddec0996855f10a9648f54481dbdef5daa0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_error_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_false.result_stdout_fnmatch_lin": {"doc_hash": "eec9b2780a3b290b0ed82bfca21b69bf286ca064b963ddc429118e50b43fb6f9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_true_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_true.result_stdout_fnmatch_lin": {"doc_hash": "cc8c8d24d8cbe6da85cb1a4195aa706ef6c5af1fbe294ea979fac3660f1bf6a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_string_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_string.result_stdout_fnmatch_lin": {"doc_hash": "275c9a69acc0bfab1d99c4630366559a56082a94b0a86509da3c18cd01fec92d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_list_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_list.result_stdout_fnmatch_lin": {"doc_hash": "6922f774b456e93820278a33802fca1eef4b5a7f1927c7ee60e9373c0a0ffcdc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_argument_not_in_indirect_list_TestMetafunc.test_parametrize_argument_not_in_indirect_list.result_stdout_fnmatch_lin": {"doc_hash": "65973f31a0d4cf2a9b08a73ed5ecba7bb6d220919901a798446fc86039d3d367"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_gives_indicative_error_on_function_with_default_argument_TestMetafunc.test_parametrize_gives_indicative_error_on_function_with_default_argument.result_stdout_fnmatch_lin": {"doc_hash": "695ed2ab940caf52669f2a0f81c59f3e20e57be98b54b4e925d9743d71b5cd90"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_functional_TestMetafunc.test_parametrize_functional.result_stdout_fnmatch_lin": {"doc_hash": "56450656c6bd072bff743c89b743adb685be429a75cc28fb692bcf652cb69360"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_onearg_TestMetafunc.test_parametrize_onearg_indirect.None_3": {"doc_hash": "d6c3a7dfe686b35c985fd0c75d2db6f973bf2d2d77767773e9a427a7a5e64c91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_twoargs_TestMetafunc.test_parametrize_twoargs.None_4": {"doc_hash": "0731eca143425e8c0ec318561a9cfaa4bae469a77752618e9bda8e9d80e05bee"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_multiple_times_TestMetafunc.test_parametrize_CSV.reprec_assertoutcome_pass": {"doc_hash": "566588b7eb570eacbda2aa6f7977f64c059171885466fe5946bb0cd210d12502"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_class_scenarios_TestMetafunc.test_parametrize_class_scenarios.result_stdout_fnmatch_lin": {"doc_hash": "a0493b6d2b413c18a47aa9087c91572950bccd80e474b95857af1e79d460c7d8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_format_args_TestMetafunc.test_format_args.None_3": {"doc_hash": "5ef94c16b07e14383c722772bececed2cfb8da821be64508a8cd380f49f7f40c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional_TestMetafuncFunctional.test_attributes.result_assert_outcomes_pa": {"doc_hash": "a389218bde56698fa2c5d190f6410f38bed500c10e4127fa038edea3ab25d4af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_two_functions_TestMetafuncFunctional.test_two_functions.result_stdout_fnmatch_lin": {"doc_hash": "a69d607f7bc45792fce6da36076d89ca43a8f585a5b301bd5e0f39034bcddffe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_noself_in_method_TestMetafuncFunctional.test_generate_tests_in_class.result_stdout_fnmatch_lin": {"doc_hash": "478e5e62eff4b0d847d8d68d05dd7217a2cb657e4a0008119ab4f59fc4529afa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_two_functions_not_same_instance_TestMetafuncFunctional.test_two_functions_not_same_instance.result_stdout_fnmatch_lin": {"doc_hash": "b57bc7d8e0c926663cbbf76676df3513d437086664193ce80655553aa6435523"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_issue28_setup_method_in_generate_tests_TestMetafuncFunctional.test_issue28_setup_method_in_generate_tests.result_assert_outcomes_pa": {"doc_hash": "b29fee3c81fadf643c825dda87bb031d44b984b023b0301cc29a43dc576901e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_functional2_TestMetafuncFunctional.test_parametrize_functional2.result_stdout_fnmatch_lin": {"doc_hash": "f9df6f6afbcce31ef49feadad8d48f7f412edfc701efc58c6ae2ac4bdd49dd1f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_and_inner_getfixturevalue_TestMetafuncFunctional.test_parametrize_and_inner_getfixturevalue.result_stdout_fnmatch_lin": {"doc_hash": "0056a1aeb7b79ff52fea3581387999798015ebfcc9085ee20ad077b7facb6afa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_on_setup_arg_TestMetafuncFunctional.test_parametrize_on_setup_arg.result_stdout_fnmatch_lin": {"doc_hash": "4798a4da9718a51bd9c07ebd3d18c4952f81682ea9f1de4423f7c31955ab32a6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_ids_TestMetafuncFunctional.test_parametrize_with_ids.result_stdout_fnmatch_lin": {"doc_hash": "e53e28c08d018204224c65acd70333f2856aadd0e34e9963d15992de3d2f4895"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_without_ids_TestMetafuncFunctional.test_parametrize_without_ids.result_stdout_fnmatch_lin": {"doc_hash": 
"7f8b9f1d5a413be59c97af8721f41d65fef7a30412b59ebec1487cb78b9b1f43"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_None_in_ids_TestMetafuncFunctional.test_parametrize_with_None_in_ids.result_stdout_fnmatch_lin": {"doc_hash": "7014d4c11e5e928a38cfb19b90fa79e5f9a34063a6e47fec1fd960ba0ce78da1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_fixture_parametrized_empty_ids_TestMetafuncFunctional.test_parametrized_empty_ids.result_stdout_fnmatch_lin": {"doc_hash": "94b443b76e0491a88ee5f123d881b6d73d4fb3e27f4b4d5cf8f5bfcba6b11e8b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrized_ids_invalid_type_TestMetafuncFunctional.test_parametrized_ids_invalid_type.result_stdout_fnmatch_lin": {"doc_hash": "5b9fb53c7596a6e2e1f1650a0feaaeeb8595af7c90d9c0935ef7b62047d70485"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_identical_ids_get_unique_names_TestMetafuncFunctional.test_parametrize_with_identical_ids_get_unique_names.result_stdout_fnmatch_lin": {"doc_hash": "3a684a0b3d0995893e573b7d419c3ee5028bd8b8fc18fd4da9b03e77b28a732d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_scope_overrides_TestMetafuncFunctional.test_parametrize_scope_overrides.reprec_assertoutcome_pass": {"doc_hash": "c60accbca4bfddc74b353b6a4501d5873cad8ae7877a96a2effef2b188f6cb80"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_issue323_TestMetafuncFunctional.test_usefixtures_seen_in_generate_tests.reprec_assert_outcomes_pa": {"doc_hash": "f96f5b80ebc6f14da5323051ab083fdfd97d8f7481873b98b11655d5c9fac370"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_generate_tests_only_done_in_subdir_TestMetafuncFunctional.test_generate_tests_only_done_in_subdir.result_assert_outcomes_pa": {"doc_hash": "4246d0f577248650536abc7e6fcccd9978629561e1f96b418b2e4096bdb96b21"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto_TestMetafuncFunctionalAuto.test_parametrize_auto_scope.result_stdout_fnmatch_lin": {"doc_hash": "4b4f800310db4557872ed1bc7a5da543ccda9493fbe65394e923111071a351ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_indirect_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_indirect.result_stdout_fnmatch_lin": {"doc_hash": "10d9522734b7259532863592568e6242c678ee31687f80ace734229c72ef30ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_override_fixture_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_override_fixture.result_stdout_fnmatch_lin": {"doc_hash": "8ae3ad39d38159326ea390b14778c8b1b31a544ebb928f944df3e65448663572"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_all_indirects_TestMetafuncFunctionalAuto.test_parametrize_all_indirects.result_stdout_fnmatch_lin": {"doc_hash": "8d39f487422500d0c2e03e7bfab7763b032b1d8823f45aa36f46f8d7b553969b"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_some_arguments_auto_scope_TestMetafuncFunctionalAuto.test_parametrize_some_arguments_auto_scope.assert_class_fix_setup_": {"doc_hash": "46e46f93370811bc23d2ff9d41123ea512deba4dc7d704ec76eb197bfdab5187"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_issue634_TestMetafuncFunctionalAuto.test_parametrize_issue634.None_1": {"doc_hash": "6e2f47a3ce44474e5b6dfe5da04f1935eda7da6ed97a70b45ef3632ea934ca1e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization_TestMarkersWithParametrization.test_simple_mark.None_3": {"doc_hash": "a288a5695856219d57c5e9300e05a9f6c9459f66caf3123eec47aa276a8a0bd9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_select_based_on_mark_TestMarkersWithParametrization.test_select_based_on_mark.assert_len_fail_0": {"doc_hash": "8bd5bd50a7fe8f28ff641edcdfebad4a4e3d3e8ce13be0be77e3bc3234445305"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_simple_xfail_TestMarkersWithParametrization.test_simple_xfail.reprec_assertoutcome_pass": {"doc_hash": "4270679feff0d48ae0f8c94b07f9ec353b0499f3de45650ea20413b635a78293"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_kwarg_TestMarkersWithParametrization.test_xfail_with_kwarg.reprec_assertoutcome_pass": {"doc_hash": "2a901767f204c3e7e97144f23447329b934bb6779c10259fe5aca5fc5012844b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_arg_and_kwarg_TestMarkersWithParametrization.test_xfail_with_arg_and_kwarg.reprec_assertoutcome_pass": {"doc_hash": "f2a41fd63522ccf829ba58a3637de04a1bd59cc05811c06498d7a359aefee81a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_passing_is_xpass_TestMarkersWithParametrization.test_xfail_passing_is_xpass.reprec_assertoutcome_pass": {"doc_hash": "c1843dc7e8edd50408ba77971bcb045910a4fc9467841feb0385c4c5ad6fe5f2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_marked_value_TestMarkersWithParametrization.test_parametrize_marked_value.reprec_assertoutcome_pass": {"doc_hash": "6f90805249c161671f0509813db6f402f1355016e3d7062971cd1025a94d21e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_pytest_make_parametrize_id_TestMarkersWithParametrization.test_pytest_make_parametrize_id.result_stdout_fnmatch_lin": {"doc_hash": "7444c60d8e1453d453220c0a9341cfe645e6bf15376b43b99f38a7dcde0a6ab8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_as_contextmanager_TestRaises.test_raises_as_contextmanager.result_stdout_fnmatch_lin": {"doc_hash": "907da8b938514ba019214e8f99a3bf846e235699e5e12abe4846bc089c761a4d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_does_not_raise_TestRaises.test_does_not_raise.result_stdout_fnmatch_lin": {"doc_hash": "adf5a9533935d810c5eb9598fca48de8fabdd3b96b6d64d9bbf393657f90c7b4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_does_not_raise_does_raise_TestRaises.test_does_not_raise_does_raise.result_stdout_fnmatch_lin": {"doc_hash": 
"b34a0cb1657fcc8062bcbe25b2587d8b0331273dd548839ebd7df8eea60897dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_fixtures_in_conftest_test_fixtures_in_conftest.result_stdout_fnmatch_lin": {"doc_hash": "82b2d4f848ec295cfdbf1b0701a508855457a96931ceb43dbeb107c927c0c5e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_should_show_fixtures_used_by_test_test_should_show_fixtures_used_by_test.result_stdout_fnmatch_lin": {"doc_hash": "3c4c528a5fc0cbafbe8cecbbc6a67622dc5639f1180713946e94f2a999dfd30a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_verbose_include_private_fixtures_and_loc_test_verbose_include_private_fixtures_and_loc.result_stdout_fnmatch_lin": {"doc_hash": "3f97cd7fe5893c9f0604fcbd9388ccf6ef43d3760d296289d525c538781d9e5a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_doctest_items_": {"doc_hash": "cdc90c34f4806524957ffc3bd673c619a378e0193d8e81cf3b31438290ed1805"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_FilesCompleter_FilesCompleter.__call__.return.completion": {"doc_hash": "816fa9f1aed74932682bc390ad86536e255d16e45aa29c3ef60ba7946114286f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_TestArgComplete_": {"doc_hash": "032b65170601b6a2e89e4fa69ada4932bceb874ed53a9348ead9cadbd1e018a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_TestImportHookInstallation.test_pytest_plugins_rewrite.result_stdout_fnmatch_lin": {"doc_hash": "de8a8b0856cf2a8ca0b0d2d9ce0bf5a57136707a63e1091d1215bb304a1517a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names.assert_result_ret_0": {"doc_hash": "5db779b513909f13734a127747e579ef803781b057a6ca95e71b05e42696971f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_correctly_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_correctly.assert_result_ret_0": {"doc_hash": "f6530be31166b33df8d2234ba1db0a58eae51e7172cfd2e3e62e138d84abbe2e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_installed_plugin_rewrite_TestImportHookInstallation.test_installed_plugin_rewrite.result_stdout_fnmatch_lin": {"doc_hash": "9cc397be131fa82fba2acf75a047b0f12a74b2f38c2d300290bc70db68bcff76"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_rewrite_ast_TestImportHookInstallation.test_register_assert_rewrite_checks_types.pytest_register_assert_re": {"doc_hash": "53571a98055766a2626762dacb40374c35b414b316499aa268209742736ad715"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_TestAssert_reprcompare.test_list.assert_len_expl_1": {"doc_hash": "e5e12d2df7831b8793389f4c3a08d9cbeed765b7be73fbb4f057e31ed53ee054"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_omitting_with_verbosity_1_TestAssert_reprcompare.test_dict_omitting_with_verbosity_2.assert_lines_2_b_": {"doc_hash": "a15ed21568e47eb403a14396b45706c2d9ff471fcb6652482967cc47fbfa1d08"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_different_items_TestAssert_reprcompare.test_dict_different_items.None_1": {"doc_hash": "c2cad088ef4d7667d03686b4aef0fad201eea6cd9f061b4ceec9abb721250b08"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_sequence_different_items_TestAssert_reprcompare.test_sequence_different_items.None_1": {"doc_hash": "6f09a897a677261f9db95c96b7212b2851c54bacb41018bda981cf16e3ae7b33"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_set_TestAssert_reprcompare.test_Sequence.assert_len_expl_1": {"doc_hash": "d0405c9ea1dd7b763915394bf627da4571d0caefedb5819b91b3b26e653b9da2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass_TestAssert_reprcompare_dataclass.test_dataclasses.result_stdout_fnmatch_lin": {"doc_hash": "5795f2ead33a3525da18b38cc2e6d725ab4e77bdeb10882d8a1d02851b7fc3af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_dataclasses_verbose_TestAssert_reprcompare_dataclass.test_dataclasses_verbose.result_stdout_fnmatch_lin": {"doc_hash": "1ed5e0355ae1f57266db203ba63286d202f6c2083b8488560b3408722011b4dd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_dataclasses_with_attribute_comparison_off_TestAssert_reprcompare_dataclass.test_comparing_two_different_data_classes.result_assert_outcomes_fa": {"doc_hash": "5e341d88f4e2cdca71bff2b3b9d1632a19ecb864124892dc2342647e0f9dc88d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_with_attribute_comparison_off_TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes.assert_lines_is_None": {"doc_hash": "66f8608e4ec7158af6444f0f5f8894a1d1a4ad06ed5899543d1c558c2f5f12d6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation_TestFormatExplanation.test_fmt_newline_escaped.assert_util_format_explan": {"doc_hash": "9159df5a4239013585513a7e772cd424bf0f144ccf60a4c9deff7c5caee359cd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation.test_fmt_newline_before_where_TestFormatExplanation.test_fmt_newline_before_where.assert_util_format_explan": {"doc_hash": "d5158ac43aa4554e84f384b5278168f280946be920ba4061f3d0c3dd3c913b14"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation.test_fmt_multi_newline_before_where_TestFormatExplanation.test_fmt_multi_newline_before_where.assert_util_format_explan": {"doc_hash": "10d307589d248c17a1fbf589f490778af5bb6851eb7e4aa09a1e85698832de78"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation_TestTruncateExplanation.test_doesnt_truncate_at_when_input_is_5_lines_and_LT_max_chars.assert_result_expl": {"doc_hash": "d14c48405a379d038a84c97804466bc78eac629a60c7adec650d343a248f76d1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_given_list_of_empty_strings_TestTruncateExplanation.test_truncates_at_8_lines_when_given_list_of_empty_strings.assert_last_line_before_t": {"doc_hash": "6c31d9ce44e41e944036d0269140d804f67de47c42b7778c09915c678ae43871"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars.assert_last_line_before_t": {"doc_hash": "b6dfe95c936623d35fb547c0862da30827d717bd053ab4a9a452193783138297"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars.assert_last_line_before_t": {"doc_hash": "09da82507236110b3592f96a190ee6f1c27f5206504a7a593e87e18bb605b2d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars_TestTruncateExplanation.test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars.assert_last_line_before_t": {"doc_hash": "f9c4a65a4395a5df85627da39bbe7d9d3598928b3df0aaed48fe14de390f812a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_1_line_when_first_line_is_GT_max_chars_TestTruncateExplanation.test_truncates_at_1_line_when_first_line_is_GT_max_chars.assert_last_line_before_t": {"doc_hash": "59fb4487c8bb0233f7cc615f7a3e3934ec10eb230df56fdca0804f174fc9f0cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_full_output_truncated_TestTruncateExplanation.test_full_output_truncated.None_5": {"doc_hash": "d64d0081d3fa344190a1c858c2fc75550a24b771ff46839756d3330ed6cf81d6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_sequence_comparison_uses_repr_test_sequence_comparison_uses_repr.result_stdout_fnmatch_lin": {"doc_hash": "1030015746b908c83394cae03813aaea66dfbec6cce1b9e55db6e8603247d1cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertrepr_loaded_per_dir_test_assertrepr_loaded_per_dir.result_stdout_fnmatch_lin": {"doc_hash": "9e82613dcd023170122fbec2e13e70e0d0074bf4f22df350573de9bacb4f68b5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_traceback_failure_test_traceback_failure.None_1": {"doc_hash": "2eb122ade4ba06439b3968b50fa7d46bfafcde736bb32be18fc082f7eada79d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_exception_handling_no_traceback_test_exception_handling_no_traceback.result_stdout_fnmatch_lin": {"doc_hash": "2f1945d2f4404ac395854ec49fddd4f38008fb17b66da4d6c2fc3ab2deec7f11"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite_TestAssertionRewrite.test_place_initial_imports.None_9": {"doc_hash": "39d3ba028089be50748025494a244195493784a043ff40e1391619ba7b27a0e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assert_already_has_message_TestAssertionRewrite.test_assertion_messages_bytes.result_stdout_fnmatch_lin": {"doc_hash": "5e83d8f62ec59c7df81f3ebce0034e3ac917f22fdefd84af1eb6e8aabc3b4b69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_call_TestAssertionRewrite.test_call.None_6": {"doc_hash": "b105edd9ae0f7127827f745bbd244677a0090d9f1872fdf6ac23c60b01021cbb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_len_TestAssertionRewrite.test_len.if_request_config_getopti.else_.assert_msg_assert_10_": {"doc_hash": 
"66d69d2b067e03c7d1b6653c843d710d86577fd7c8c3c82735193266ade52d41"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_custom_repr_TestAssertionRewrite.test_custom_repr_non_ascii.assert_UnicodeEncodeErro": {"doc_hash": "bfb440b32baa5ebfbf956b1219daea1058a72b970581516d2aea4abbc14aa21e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport_TestRewriteOnImport.test_pycache_is_readonly.try_.finally_.cache_chmod_old_mode_": {"doc_hash": "1177c9a6180151301bd579635a5edd7d2448a62f2079ebe185d8e68707dabef4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_pyc_vs_pyo_TestRewriteOnImport.test_pyc_vs_pyo.assert_tagged_pyc_in": {"doc_hash": "91705719846300c47208c33d6f8a8e99ec02e4bcc9e45c27f3bea61fe46b7c62"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_remember_rewritten_modules_TestRewriteOnImport.test_remember_rewritten_modules.assert_warnings_": {"doc_hash": "b77f026914c2f881a863d905b4f825d7ea36b37ba6b90ffb8bed4243c83adec5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_resources_provider_for_loader_TestAssertionRewriteHookDetails.test_resources_provider_for_loader.result_assert_outcomes_pa": {"doc_hash": "0b5e918397b37e6716dc5fe3a4bdda7d105b48751a26c6d359909154d289a7a5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_read_pyc_TestAssertionRewriteHookDetails.test_read_pyc._no_error": {"doc_hash": "2854adc40017dd4f18e6d9a85b9603a4796465e66a4d55883c3a775e49a91637"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_get_data_support_TestAssertionRewriteHookDetails.test_get_data_support.result_stdout_fnmatch_lin": {"doc_hash": "a957a1119438dd0ddfec87b11d182ea3a82152931afd58408b238aac5249e7df"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestIssue925_TestIssue925.test_many_brackets.result_stdout_fnmatch_lin": {"doc_hash": "1117eb3264aa1ad6d07de61cca5bbcf94db8a9441d47c35e77f03b36e2b543fe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestIssue2121_TestIssue2121.test_rewrite_python_files_contain_subdirs.result_stdout_fnmatch_lin": {"doc_hash": "e7b658f9ed7f8e36f052a15450939dba1e299b38f6fbd2ba794677e992e955f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_source_mtime_long_long_test_source_mtime_long_long.assert_result_ret_0": {"doc_hash": "01755dbd85a5b3c49fafe1ae3164a2b94163cc9cab4f515c2d727ff20424ce92"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_rewrite_infinite_recursion_test_rewrite_infinite_recursion.assert_len_write_pyc_call": {"doc_hash": "371af9916bbc8ac17d1c1d9bd2a74154d297ea094f531878d3e7c65ff4b7c1d4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout_TestEarlyRewriteBailout.hook.return.hook": {"doc_hash": "e00ae538671a63a855dfe9084ad701a888334f5d94fbdef7e8f67c35f6aa5cef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_basic_TestEarlyRewriteBailout.test_basic.None_7": {"doc_hash": "f276c8a347cfa93ce783093a7980406e0160e76898ef358ce6fa3ad2814300a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_config_cache_TestNewAPI.test_config_cache.result_stdout_fnmatch_lin": {"doc_hash": 
"36d13c33fe63605ef47183da3d4d1141369630fad81998be6e7764a26310d85c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cachefuncarg_TestNewAPI.test_cachefuncarg.result_stdout_fnmatch_lin": {"doc_hash": "99cb146ace29f4ab828de55042a680416f4a2ff736bdee8592d851cff13b8bfc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_reportheader_test_cache_reportheader.result_stdout_fnmatch_lin": {"doc_hash": "e61c4bf83ef06ab7257c466169b0735dc4da22911a3ce214e2fc6b9da95247fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_reportheader_external_abspath_test_cache_reportheader_external_abspath.result_stdout_fnmatch_lin": {"doc_hash": "131ada3742e6f60744423276cc78172167ec2bd638eecbdedf7ab218296a451e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_show_test_cache_show.None_5": {"doc_hash": "5143a834b0d2823e08c1ad8842d78fe80164631bb6e428cc24147358b539f3e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed_TestLastFailed.test_lastfailed_usecase.None_6": {"doc_hash": "611c307662febf81ee1db7a2d3c2175d6b14f056b5635325985ebe2d460b4d23"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_difference_invocations_TestLastFailed.test_lastfailed_difference_invocations.None_6": {"doc_hash": "d32bc8b75fbe6fa8a071def49d35a79f35f29ea64ea38ad27e8992a5f45dfe31"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_xpass_TestLastFailed.test_non_serializable_parametrize.result_stdout_fnmatch_lin": {"doc_hash": "f11d5375a19a16bcaebb48b6a039c169e669fe276d5e731006dfe3d9e62561ee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_terminal_report_lastfailed_TestLastFailed.test_terminal_report_lastfailed.None_4": {"doc_hash": "8a4ac8213c4cea4bfb7ed130649789c2294d24777304e387da7f4fd79a815ad6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_terminal_report_failedfirst_TestLastFailed.test_terminal_report_failedfirst.None_2": {"doc_hash": "20c9eab20e17cb01f3cad4ad4d675fcab71712e7ac33ed6485bb61944c74652c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_collectfailure_TestLastFailed.test_lastfailed_collectfailure.None_2": {"doc_hash": "4d67a3d664e83681f6dfc4d724fe1e5f057099fa838c6cbe26adb8575147a3e7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_failure_subset_TestLastFailed.test_lastfailed_failure_subset.None_3": {"doc_hash": "8b07f891b27df1a5041ba0c5d1e082115adc2db378fb1fbfc93b0c2af2675c38"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_creates_cache_when_needed_TestLastFailed.test_lastfailed_creates_cache_when_needed.assert_os_path_exists_p": {"doc_hash": "1f9c5cd3445cb889a6e193e11fa306e5154a23225790a40c3eeb089d60c532e6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_xfail_not_considered_failure_TestLastFailed.test_xfail_strict_considered_failure.assert_self_get_cached_la": {"doc_hash": "359c2c8a99423c6fa32538ef24cc87514a5ba62415a6c05bf125db3c4ec1dc55"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_failed_changed_to_xfail_or_skip_TestLastFailed.test_failed_changed_to_xfail_or_skip.None_4": {"doc_hash": 
"b19e92587b1bc4584672bd13cd4f6493ba45a8dd54fe3222f88536446f179352"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lf_and_ff_prints_no_needless_message_TestLastFailed.get_cached_last_failed.return.sorted_config_cache_get_": {"doc_hash": "48519469c7d977bca0163732cccd90444806db60320ffe571587a4b191de5bc6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_cache_cumulative_TestLastFailed.test_cache_cumulative.None_4": {"doc_hash": "9fb8143dbc64ac29f1f6827f267fb9303f6c9d3a9bf3358be7c4d0460aa78c05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_no_failures_behavior_empty_cache_TestLastFailed.test_lastfailed_no_failures_behavior_empty_cache.None_3": {"doc_hash": "d9ec51654970e7004c8f62c6d83158b252cc67dc3b3aebd86a93092f712c4b75"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_skip_collection_TestLastFailed.test_lastfailed_skip_collection.None_4": {"doc_hash": "01eb5a66c8c27851e9ca34244506a360ba5c8bdee2e09858329fa62e2629746f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestReadme_TestReadme.test_readme_failed.assert_self_check_readme_": {"doc_hash": "15e5c67fdc82d057a5a5abddf868b2fa9527774f8e907c4bccb6270be96d75fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_gitignore_test_gitignore.None_1": {"doc_hash": "0ffe3e9076d418816bbcf8fe6b02dba3fec89859caefe407b884dc546874c215"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing_TestPerTestCapturing.test_capture_and_fixtures.result_stdout_fnmatch_lin": {"doc_hash": "327359d8567826c62c0b6455c886b83e1be01588e61c65c40a10e2b76cf84f47"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_capture_scope_cache_TestPerTestCapturing.test_capture_scope_cache.result_stdout_fnmatch_lin": {"doc_hash": "bcaa30d6161a55f662574ba7d9c79b618a0253a6df0fb89ee7ef9cee865fe93a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_no_carry_over_TestPerTestCapturing.test_teardown_capturing.result_stdout_fnmatch_lin": {"doc_hash": "029364b7857b4526cbb4b107680d8820aa408c8a500c9b380c1472ad207e7910"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_teardown_capturing_final_TestPerTestCapturing.test_teardown_capturing_final.result_stdout_fnmatch_lin": {"doc_hash": "dee6d470e2d74567463fd6bf81cf98755253909a4173475ac1c91221dcd28829"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_capturing_outerr_TestPerTestCapturing.test_capturing_outerr.result_stdout_fnmatch_lin": {"doc_hash": "2cc965aec8f616276bb99beb11e338505ea3b0df4108a58031cf09335a5844aa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction_TestLoggingInteraction.test_logging_and_immediate_setupteardown.for_optargs_in_captu.assert_closed_not_in_s": {"doc_hash": "64ca84ded9f5e0cf965ad74299d1c6ebfaef8a27e502ce3091bb93ecf946878f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_logging_and_crossscope_fixtures_TestLoggingInteraction.test_logging_and_crossscope_fixtures.for_optargs_in_captu.assert_closed_not_in_s": {"doc_hash": "5972581ef5f4ed09e5479392490a343bf1f0d4a5044e705c77feef9f90775b3f"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_conftestlogging_is_shown_TestLoggingInteraction.test_conftestlogging_is_shown.assert_operation_on_clos": {"doc_hash": "43ccb92effebbc9ee83241e5d27d7c5b139e18d7efb250db3e1bc0a9e8c376cb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_conftestlogging_and_test_logging_TestLoggingInteraction.test_conftestlogging_and_test_logging.assert_operation_on_clos": {"doc_hash": "eda2948e1822aacb8f349733bf722589081aa3100d62464ba30ac3f4b6f2bfc9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_logging_after_cap_stopped_TestLoggingInteraction.test_logging_after_cap_stopped.assert_": {"doc_hash": "d19aa994c263d489333c3ade29524c7be6fd3ccbed4e54e71e4158759b092059"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture_TestCaptureFixture.test_capsyscapfd.result_stdout_fnmatch_lin": {"doc_hash": "163da6f790edb87bc76ab431a8a7375af13002cedd06fbcdbfbcfcf734952ac4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capturing_getfixturevalue_TestCaptureFixture.test_capturing_getfixturevalue.result_stdout_fnmatch_lin": {"doc_hash": "005a9728af021e6a80e23ce9b0456d61ec01a3518d4afbba516f9a59b2c074cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capfdbinary_TestCaptureFixture.test_capfdbinary.reprec_assertoutcome_pass": {"doc_hash": "a87b08095df0140b150fd3cbfaabfb51fd5439d0bad75b24c0c588a4859f5a8c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_partial_setup_failure_TestCaptureFixture.test_capture_and_logging.assert_closed_not_in_re": {"doc_hash": "de52a24bd6a5030e1db7cd889dfd36ced683494082a7714360ebef6724ef8c64"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_fixture_use_by_other_fixtures_teardown_TestCaptureFixture.test_fixture_use_by_other_fixtures_teardown.reprec_assertoutcome_pass": {"doc_hash": "19a9bb809ccf8e4c175871c0ddad61b9a2a1136830d3cd711ee92eea79b620d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_setup_failure_does_not_kill_capturing_test_setup_failure_does_not_kill_capturing.result_stdout_fnmatch_lin": {"doc_hash": "6caec2d31bc1a0d8b1229626db8acb3557610ece04615ccf73ae7e035cbfae43"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_error_during_readouterr_test_error_during_readouterr.result_stderr_fnmatch_lin": {"doc_hash": "dc6d99bcde99abc44f17d613057bd38e6cb1cad5b513fa8a80edb951172b3c0b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureIO_TestCaptureIO.test_write_bytes_to_buffer.assert_f_getvalue_f": {"doc_hash": "13bda5ab71a87a471d09983ffa4e137649f54d91a5121a044019327fb186f0bb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture.test_simple_resume_suspend_saved_fd.try_.finally_.os_close_new_fd_": {"doc_hash": "9e62630953f7dbecf6b830d069a0f07eebd5f91ddc4885b8ca28a2cf586633e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture_TestStdCapture.test_reset_twice_error.assert_not_err": {"doc_hash": "0c1349e1bb59e7a2f6dc0a0097f3d2d00a41f87c802263b091698c63fe104e85"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture.test_capturing_modify_sysouterr_in_between_TestStdCapture.test_capturing_modify_sysouterr_in_between.assert_sys_stderr_olde": {"doc_hash": 
"a252d3ab570e6bfc273d491ec0e2eea19e77b681faebb6ea19f0522135585c0d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFD_TestStdCaptureFD.test_many.with_lsof_check_.for_i_in_range_10_.cap_stop_capturing_": {"doc_hash": "880914021e0499bd001a060c291df641fe39086b3ebbae1f59ce9d6fa16ddb9f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD_TestStdCaptureFDinvalidFD.test_stdcapture_fd_invalid_fd.assert_result_parseoutcom": {"doc_hash": "84cbfb585180a0b75922fb065db7d171bf450fa775b0a7878d529893383286f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capturing_and_logging_fundamentals_test_capturing_and_logging_fundamentals.assert_atexit_not_in_re": {"doc_hash": "76a9ab746b7b659ee9ce6794d3790acebf0b8a2998c3c1c489f986709cdccc89"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_global_capture_with_live_logging_test_global_capture_with_live_logging.None_7": {"doc_hash": "2a481465bfe16d602e89baae6815fd217ebc98f228b66c84885c714e6e4388dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_with_live_logging_test_capture_with_live_logging.assert_result_ret_0": {"doc_hash": "e6a2c2b78caece42b1d151b25dd54fff2cc7608e998121d1b9667358d24fd294"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_getcustomfile_roundtrip_TestCollector.test_getcustomfile_roundtrip.assert_isinstance_nodes_0": {"doc_hash": "306e8d35785b0ef29d25171705917d8834dda47dc6bfe7d89a6c4b28839d1127"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_can_skip_class_with_test_attr_TestCollector.test_can_skip_class_with_test_attr.result_stdout_fnmatch_lin": {"doc_hash": "6874446475c1cdb7778a3dcb703aad7ad4328ad63f6f0201e85a3d52df84cf36"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS_TestCollectFS.test_ignored_certain_directories.assert_test_found_in_s": {"doc_hash": "b2b7947ff43214eaaac8d9c72b490dd056356e00878e3f3e0805b8e52d916688"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_ignored_virtualenvs_norecursedirs_precedence_TestCollectFS.test_ignored_virtualenvs_norecursedirs_precedence.assert_test_invenv_in_r": {"doc_hash": "3a826af73f02410f58d443393b2b7c3a320601262d5d078e771f451abbe1d01e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test__in_venv_TestCollectFS.test__in_venv.None_1": {"doc_hash": "a4ab98d69499edf985d68ad38c1954fa8acb8dbd9d2bf4fd503f6c57f5602870"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_custom_norecursedirs_TestCollectFS.test_custom_norecursedirs.rec_assertoutcome_failed_": {"doc_hash": "22c1819113caeaf61b58fefc99e4ec3a5fb6f42d2df87d237d6eea19d0a9048b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_testpaths_ini_TestCollectFS.test_testpaths_ini.None_1.assert_x_name_for_x_in_i": {"doc_hash": "724af26827564745f516b4965f4e8143ac7c83567f1c03d26542763bfc5a5dff"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestPrunetraceback_TestPrunetraceback.test_custom_repr_failure.result_stdout_fnmatch_lin": {"doc_hash": "d155648e530932d64aa026842bb8dd0127c26627c05e7b234d088d21466165e2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestPrunetraceback.test_collect_report_postprocessing_TestPrunetraceback.test_collect_report_postprocessing.result_stdout_fnmatch_lin": {"doc_hash": 
"dc354cc669ebdae04fbe4551d8f362cc0c663dbd666574b547faea17daa20496"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests_TestCustomConftests.test_ignore_collect_path.result_stdout_fnmatch_lin": {"doc_hash": "a7e3a76d3435d33e78be1cd344995f64804a80e7e2df2ae8ab9e64b16167536c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_ignore_collect_not_called_on_argument_TestCustomConftests.test_ignore_collect_not_called_on_argument.None_2": {"doc_hash": "db301d94fde32b9f616fb21e95f3a027b2206571bb884bcf0cab3c845f2e1a09"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_collectignore_exclude_on_option_TestCustomConftests.test_collectignore_exclude_on_option.assert_passed_in_result": {"doc_hash": "45bbe9ab681b751b25819c4c548df2cf5c219262fcd5849921d661a01dad549b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_collectignoreglob_exclude_on_option_TestCustomConftests.test_collectignoreglob_exclude_on_option.None_4": {"doc_hash": "a9f9c336da38f9f92e78b04b7edf7e0a4700151b203c0872d51e6f54695e95f8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_pytest_fs_collect_hooks_are_seen_TestCustomConftests.test_pytest_fs_collect_hooks_are_seen.result_stdout_fnmatch_lin": {"doc_hash": "aaaca82ae5afc9c981f057f73eb0f887f7adb5e3985c63a0635dc3214932294b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_pytest_collect_file_from_sister_dir_TestCustomConftests.test_pytest_collect_file_from_sister_dir.result_stdout_fnmatch_lin": {"doc_hash": "046959429d586095d7555159a00e373f8621c8642802a60f331a9bb6a5ad3bcb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_protocol_single_function_TestSession.test_collect_protocol_single_function.assert_x_name_for_x_in_s": {"doc_hash": "d640a12912e52bb00b9a1f8686f00bd84d09bb9f35f3d3a1c4bad043e537df75"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_custom_nodes_multi_id_TestSession.test_collect_custom_nodes_multi_id.assert_len_self_get_repor": {"doc_hash": "6b6bfa186138cbda1ce4d0a094f28b842605eab545867535dcf7de079f9c8518"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_subdir_event_ordering_TestSession.test_collect_subdir_event_ordering.hookrec_assert_contains_": {"doc_hash": "4b7e2beea36c8d47170073d1b3e19c498f4c13484e40c065fbdaaae2499a7e2f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_two_commandline_args_TestSession.test_collect_two_commandline_args.hookrec_assert_contains_": {"doc_hash": "cf0f9f47a56be03d19117da32dff7890ead0b74c9e8da01a00e318161ffcf2b9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_serialization_byid_TestSession.test_find_byid_without_instance_parents.assert_x_name_for_x_in_s": {"doc_hash": "72c593a311dd0131ff1d503108e2363506b622f2bd0a3a3ac71359e6714ee3fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems_Test_genitems.test_check_collect_hashes.for_numi_i_in_enumerate_.for_numj_j_in_enumerate_.if_numj_numi_.assert_i_j": {"doc_hash": "78d680e6405fe3f2917da532d4856d5af3923747549f29c1f4c07bfdb466afd1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems.test_example_items1_Test_genitems.test_example_items1.print_s_": 
{"doc_hash": "5996bad2cf1b01613dc036b64aa8072632994bf217be0b53547c5e89cb6bd822"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems.test_class_and_functions_discovery_using_glob_Test_genitems.test_class_and_functions_discovery_using_glob.assert_ids_MyTestSui": {"doc_hash": "76a0eb207af194309ef000399dc3fada0340465e5033bf6cb1e0760766995c2b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_matchnodes_two_collections_same_file_test_matchnodes_two_collections_same_file.res_stdout_fnmatch_lines_": {"doc_hash": "9bc5690ae59fb11f723e24cc71a81b9ab5327949fcc7fb7be86aa114533c27bb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestNodekeywords_TestNodekeywords.test_issue345.reprec_assertoutcome_pass": {"doc_hash": "9f2d253a6b22509e91f6e3afe917910d4446fbe144ceba7a694a0f84b041be18"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_COLLECTION_ERROR_PY_FILES_COLLECTION_ERROR_PY_FILES.dict_": {"doc_hash": "1fc33287f72dfd72ab41c946242bfed3034b2dd11e1fdd0b1a9bd5097a74ccad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_error_test_exit_on_collection_error.res_stdout_fnmatch_lines_": {"doc_hash": "182daad620b82a5f671e6b32079a2fbd7ceb56fe9174be43efbe1d6478634b3d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_fixture_scope_sibling_conftests_test_fixture_scope_sibling_conftests.res_stdout_fnmatch_lines_": {"doc_hash": "fba4d42943a1012a735ab6fa5bbadb12b0929d95182e2a94b998e0089334b102"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_with_chdir_during_import_test_collect_with_chdir_during_import.None_4": {"doc_hash": "efb2a7e997d1f112b3d737fc90ed3e403a4b186ba070e5fe1fa6cfe6f9154179"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pyargs_with_testpaths_test_collect_pyargs_with_testpaths.result_stdout_fnmatch_lin": {"doc_hash": "5c1034cefc78143fdab59c796b445564e118b1acc4807bd4d0984414fd128dba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_file_arg_test_collect_symlink_file_arg.assert_result_ret_0": {"doc_hash": "0d3eb8f29b4dacaa8f03ca9557b24ff83b96673c7c09fa8fef41e4e598a624d1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_out_of_tree_test_collect_symlink_out_of_tree.assert_result_ret_0": {"doc_hash": "0670cc92d559bac51914b064b041da75aad9d86d6dbeba06b941c7592b4c8273"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pkg_init_and_file_in_args_test_collect_pkg_init_and_file_in_args.None_3": {"doc_hash": "62d4d68a88a5661c58102a54097b0a7f4bb551c53f95a2458a2da4b6e59f8798"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_sub_with_symlinks_test_collect_sub_with_symlinks.result_stdout_fnmatch_lin": {"doc_hash": "2a46e2bb9f26f4083bed648de13413d60d3ac3d8b1e9984772a40806e8884189"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_get_real_func_test_get_real_func.None_2": {"doc_hash": "3f603a4d7bfcaba057551b0f8b64d1b3a9a1a584f0a3879f6f815f20527dfce8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI_TestConfigAPI.test_getoption.assert_config_getoption_": {"doc_hash": "58874291652aa78b7f05b25f781994d72945e87dc8637ec4b04b106a6a3942c4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_bool_TestConfigAPI.test_addini_bool.assert_config_getini_str": 
{"doc_hash": "da266237034b078d9fb1d4024dda33a7f570fa25934eb5b2f451c51814ff63e2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addinivalue_line_existing_TestConfigAPI.test_addinivalue_line_existing.None_3": {"doc_hash": "758531556958b577b26f8cf567d7f5bd98d8b028cbde0f8a2b1a6b5fb74c6585"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addinivalue_line_new_TestConfigAPI.test_addinivalue_line_new.None_4": {"doc_hash": "2e4716ddba52a548de73dad5d3a5c9cbc962716b23be60bffd1b803495a448d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_confcutdir_check_isdir_TestConfigAPI.test_confcutdir_check_isdir.assert_config_getoption_": {"doc_hash": "324c8c6a3a55f83059dad594619ed6f5becf47e2bfae3345bdb1d87445c320f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_iter_rewritable_modules_TestConfigAPI.test_iter_rewritable_modules.assert_list__iter_rewrita": {"doc_hash": "9129ed39554fc3d688b6bd7bc0c617494316ee1ddf6262b130d073bdf5926872"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigFromdictargs.test_inifilename_TestConfigFromdictargs.test_inifilename.None_5": {"doc_hash": "491e1e1c1a2653218da88e7c3ac31256a2a110a4ce792ec01879c21626d65106"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_preparse_ordering_with_setuptools_test_preparse_ordering_with_setuptools.assert_plugin_x_42": {"doc_hash": "4baf9ceaec74ea3dfce56cc7633efc196ba609b408314f3752be36ad77e14733"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_plugin_preparse_prevents_setuptools_loading_test_plugin_preparse_prevents_setuptools_loading.if_block_it_.else_.assert_": {"doc_hash": "56dc77040aa3900756f3f8de1cbb5cf9458083de117827de56788c0fd6fadcd5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_cmdline_processargs_simple_test_invalid_options_show_extra_information.result_stderr_fnmatch_lin": {"doc_hash": "43f8d1180dfc0eab4ae864013cec96f1676d7f66512bd28db6871bb741a4df84"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_consider_args_after_options_for_rootdir_test_consider_args_after_options_for_rootdir.result_stdout_fnmatch_lin": {"doc_hash": "fedfb51d0a9df2a9c17b9eac5efac10e9078b53281528f2e3283d78f9eef3109"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_toolongargs_issue224_test_config_in_subdirectory_colon_command_line_issue2148.assert_result_ret_0": {"doc_hash": "f8bcb4302d72a7948cc9c4cb0cae3638e35eb8dc71aa7d7c124fb1381abc3008"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_notify_exception_test_notify_exception.None_5": {"doc_hash": "d4475d70e55035ad666a042d408746636ae21f295e799e0688e354bf3bbad227"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_collect_pytest_prefix_bug_integration_test_collect_pytest_prefix_bug.assert_pm_parse_hookimpl_": {"doc_hash": "4e01d7222dd119deb4003e58c7c484bafd99f0a3b78a5025044ed0b5045dbef3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs_TestOverrideIniArgs.test_override_ini_names.None_4": {"doc_hash": "40a24f191b950d26b506b7b990b6c213a1c11b803e351102770e7e5bcf6d2229"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_ini_pathlist_TestOverrideIniArgs.test_override_ini_pathlist.result_stdout_fnmatch_lin": {"doc_hash": "6ebe145540ec9bed0d38bff20c361de97babfe4ea7bee2407666c4c76384fb61"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_multiple_and_default_TestOverrideIniArgs.test_override_multiple_and_default.result_stdout_fnmatch_lin": {"doc_hash": "090b89f2fd80e5c35d3f489552ffe94994024a4f510d83d0a15bc66b6c15d674"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_ini_usage_error_bad_style_TestOverrideIniArgs.test_override_ini_handled_asap.result_stdout_fnmatch_lin": {"doc_hash": "1ba70227947b5b2c770d3087ef476abcff467fc026147aaf3b14e765cfda3897"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_addopts_from_ini_not_concatenated_TestOverrideIniArgs.test_override_ini_does_not_contain_paths.assert_config__override_i": {"doc_hash": "171eeca3359b3cb881ea6e838ac25dabe1931201ee3bd21529bf54bd17ea355d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_and_version_after_argument_error_test_help_and_version_after_argument_error.None_2": {"doc_hash": "9341591cb9d9bd6a1fc96377b741c90748919b76f5ea9c1a6e37477ca1847bf3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_config_does_not_load_blocked_plugin_from_args_test_config_does_not_load_blocked_plugin_from_args.None_1": {"doc_hash": "9fe4f02ff510e09e618003e49fefc728bddc4a965367c82d8c94624dfb6676ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal_TestConftestValueAccessGlobal.test_basic_init.assert_conftest__rget_wit": {"doc_hash": "450356c38ca96904368faf860057328af7fdb337b2d4f84c3bf8a32466f755e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_immediate_initialiation_and_incremental_are_the_same_TestConftestValueAccessGlobal.test_immediate_initialiation_and_incremental_are_the_same.None_3": {"doc_hash": "0a6bfde61ab1c21f6ae7772b66180c26a431ee6aade78319c7fa18f048f170d4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_in_nonpkg_with_init_test_conftest_in_nonpkg_with_init.ConftestWithSetinitial_tm": {"doc_hash": "ba20b92e85912bda8938e2844b0bd242aae3642fcfce9c741e56f635a02faa02"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_doubledash_considered_test_issue151_load_all_conftests.assert_len_d_len_name": {"doc_hash": "9dac255a52c7a1a8a61ea434a979ed96d13a5329340a70bee30e522dcc43bd0d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_global_import_test_conftest_global_import.assert_res_ret_0": {"doc_hash": "840a2b0b8794b78d0ae6166cf488c2a7833561553307228a9cfc21ba69788e66"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftestcutdir_test_conftestcutdir.None_5": {"doc_hash": "b445c7557148d77eb74371aef6fd2535418cb3240154670dae7b49f42f0c6944"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftestcutdir_inplace_considered_test_setinitial_conftest_subdirs.if_name_not_in_whatever.else_.assert_len_conftest__conf": {"doc_hash": "d07519647f580496ddc8a32b4e40e845d15b72a144de50618bcc987a138a20a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_existing_junitxml_test_conftest_existing_junitxml.result_stdout_fnmatch_lin": {"doc_hash": "59e87e663fdcfcc0c29bbe6ed147e75fea158185fc5e6b23767a866d5a9c517d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_fixture_dependency_test_fixture_dependency.result_stdout_fnmatch_lin": {"doc_hash": 
"fd1666b2a7cd11ecdbe2cff5824cb738684f9ef895b270c6bee0ffd66ed4c6c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_found_with_double_dash_test_conftest_found_with_double_dash.result_stdout_fnmatch_lin": {"doc_hash": "f99ffbf9c171056cdf627756bb40f702807a183fd9e612af30155d62ddc24fa5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestVisibility_TestConftestVisibility._setup_tree.return._runner_runner_packa": {"doc_hash": "4da53683924c732a68db68980848270de37e06369fab15ab83fca1fa89ff238a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_search_conftest_up_to_inifile_test_search_conftest_up_to_inifile.result_stdout_fnmatch_lin": {"doc_hash": "20f35ede9e07170dfcfe91770f4730f981b88eee8469605aa3512d879e74d9d3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_issue1073_conftest_special_objects_test_conftest_exception_handling.assert_raise_ValueError_": {"doc_hash": "59006586ebbb81e741f1a6c3a3327929114326be3848cd29c5183ec4f3667754"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_hook_proxy_test_hook_proxy.result_stdout_fnmatch_lin": {"doc_hash": "d394b1d1bbed4121516ac57c8ea482d4a80e6ac994f3a1e2551754eb3a8dcf21"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_required_option_help_": {"doc_hash": "6c5e529fa9f3d1acdaf3fca4fa6f8f0ad1f4bea887f362fbfe26a45688e1f8b1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_simple_doctestfile_TestDoctests.test_new_pattern.reprec_assertoutcome_fail": {"doc_hash": "dc12f7f066ba54f35347ac3a704ea4a86f29ed91db1b5291e23bedd0b29f0b36"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_multiple_patterns_TestDoctests.test_multiple_patterns.None_4": {"doc_hash": "f15751d3258d7de230a0e879074453c59c591534d182ec0624850bd97511235f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_encoding_TestDoctests.test_encoding.result_stdout_fnmatch_lin": {"doc_hash": "c7cf9f673b905e93a6e8bec6db32396a820615bb4c720972f80b7945b8d8f144"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_docstring_full_context_around_error_TestDoctests.test_docstring_full_context_around_error.result_stdout_fnmatch_lin": {"doc_hash": "6996e45cc8e23e71dffa3cde503af28870b631e1bf42ac13b96b020323b70cd0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_linedata_missing_TestDoctests.test_doctest_linedata_missing.result_stdout_fnmatch_lin": {"doc_hash": "ae57c232ad57410c980cb519a0a111b0eae963f458caeb212ab3cd55564b8803"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unex_importerror_only_txt_TestDoctests.test_doctest_unex_importerror_only_txt.result_stdout_fnmatch_lin": {"doc_hash": "7c9218864f7423801a0765ed57d87c02abe1e87c39dea1e417cbab587d2558c8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_txtfile_failing_TestDoctests.test_txtfile_with_fixtures.reprec_assertoutcome_pass": {"doc_hash": "10858f760512227db0a898e00b47e900e9c9dec57095536f0b5d1b59b11101d4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_two_tests_one_fail_TestDoctests.test_doctestmodule_two_tests_one_fail.reprec_assertoutcome_fail": {"doc_hash": "1ef88915795cff1064eed28d537d8480654e9e45281853eb79d1a91e5e0508d1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_ignored_whitespace_TestDoctests.test_ignored_whitespace.reprec_assertoutcome_pass": {"doc_hash": "33bd2ac0f0b205994d187865d9313da4122c4649e0a7ab528ec9a831baa47016"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_non_ignored_whitespace_TestDoctests.test_non_ignored_whitespace.reprec_assertoutcome_fail": {"doc_hash": "e354b2770df753deda84d76a204b975669c1dc50ab77df4f76a1adcaf69a7591"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_ignored_whitespace_glob_TestDoctests.test_ignored_whitespace_glob.reprec_assertoutcome_pass": {"doc_hash": "8dbceeee3a506ee4978187ebcec9bb1caab7ffb662e8216a9e2aa1b977758238"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_non_ignored_whitespace_glob_TestDoctests.test_non_ignored_whitespace_glob.reprec_assertoutcome_fail": {"doc_hash": "0ad716b1a044110b74f4cec43582e54fd19de85e8736dba5cca9a59348725055"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_junit_report_for_doctest_TestDoctests.test_junit_report_for_doctest.reprec_assertoutcome_fail": {"doc_hash": "20fe3b4764b6a1b71714e668e2dfa44c7d7668788c97825957fa893d6d4b7bd8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_unicode_doctest_TestDoctests.test_unicode_doctest.result_stdout_fnmatch_lin": {"doc_hash": "aa35b3f37dc67bfedcc8b219c38ce5762a0114c76063d033cb10ab7111b5b99c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_unicode_doctest_module_TestDoctests.test_unicode_doctest_module.result_stdout_fnmatch_lin": {"doc_hash": "6c9e2f214171200b91cc899765dcaa8a4e8b69a43aad7167f393894fb44fa280"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_print_unicode_value_TestDoctests.test_print_unicode_value.result_stdout_fnmatch_lin": {"doc_hash": "8a89e4c92a4fda1389c388af60443671f224ddbbbe4f4990d7edcd2ffb3056f6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_reportinfo_TestDoctests.test_reportinfo.assert_reportinfo_1_1": {"doc_hash": "817ee28a90c003ecce7d6b5ffe17d789c91b5c31a19574674b53e8d48241392f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_valid_setup_py_TestDoctests.test_invalid_setup_py.result_stdout_fnmatch_lin": {"doc_hash": "3782545342c728223b220b35ba81e779a56a8244c0666a9c8f06a0d335ac6c67"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals_TestLiterals.test_allow_unicode.reprec_assertoutcome_pass": {"doc_hash": "db790c99fb412e33bff430749837e4ace7f670da13fb237006774c94205758ef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_allow_bytes_TestLiterals.test_allow_bytes.reprec_assertoutcome_pass": {"doc_hash": "3952c814fc31b8ae6235d1990f0657f2fac9a486155beabfa15c3d730cad817e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestSkips_TestDoctestSkips.test_vacuous_all_skipped.reprec_assertoutcome_pass": {"doc_hash": "a258a5fdfcf8be09c8529e3aae1d30955f27456d779b638e972e7057ab6cb708"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestSkips.test_continue_on_failure_TestDoctestSkips.test_continue_on_failure.result_stdout_fnmatch_lin": {"doc_hash": "d72f0c8563412faa8a177906e064d4936ccfff053d8f4656215cb8b95b0319b8"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures_TestDoctestAutoUseFixtures.test_doctest_module_session_fixture.result_stdout_fnmatch_lin": {"doc_hash": "6363f98afe4b08c6cdcc0b259b1767a0d3e6940b329a807e06da1e95110239ca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_fixture_scopes_TestDoctestAutoUseFixtures.test_fixture_scopes.result_stdout_fnmatch_lin": {"doc_hash": "cbbd3f1a5058527fa3440b2d02c35a8fc5fe3db445429a3bddf3d0b039367118"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_fixture_module_doctest_scopes_TestDoctestAutoUseFixtures.test_fixture_module_doctest_scopes.result_stdout_fnmatch_lin": {"doc_hash": "46449fc2c018ae4fe89575a89bd4a464438776be31c373febdb87e0fe5594856"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_auto_use_request_attributes_TestDoctestAutoUseFixtures.test_auto_use_request_attributes.result_stdout_fnmatch_lin": {"doc_hash": "05f8f63b9e8883b48980b9812432e8053832d13fb3b1df53cacf96d9884b3f25"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestNamespaceFixture_TestDoctestNamespaceFixture.test_namespace_doctestfile.reprec_assertoutcome_pass": {"doc_hash": "699feb7c5800274f54ae1ddc7d6029702426135542d1214246bcffd2461f3a56"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestNamespaceFixture.test_namespace_pyfile_TestDoctestNamespaceFixture.test_namespace_pyfile.reprec_assertoutcome_pass": {"doc_hash": "71ee2325dc9ff1055cb7ce284ac9beaecfd0b0e340c51ef7fd822f0ea9841e34"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_udiff_TestDoctestReportingOption.test_doctest_report_udiff.result_stdout_fnmatch_lin": {"doc_hash": "a76f02692e43f8de34ca755dd9ec33a3fc45737fab479e380172a1387b1a03ac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_cdiff_TestDoctestReportingOption.test_doctest_report_cdiff.result_stdout_fnmatch_lin": {"doc_hash": "0863f106d92f774d948c5cf0f9e7a1eb541d423d978d1fb63a6998b36a972b93"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_ndiff_TestDoctestReportingOption.test_doctest_report_ndiff.result_stdout_fnmatch_lin": {"doc_hash": "049d6cccbeed80d95c9527c8313d74df3499e04f0ce194843da94aa3b29f00c4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_none_or_only_first_failure_TestDoctestReportingOption.test_doctest_report_invalid.result_stderr_fnmatch_lin": {"doc_hash": "6df21bb2c93aecb5aa9b84fe004de45e684c2f7810a0b1a18e95af61e3fd2725"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_summing_simple_with_errors_TestPython.test_summing_simple_with_errors.node_assert_attr_name_py": {"doc_hash": "1afbd092a506f48a6817cb76e207fb221680992af2317c26e9226626a1b4cfe9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_junit_duration_report_TestPython.test_junit_duration_report.if_duration_report_to.else_.assert_val_1_0": {"doc_hash": "7c7ab27d80a765a51b04933db604cef08ccbf518dd38be37d2673c70d08aade1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_TestPython.test_setup_error.assert_ValueError_in_fn": {"doc_hash": 
"0b313e91a6772dba678b848a16cc673f585278d2becb7aadbaff5c8f3bc9e8e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_teardown_error_TestPython.test_teardown_error.assert_ValueError_in_fn": {"doc_hash": "f867e9c93886eb23afeb03c7041e43a469d64c9f64c76ee0b181fc17a8d47087"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_skip_contains_name_reason_TestPython.test_skip_contains_name_reason.snode_assert_attr_type_p": {"doc_hash": "bf62930405c43455b81d1a91ee5e83222f7ac64226146f2fca195b36676b0439"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skip_contains_name_reason_TestPython.test_mark_skip_contains_name_reason.snode_assert_attr_type_p": {"doc_hash": "428f81577f38640a753e2150369984a7cc22d236fa764f9df65c291b71264c6b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skipif_contains_name_reason_TestPython.test_mark_skipif_contains_name_reason.snode_assert_attr_type_p": {"doc_hash": "a171bef8d1dd0d65fcc80ecbb49b40d10aa9829c3ad44100bf0181326430054f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_verbose_message_TestPython.test_failure_verbose_message.fnode_assert_attr_message": {"doc_hash": "e65db476ce448cb4c17d2f6f8c30e23c68b186e86445abea450734c2d71f3336"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_junit_prefixing_TestPython.test_junit_prefixing.tnode_assert_attr_": {"doc_hash": "81d93475d2a1575cfca6fd437d420dd324a5cd30be203429dec49776f5221345"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_marker_TestPython.test_xfailure_marker.fnode_assert_attr_type_p": {"doc_hash": "8cd51ce821a8bb75950559b6beae6e871344daab8468b7b42e7282b2d2b0e733"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_xpass_TestPython.test_xfailure_xpass.tnode_assert_attr_classna": {"doc_hash": "1cfdb0473588d1ab48a6f032bd90b96404d00b2094a53cbbb6a4ac26bbe0d2f4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_xpass_strict_TestPython.test_xfailure_xpass_strict.fnode_assert_attr_message": {"doc_hash": "964956bd3e4f9ec793b6c16e9f6fdf180fa173de2fd5aeff576eed555b048edd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestNonPython_TestNonPython.test_summing_simple.assert_custom_item_runte": {"doc_hash": "d557abc29afd5b109e9e18f0749b56aa56e8f722853ee97bae668c5ad1dfd1a7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_invalid_xml_escape_test_invalid_xml_escape.for_i_in_valid_.assert_chr_i_bin_xml_": {"doc_hash": "ee7c138af6044ef3375014b7c76477aeed9dc3b2f96af288a5bdb24055896a8c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logxml_path_expansion_test_logxml_path_expansion.assert_xml_var_logfile_": {"doc_hash": "84398d4c93e292526450b483af92272092ba570a8354eabddf4522bfbaa69db0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_double_colon_split_function_issue469_test_double_colon_split_function_issue469.node_assert_attr_name_te": {"doc_hash": "fed6d134774755672d4f240c8937df291784412d2f4eacfbc8e739daa3a38ba1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_double_colon_split_method_issue469_test_double_colon_split_method_issue469.node_assert_attr_name_te": {"doc_hash": "ed498381f6cf585496b9fe5d8e6b06ce21a940a40b679ead2af446cff4504d8d"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_unicode_issue368_test_unicode_issue368.log_pytest_sessionfinish_": {"doc_hash": "f2ab0bf9e1ae068de1288e51062fe1709ccd1eeff89f82a97e6652058ac89684"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_property_same_name_test_record_fixtures_without_junitxml.assert_result_ret_0": {"doc_hash": "c3c8187fd38e1ea6b8df142db45e27b20de17ab8d8fe9a14fce83b83bd66a5bd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_attribute_test_record_attribute.result_stdout_fnmatch_lin": {"doc_hash": "c92f23969f74ff1cc3a79fa8d251c63c2e57d3322abf177d6667718896d81152"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_fixtures_xunit2_test_record_fixtures_xunit2.result_stdout_fnmatch_lin": {"doc_hash": "5320fef4fe3f3edefc429b7fbd6aafcb23a573a32f2e14dbe0cb1fb0cea17c23"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_random_report_log_xdist_test_random_report_log_xdist.assert_failed_test_x": {"doc_hash": "1f0d04e357b0aff4823208a7c9307ab37d6a9301ed52bd011d0cd46cb19a6a8f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_fancy_items_regression_test_fancy_items_regression.assert_items_": {"doc_hash": "61ba35fb69227381263d6d3cb06bd127958a30fd3a63892a4ab8220bac6ab116"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_global_properties_test_global_properties.assert_actual_expected": {"doc_hash": "a8b45e540672b1823066e03360873486fb66f7f4c085e725ed44ffdb2794a73b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_url_property_test_url_property.assert_": {"doc_hash": "26ea07290b70a67e8a9797e8c0ccf1280f465884e45dbfc80c92ad81bd28887e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_set_suite_name_test_set_suite_name.node_assert_attr_name_exp": {"doc_hash": "d2a049e058c0824bd8c85fd6210b79891e460530a10fa8bbff7e74d15823eebd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_marked_class_run_twice_test_marked_class_run_twice.rec_assertoutcome_passed_": {"doc_hash": "4149e82980e31e8190c3ad4d23100eadd42b0446c4e7caf363cc141cc8c0565c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_ini_markers_test_ini_markers.rec_assertoutcome_passed_": {"doc_hash": "1e81e96f057526e8cdd420fe8e26016377a4615812d03569169ca63a63cf235f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_markers_option_test_ini_markers_whitespace.rec_assertoutcome_passed_": {"doc_hash": "547919d915289c92dfd98427fce08f8cfefb2758b821713f668eb0d83b1a188b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_marker_without_description_test_marker_without_description.rec_assert_outcomes_": {"doc_hash": "9097a68275a02a3c32418202b2828ffcd58932e5d5d8fd593a473dff2780f6f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_markers_option_with_plugin_in_current_dir_test_markers_option_with_plugin_in_current_dir.result_stdout_fnmatch_lin": {"doc_hash": "712aecb5229f2562a922514c4462ac7095b71512521c84fd0069dfe8d510d531"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_on_pseudo_function_test_strict_prohibits_unregistered_markers.result_stdout_fnmatch_lin": {"doc_hash": "347e2ce635ca2074ba2c5ec9b81961e16a1ad68396ecc5bc3cea5509c8baa390"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_collected_from_command_line_test_parametrized_collected_from_command_line.rec_assertoutcome_passed_": {"doc_hash": "5c33d11c493c8fbea05db0563e20ae70a169b04bf3fa6caa4e02e711eb6835ca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_collect_with_wrong_args_test_parametrized_collect_with_wrong_args.result_stdout_fnmatch_lin": {"doc_hash": "ca5c98e73a07b214ad7bb254b18d4b8fcff55976228f94b11e4a824924c1043a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_with_kwargs_test_parametrized_with_kwargs.assert_result_ret_0": {"doc_hash": "7ccd950af639d8dc1ffeb4ee455cace9c009e5e6142ebf2dadf05952797cbd19"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional_TestFunctional.test_merging_markers_deep.for_item_in_items_.assert_x_for_x_in_item_i": {"doc_hash": "99d246f093e83dd85684e07f7186770c2c60ebc26fad0839d371fcad36d16e96"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_decorator_subclass_does_not_propagate_to_base_TestFunctional.test_mark_decorator_subclass_does_not_propagate_to_base.self_assert_markers_items": {"doc_hash": "f494643b54162d59a2361c326bcd6af32fe33bd55015ac8a56128f9fc9a76170"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_should_not_pass_to_siebling_class_TestFunctional.test_mark_should_not_pass_to_siebling_class.assert_list_sub_item_iter": {"doc_hash": "11575c68eb20921e983f2f95788fd8b95060538d0b75170cdc8a9f08b6984cff"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_decorator_baseclasses_merged_TestFunctional.test_mark_decorator_baseclasses_merged.self_assert_markers_items": {"doc_hash": "c58c887629ada98cd71076801619c176131b55af4e384e883aae1cb52019fc5f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_with_wrong_marker_TestFunctional.test_mark_dynamically_in_funcarg.result_stdout_fnmatch_lin": {"doc_hash": "24043b6eb9fcdcd491678836576eee76093590a0e0a5aadd68d77a846e75ccda"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_no_marker_match_on_unmarked_names_TestFunctional.test_no_marker_match_on_unmarked_names.assert_len_deselected_tes": {"doc_hash": "5ecbb6fe1c96a588df22d822fba018851d47fc9b3d8af0ee6599ead44be02f1d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_keyword_added_for_session_TestFunctional.test_keyword_added_for_session.reprec_assertoutcome_pass": {"doc_hash": "0a78b4f20a70f7b81ccb5fbd7fa5783824abb16ee090dabab3598fbb95ce94cb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.assert_markers_TestFunctional.assert_markers.for_name_expected_marker.assert_markers_set_exp": {"doc_hash": "024a2b41e8b5bca3f66a0b9358abf01a81dec37f04f57576f7af33543bc16721"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_from_parameters_TestFunctional.test_mark_from_parameters.reprec_assertoutcome_skip": {"doc_hash": "8abf9db8a16f3dda6a92ddb9ff34248d5e1d9d771ea044234f21e65fa6effe4f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection_TestKeywordSelection.test_select_simple.check_TestClass_and_test": {"doc_hash": "3b34662fb2bb8a232547351ec1860b8f531b066a62b85885b3d6853b0b83b756"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_select_extra_keywords_TestKeywordSelection.test_select_extra_keywords.assert_dlist_0_items_0_": {"doc_hash": "f9fa9703dfcfef61e31ce6ef2dba2901cbdc04aa4b999f3b0a84f4db9706cd84"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_select_starton_TestKeywordSelection.test_select_starton.assert_item_name_test": {"doc_hash": "96269538481b432e40b93c64a6e9ded195fcdb38792855483a3f6383dfaa5456"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_keyword_extra_TestKeywordSelection.test_keyword_extra_dash.assert_passed_skipped_": {"doc_hash": "27b4945caae113065f5625a4b4a728cbc4def62ab7dd99e3b393bb12e817a9d7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_parametrize_marks_test_parameterset_for_parametrize_marks.if_mark_xfail_.assert_result_mark_kwargs": {"doc_hash": "87f7226c54479f966e6b3e64a6b36ae383196abdc12ef3fb4f0aec1cd5458972"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_TestSetattrWithImportPath_TestSetattrWithImportPath.test_delattr.assert_os_path_abspath": {"doc_hash": "dc3c6cab9aef8cb2b0500cccfe526a1908a9dfbb75c2086b27a4a57a1014b536"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_delattr_test_delattr.None_3": {"doc_hash": "14ee4d6443a37356de67b538278f7c0c1a0de27c67e00ef2e0ad83933b6c7a47"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setitem_test_setitem.assert_d_x_5": {"doc_hash": "90208e706abaa26215c0c15951852a1b43bc69415b7e843a19a888d592218a5d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setitem_deleted_meanwhile_test_setenv_deleted_meanwhile.None_1.else_.assert_key_not_in_os_envi": {"doc_hash": "813f778817deeebba106265faf065a3ad9f0fba3859c8517b737f1f676bd40ef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_delitem_test_delitem.assert_d_hello_wo": {"doc_hash": "7555c26c18ddca57002dc76d94ecd37e2567a570d6feec0a6345520a746ba5d6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setenv_test_delenv.try_.finally_.if_name_in_os_environ_.del_os_environ_name_": {"doc_hash": "1c9630b56235832b796fd008505eb3f218940ce7208a8a883a6e5798a975a359"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_importerror_test_importerror.result_stdout_fnmatch_lin": {"doc_hash": "f750d7b29064a37d75ab5857055e3defbee25ef756a055479ac087dfa0026ecd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_setup_func_with_setup_decorator_test_nose_setup_func.result_assert_outcomes_pa": {"doc_hash": "7a7c907387a174d7b94e45e0dbb7e914e5a75375ab7002eeb630be9aa59b604a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_setup_func_failure_test_nose_setup_func_failure_2.reprec_assertoutcome_pass": {"doc_hash": "ce071f5763fadbe2cfa1585e5204cf09fd8a849a7584cd3add1b0b05e152ae30"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_setup_partial_test_nose_setup_partial.result_stdout_fnmatch_lin": {"doc_hash": "91b2f8efc2c313515619f10c7394cb0a4f588066efab806c5176434aa3cb7330"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_module_level_setup_test_module_level_setup.result_stdout_fnmatch_lin": {"doc_hash": "852771edbe84b51eaa746684fb5ca8b3c4a104683a2db8d3a8c6357050efda78"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_style_setup_teardown_test_nose_setup_ordering.result_stdout_fnmatch_lin": {"doc_hash": "d50be6158e64165507921a69aa035556321e3aa14a35ad907b867b5187b38c57"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_apiwrapper_problem_issue260_test_apiwrapper_problem_issue260.result_assert_outcomes_pa": {"doc_hash": "6843ed9141719d6a972b29a549bce7e2cd8e220fab3727f0f84fcfcfe165d430"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_setup_teardown_linking_issue265_test_setup_teardown_linking_issue265.reprec_assert_outcomes_pa": {"doc_hash": "9d3e36d5f1f5ebe9393205c556f2681978fdc897f8b97632e56a9906aabd680f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_type_TestParser.test_argument_type.None_3": {"doc_hash": "7cdd043b19ec0d7dc1d7d5ecbf0ae5a46705debc7627ce9c2d32f798035ad8fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_split_positional_arguments_TestParser.test_parse_split_positional_arguments.None_6": {"doc_hash": "930634bf5accb618baa9d9edb95de58013dc20db4e9452a9d0179968abf90fc0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_defaultgetter_TestParser.test_parse_defaultgetter.assert_option_no_is_False": {"doc_hash": "9192e85889e3474594de88574a6b594a6ec7a4a446e5b665eb597082cb50b3b1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_helper_TestParser.test_drop_short_helper.assert_join_args_file": {"doc_hash": "dfaab9491a01973ef918b5594efb8b21a1064c1ea739467393188a188c8a35fe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_0_TestParser._testing_would_be_more_h": {"doc_hash": "fdcf89c23708a8499fdcb8509cdbcaff85ae881473066e55c6854b26971d5bbc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_help1_TestParser.test_drop_short_help1.assert_doit_func_arg": {"doc_hash": "f16b00f9ecd766f76738831f42fc5b0cd853d84538c984447f0366d645049791"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_multiple_metavar_help_TestParser.test_multiple_metavar_help.assert_preferences_val": {"doc_hash": "bfa829c0d4e7bdba1749fa41910484445119cd4f337a138955dffe496bf77a6a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_test_argcomplete_": {"doc_hash": "ed7f4f4caee8b37b66d56d2f2ec32adcc43827c3a36d03a3385164b970e1a6ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPasteCapture.test_all_TestPasteCapture.test_all.matcher_fnmatch_lines_": {"doc_hash": "c567401e401069a0591756ee3517c35a8aabf43d18ce0437f1a228709043ea3b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.test_create_new_paste_": {"doc_hash": "766d129b8a4b0d63119342a9ad4d494682c7df7fb28c06f394051c5b4c2c5f4e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_addhooks_nohooks_TestPytestPluginInteractions.test_do_option_postinitialize.assert_config_option_test": {"doc_hash": "351a743198ef9f69488dd365cf480ee419ce3124845f76f572a4615eb09eedaa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_configure_TestPytestPluginInteractions.test_configure.None_4": {"doc_hash": "8cff084156e54bf67b6b7a469b64b992ba67c6b6ef6e0ecaf619889f651e8669"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_tracing_TestPytestPluginInteractions.test_hook_tracing.try_.finally_.undo_": {"doc_hash": "1420d42625cc0b76b91516b3d27cdf111b40b2d720436b751d1be2230b94e4f6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_proxy_TestPytestPluginInteractions.test_hook_proxy.assert_ihook_a_is_not_iho": {"doc_hash": "f97cb0ca71e9bc09f592a6d70f8dc2846f8aafca41de02af3dcb11ed40dc539e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_test_default_markers_test_importplugin_error_message.assert_in_test_traceback": {"doc_hash": "8f6a4cd2cd0c9f0316e9d7ea4c67be24f88d6a9504bb6d95c9b86c4da52430d8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager_TestPytestPluginManager.test_canonical_import.assert_pm_is_registered_m": {"doc_hash": "f4f36632b5f641a467978c3fb557b4527305c98207f86e8d9c80aac64dba18c9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_module_import_module_TestPytestPluginManager.test_consider_module_import_module.assert_len_values_1": {"doc_hash": "9aea07175f725514b9bf3b6a57b33ed814877b0f6d6c16e96025f6e45c716a0c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_env_fails_to_import_TestPytestPluginManager.test_plugin_skip.result_stdout_fnmatch_lin": {"doc_hash": "2ca59a3e4954e727716fcb608c353e853d3712dfb7eb3ca23dfb02ddb9fa83b2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_env_plugin_instantiation_TestPytestPluginManager.test_consider_env_plugin_instantiation.assert_l2_l3": {"doc_hash": "103566a0798e36c5431127bc27fc2a8f8adccecf1fe20c62679192b4a0de7b09"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_pluginmanager_ENV_startup_TestPytestPluginManager.test_pluginmanager_ENV_startup.result_stdout_fnmatch_lin": {"doc_hash": "4dbcabb1d555e32e259efb1fb8f5812c5a02757df54541840730004307178e26"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_import_plugin_importname_TestPytestPluginManager.test_import_plugin_importname.assert_plugin2_is_plugin1": {"doc_hash": "4fb0dd384ef426be2a7718986dbe6679d8a93c0829a3fa5e6b2e85f9ab172444"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_import_plugin_dotted_name_TestPytestPluginManager.test_consider_conftest_deps.with_pytest_raises_Import.pytestpm_consider_conftes": {"doc_hash": "cb33f5f4a5752c7da5170427c25975cd0a23c8f779c7b42bd5123ffdb895ea0b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming_TestPytestPluginManagerBootstrapming.test_preparse_args.with_pytest_raises_UsageE.pytestpm_consider_prepars": {"doc_hash": "9bad8b21d0ebb33dc3d65356eca26df65f2efdf499ca7ddbc7baf5ffc7f08e6a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_unregistered_alredy_registered.assert_42_not_in_l2": {"doc_hash": "61d07e1e1951906357b188bd870aa070f66c99155fb141d8efc77715ea23cc7c"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_stepwise_on_cacheprovider_unregister_": {"doc_hash": "a0fbcad4f93ee72a49fc9852356a8bce24574bbb01877c7b6a520fea23462ffd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_hookrecorder_basic_test_hookrecorder_basic.None_2": {"doc_hash": "035fa7c1af5ff9293a4b6a44688f2be3ec1bb1a43bc8d5690b965576b28ec5bd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup_TestInlineRunModulesCleanup.spy_factory.return.SysModulesSnapshotSpy": {"doc_hash": "f0b873912729f7ae12794d554ef21e645aaa845b8df328077326221b4e08a8c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup.test_inline_run_taking_and_restoring_a_sys_modules_snapshot_TestInlineRunModulesCleanup.test_inline_run_taking_and_restoring_a_sys_modules_snapshot.assert_all_sys_modules_x_": {"doc_hash": "81cba5433f272f3feea5468ae6a17810688add94c10f0ede77b30ca047a995e7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup.test_inline_run_sys_modules_snapshot_restore_preserving_modules_TestInlineRunModulesCleanup.test_external_test_module_imports_not_cleaned_up.assert_imported_data_4": {"doc_hash": "7929524e3705c3b4b8f3cd4e888cb2adbb8f8b6700836536a4478554aa3857d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_assert_outcomes_after_pytest_error_test_cwd_snapshot.None_1": {"doc_hash": "23762dc520caa602c819e47af0db79820d8ad0ab28378e7722398c7b1385f0c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysModulesSnapshot_TestSysModulesSnapshot.test_restore_reloaded.assert_sys_modules_ori": {"doc_hash": "60b155d77548517f5c2ad7c64536eb8902051727f739eb770b82aa1f9979544e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysModulesSnapshot.test_preserve_modules_TestSysModulesSnapshot.test_preserve_container.assert_sys_modules_ori": {"doc_hash": "17d235e829468cac3ba5b70a33ae00dbb309a163a4975a2d53b888edbc160f29"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysPathsSnapshot_TestSysPathsSnapshot.test_restore.assert_getattr_sys_other": {"doc_hash": "c40f4410778bc06e14bcff41d98b7ec1b47f2143d768f6767919b8de75d08e03"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarningsRecorderChecker_TestWarningsRecorderChecker.test_recording.with_rec_.pytest_raises_AssertionEr": {"doc_hash": "c41f252462e466d2d577b119ff1dcbd48b33a0691ddf08045068a7eb4007352f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_modes_TestDeprecatedCall.test_deprecated_call_modes.if_mode_call_.else_.with_pytest_deprecated_ca.assert_f_10": {"doc_hash": "845be5d0e7308dc84d5507375502f7402edf63ef6a2d3721ae9d16ed613c3706"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_exception_is_raised_TestDeprecatedCall.test_deprecated_call_supports_match.with_pytest_raises_pytest.with_pytest_deprecated_ca.warnings_warn_this_is_no": {"doc_hash": "b2aed2c6f91459472f13cba231cf299d9d6b2f5c95b5daf853d518a04a886024"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_as_contextmanager_TestWarns.test_as_contextmanager.None_3": {"doc_hash": "4029c5c251d9a6e278c35833ea47f987c9cf5e4a3890536c223892ba902f76c1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_record_TestWarns.test_record_only.assert_str_record_1_mess": {"doc_hash": "c0b4cb234c3e85e8e074cdc5d697dda4f5bc57c950fc6174ffce408642c83196"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_record_by_subclass_TestWarns.test_record_by_subclass.None_5": {"doc_hash": "1dce61ebca3e995fe844f2e9f0f374c1cb58869442fbadf9834d7caed04c6ae4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_one_from_multiple_warns_": {"doc_hash": "2fa878d5b3e0e67e38d1228867018146b2c71ea447a00b2e81f2ae2ca2d55c50"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_xdist_report_longrepr_reprcrash_130_TestReportSerialization.test_xdist_report_longrepr_reprcrash_130.assert_added_section_in_a": {"doc_hash": "7fbe192ad8b66b923f9169715d2b668830843c674c9970eebf208fd8ac6811c8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_reprentries_serialization_196_TestReportSerialization.test_reprentries_serialization_196.for_i_in_range_len_a_entr.assert_rep_entries_i_lin": {"doc_hash": "978fbed806ddf6c1976fd44ed0810ce0e7dc9d6ecad1c27ac7b80a31d1f41f02"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_itemreport_outcomes_TestReportSerialization.test_itemreport_outcomes.for_rep_in_reports_.if_rep_failed_.assert_newrep_longreprtex": {"doc_hash": "83d33a1b7f511b290790edf0e6be60183e0adba82061498cd98f162dc2e5390b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_collectreport_passed_TestReportSerialization.test_collectreport_passed.for_rep_in_reports_.assert_newrep_skipped_": {"doc_hash": "41671d79942105b5d30fa553ac257dd808e15c104b274f226ca32b1994095671"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_collectreport_fail_TestReportSerialization.test_collectreport_fail.for_rep_in_reports_.if_rep_failed_.assert_newrep_longrepr_": {"doc_hash": "52647441640b3749cc2faac455fa348d36a9815b1424921f5e96b1848027d7b4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_extended_report_deserialization_TestReportSerialization.test_extended_report_deserialization.for_rep_in_reports_.if_rep_failed_.assert_newrep_longrepr_": {"doc_hash": "95b470001b33447ef6f9c6b0b3000ca0ebe183314e0d3b32fbc1bf58301fae0e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_paths_support_TestReportSerialization.test_paths_support.assert_data_path2_s": {"doc_hash": "383b66868f9cf600e0df23b4924a7e9475f126f3db81303009542b5d53c565f4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks_TestHooks.test_test_report.for_rep_in_reports_.assert_new_rep_outcome_": {"doc_hash": "3de4cfd98167b7d30a7c593de8c5f2d6f46311400755a7da59e13e2aa51433a2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks.test_collect_report_TestHooks.test_collect_report.for_rep_in_reports_.assert_new_rep_outcome_": {"doc_hash": "af00c56393bac630c07c517b7427045ba6b515e8b84c82fce5bd3e3cb36dae2a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks.test_invalid_report_types_": {"doc_hash": "ce61a1b975cf01bc87569c19fcf452a098aad39d0586cfc120de1e53dcbeff1a"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_one_fails_TestSetupState.test_teardown_multiple_one_fails.assert_r_fin3_fin": {"doc_hash": "b4a929bdfbb9783f7d5fe114685a01543725ba4dc532cb8d6ed53c7b0e03e4be"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_fail_TestSetupState.test_teardown_multiple_fail.assert_err_value_args_": {"doc_hash": "918bdfb695bb6880b3aa368652ab953eab0111fff1105a5104e3dadf364115ec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests_BaseFunctionalTests.test_failfunction._assert_isinstance_rep_l": {"doc_hash": "fb598e5f19c586b32fded573f03b29c2a081d0b4eb6f790468a158fd7f8f55ae"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_skipfunction_BaseFunctionalTests.test_skipfunction._assert_not_rep_skipped_": {"doc_hash": "6ccc8cd78bc335bdd500aef7202eb1906eba7261f2ba12368fa55c2eb207d027"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_skip_in_setup_function_BaseFunctionalTests.test_skip_in_setup_function._teardown": {"doc_hash": "c5c9709f8231cd5bde42b38be29a364667d4e1d3cbbf332922daedec8097e9a8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_failure_in_setup_function_BaseFunctionalTests.test_failure_in_teardown_function.None_1": {"doc_hash": "44e273e99aa91cc7d6b9d5e4956d67418a5bd53271d53c85ca55f3919da7db5d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_custom_failure_repr_BaseFunctionalTests.test_custom_failure_repr._assert_rep_failed_failu": {"doc_hash": "ac58377fb30d9a2e8288edbffb30b61d190dcd4e94df73734f47aaac2b722d1e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_teardown_final_returncode_BaseFunctionalTests.test_logstart_logfinish_hooks.for_rep_in_reps_.assert_rep_location_": {"doc_hash": "ac820a70fa58f9daff6fa8e96fa75a3855280fee7b390b3759e696b3a26a4f3c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_exact_teardown_issue90_BaseFunctionalTests.test_exact_teardown_issue90.assert_reps_5_failed": {"doc_hash": "65253d8540d5fda7dee60f953d51348f3340fae0643b629c837e1e62ba2303b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_failure_in_setup_function_ignores_custom_repr_BaseFunctionalTests.test_failure_in_setup_function_ignores_custom_repr._assert_instanace_rep_fa": {"doc_hash": "a54691a9213e1d3fbbd0c4dd80fdd59ed3fb9a8a1683e0868b991bcdeaf3becd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSessionReports_TestSessionReports.test_collect_result.assert_res_1_name_Te": {"doc_hash": "161078d7de149ca9ea78f2274ce8f84323907db2c837742c9b67ee8ee6304051"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_reporttypes_test_report_extra_parameters.assert_report_newthing_": {"doc_hash": "3d4387bed94941c897c035b17ec1322f849cc119b97c8e55fa8b7e96284c04c7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py__design_question_do_we__test_runtest_in_module_ordering.result_stdout_fnmatch_lin": {"doc_hash": "cbe31c15f6b98e2d11ca51983e3d16fda6363d586e95a56826d2c4e487a49093"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_exit_returncode_test_pytest_exit_returncode.assert_result_ret_98": {"doc_hash": "2ba0ac1bc82b2ea5f1f72f7621ec37f35572eec5adc11128fe73244bae170b84"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_no_tests_collected_exit_status_test_exception_printing_skip.try_.except_pytest_skip_Except.assert_s_startswith_Skip": {"doc_hash": "86bd38d6f74c6090d61bcfe1082d3e640c91fbc507edf3f394d8feab316ec6dd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_failure_in_setup_test_makereport_getsource.result_stdout_fnmatch_lin": {"doc_hash": "fbdb0401238d59f45a8e1aac58338c3452768560e07295900cfa817b688c4da2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_makereport_getsource_dynamic_code_test_makereport_getsource_dynamic_code.result_stdout_fnmatch_lin": {"doc_hash": "7f7a71366430762e1a09b578b9d492a3bd188ee2fe577c15cb6119ba592d8d64"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_current_test_env_var_test_current_test_env_var.assert_PYTEST_CURRENT_TE": {"doc_hash": "6f2f14595ff58e96ce1d74b968b81435c743d8d57d122072b1a6e5b893f939da"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_class_setup_test_class_setup.reprec_assertoutcome_pass": {"doc_hash": "a6c9dfe4b02a517c31cfa114d38ca39d992875aec7d72bd3883baa4fbbdb4a11"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_class_setup_failure_no_teardown_test_class_setup_failure_no_teardown.reprec_assertoutcome_fail": {"doc_hash": "6f6378885b6a57421c63f87ea9489c6ef2599279e86f67bc783e7d3afc86b623"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_test_method_setup.reprec_assertoutcome_pass": {"doc_hash": "2a51e354d46c49beb6ffd8b9e89c92bec245a35d83071644b678564ca06d65dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_failure_no_teardown_test_method_setup_failure_no_teardown.reprec_assertoutcome_fail": {"doc_hash": "3fa1fe1d4547e9a856489a463066216c77a0131cedb5bd9d0ce180561b25b527"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_uses_fresh_instances_test_setup_fails_again_on_all_tests.reprec_assertoutcome_fail": {"doc_hash": "e1210541cc8e7f1b011c95d7208f4b73bfdd81eb30e26e1bf9dbd2945e3f08ba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_teardown_function_level_with_optional_argument_": {"doc_hash": "d146396b92afc272e4258606b14b39e88e07e86711c3bb5652d43f12cf8b09ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_nested_import_error_SessionTests.test_nested_import_error.assert_out_find_does_not": {"doc_hash": "23f99c21e57765305224a0156d9f7fc05a3e92e755416227a0d862326dbbff05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_broken_repr_with_showlocals_verbose_SessionTests.test_broken_repr_with_showlocals_verbose.assert_repr_locals_lines_": {"doc_hash": "20da0ef1e7c85c4666926b0b42cf8edb3d9754c02ac904b4c357140ff13d19c4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_skip_file_by_conftest_SessionTests.test_skip_file_by_conftest.assert_reports_0_skipped": {"doc_hash": "f7afe35a20ffdb02315c95daa56bef0da11bada7a99b19f986554a8cb4055379"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession_TestNewSession.test_order_of_execution.assert_passed_7": {"doc_hash": "898fd44ba198119e6197a89b3ed87f7a67698572ae7eb46414778b2278313475"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession.test_collect_only_with_various_situations_TestNewSession.test_collect_only_with_various_situations.assert_len_colfail_1": {"doc_hash": "6fb5ce5f04721e1fb7682bfce15b4c90c05844a80ddf662ad571241e0f9ac5a3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession.test_minus_x_import_error_TestNewSession.test_minus_x_overridden_by_maxfail.assert_len_colfail_2": {"doc_hash": "2ff201512ee9b614c340e5e5c459333c62391c6d8766a9a041b3b63586b167ab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_plugin_specify_test_exclude.result_stdout_fnmatch_lin": {"doc_hash": "28e26ea76ea906735bbdcc2a8891f8d983b5f4472aa0a9bff6afdb82d5a27cc2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_exclude_glob_test_exclude_glob.result_stdout_fnmatch_lin": {"doc_hash": "9c03730860998a931fd0c8f2f2306f40be787f5847dcc9d3b7615f361f8db800"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_rootdir_option_arg_": {"doc_hash": "1c8e43b97bfcc0968667086c0f3f920b31a7edf921768fec771b593634236d97"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail_TestXFail.test_xfail_simple.assert_callreport_wasxfai": {"doc_hash": "c2c0233b8ae06e18969f5fb791747e232265a68157264b48f64d114c246d648c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_not_run_xfail_reporting_TestXFail.test_xfail_not_run_xfail_reporting.result_stdout_fnmatch_lin": {"doc_hash": "a8f11d0eacc45880006c0a032e62a7ef2c5d29b85393b8daf40ea967df57b579"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_not_run_no_setup_run_TestXFail.test_xfail_xpass.assert_result_ret_0": {"doc_hash": "925998a6d22f9d441145cdcfd8bc36f61ac9834d3b12051d116768ca16de7e05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_imperative_TestXFail.test_xfail_imperative.None_2": {"doc_hash": "191ca4f29b54015f794accd85ed12b8dbe75a38da1c890ba231860f41bbfdf0a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_imperative_in_setup_function_TestXFail.test_xfail_imperative_in_setup_function.None_2": {"doc_hash": "f4c4bf29fea7e311a6e3f0d05200ae42f21eff0ca3a909b070ce09defe5436c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_raises_TestXFail.test_xfail_raises.result_stdout_fnmatch_lin": {"doc_hash": "a53abc9a11da316aafc5c1c5b86da18b8e62764accd53a848ef489c5acbcae30"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_sanity_TestXFail.test_strict_sanity.assert_result_ret_0": {"doc_hash": "423d0c4a9c00c6ebbb0bc56986089cfbfa9572ac65f087bf41522030587fe7a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_condition_TestXFail.test_strict_xfail_condition.assert_result_ret_0": {"doc_hash": "a258b7d460670492d9e3583e3a1729f99438813dcd5a8a1864322721e312844e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_condition_keyword_TestXFail.test_xfail_condition_keyword.assert_result_ret_0": {"doc_hash": "540b0af3a4ed5ae9e124f6874351d9c31d08787fac70e333b2a067a54f88a3af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_default_from_file_TestXFail.test_strict_xfail_default_from_file.assert_result_ret_1_i": {"doc_hash": "a37d5ee7048bb5294ea14ef99340579211f4e0b8a3ca528afad7c5dfa581c07e"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFailwithSetupTeardown_TestXFailwithSetupTeardown.test_failing_teardown_issue9.result_stdout_fnmatch_lin": {"doc_hash": "f59244a221790dc6079973bbc09bcc67e4fc3b283389b21ec39af8ba6c2ef5dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkip_TestSkip.test_skip_with_reason.result_stdout_fnmatch_lin": {"doc_hash": "064da45e2c5627d42698ecf7ac9be8f284bcee3c1a889803eb7940e62d18475a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkip.test_only_skips_marked_test_TestSkip.test_strict_and_skip.result_stdout_fnmatch_lin": {"doc_hash": "ac2a324fb3d9690a17b8ec11f9182ecde3d97a477f8e52a57037667d77d36ab1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkipif_TestSkipif.test_skipif_reporting.assert_result_ret_0": {"doc_hash": "c3b9a65da6391471d526d4cf4c37cb201c019c916c7adf62165e0c7e901bb769"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkipif.test_skipif_using_platform_TestSkipif.test_skipif_reporting_multiple.assert_result_ret_0": {"doc_hash": "490198aeb0aa49c7deae59b976f709975b3072c9bbd5efa9e5dca8e6ea4fe514"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skip_not_report_default_test_skipif_class.result_stdout_fnmatch_lin": {"doc_hash": "348877f7ce327918cee7cc8f8c06de771a66b4255df40acfc69480b5a1d7e523"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skipped_reasons_functional_test_skipped_reasons_functional.assert_result_ret_0": {"doc_hash": "a2ed9dd4d7cf4fe97d2375d7060c1bafbff2fa5c86a2d81b2918c0bba856b703"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skipped_folding_test_skipped_folding.assert_result_ret_0": {"doc_hash": "2330429e75ae09c4654d186c4b01e26d5e1ab04708284d1ff0e14e93c90f1921"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_reportchars_test_reportchars_error.result_stdout_fnmatch_lin": {"doc_hash": "9f8e809921d4e61953796328aa1c1f5c8ea232ae465a2ad85987fd1b465a29cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_reportchars_all_test_reportchars_all_error.result_stdout_fnmatch_lin": {"doc_hash": "aed5d1d0f49a7efed5b2a74fece2a4ed2ef15d000d1f993c4faab80b2296fded"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_errors_in_xfail_skip_expressions_test_errors_in_xfail_skip_expressions.result_stdout_fnmatch_lin": {"doc_hash": "90be25b97929d79cac15591c9f7182f560229d6f4d128d6daed8c672a6d93e50"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_xfail_skipif_with_globals_test_xfail_skipif_with_globals.result_stdout_fnmatch_lin": {"doc_hash": "33d00330242fdb576352051d5a078d70efa88fba8a4ea30c869527c32e5f8640"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_imperativeskip_on_xfail_test_test_imperativeskip_on_xfail_test.result_stdout_fnmatch_lin": {"doc_hash": "67d391e0f66e9dc340d99b4b0211d94912bede99d8d4cb26d5da200c87763f5e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_xfail_item_test_xfail_item.assert_xfailed": {"doc_hash": "783d44c49eb730ea0b63a33f605022b39764905e41c780a27331a332b625fcd4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_module_level_skip_error_test_invalid_skip_keyword_parameter.result_stdout_fnmatch_lin": {"doc_hash": "11b31c8685813315850e64f0c9d8bdd3bd4989518b06dbf56d4bdf0b997d7f19"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_mark_xfail_item_test_mark_xfail_item.assert_xfailed": {"doc_hash": "2428b67d6f40914c0a2fa065fc1306357e9195ae23319a677f63025033b902dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_fail_and_continue_with_stepwise_test_fail_and_continue_with_stepwise.None_7": {"doc_hash": "96d0439012a370fd26565b590666b0fa20c481f378634dd2a4fe9203d45174c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_plugin_nameversion_test_plugin_nameversion.assert_result_expected": {"doc_hash": "48938370b4346b12da52733a111cfa3acb9dc8e9513bfe5e2262c2f0ebc7014e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal_TestTerminal.test_pass_skip_fail.result_stdout_fnmatch_lin": {"doc_hash": "f09411bb28bef83aa1a7bad55a4764ac8c90efdc1d1cbf9ed3106c70f5bf55fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_internalerror_TestTerminal.test_writeline.assert_lines_2_hello": {"doc_hash": "d8bbbf8931f65d4c3c3827141505c184d2791c0d16cd40bd137499aac67d0b69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_show_runtest_logstart_TestTerminal.test_runtest_location_shown_before_test_starts.child_kill_15_": {"doc_hash": "4afc4afa72dbf2f7a377fff0866b15d5833c48eb006011f07731a81098fb3b6c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_keyboard_interrupt_TestTerminal.test_keyboard_interrupt.None_2": {"doc_hash": "08567690f453dac25d35da47fd11ec698f68343eeef43ded99e850ffaf252c1b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_keyboard_in_sessionstart_TestTerminal.test_rewrite.assert_f_getvalue_h": {"doc_hash": "8aa40da48e47c10cc46c3df03ad5aff15697de163ed6406627aee89664e788a3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_simple_TestCollectonly.test_collectonly_simple.result_stdout_fnmatch_lin": {"doc_hash": "8ddbc64c0178a0f41baa0bd28081d7fc9dc96e453f93285d907c47512c90ff08"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_error_TestCollectonly.test_collectonly_more_quiet.result_stdout_fnmatch_lin": {"doc_hash": "6810fddfcfda4d5ec026682dcc8845898325e2d435a5ae8d4c95ba7246f50e18"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting_TestFixtureReporting.test_setup_fixture_error.assert_result_ret_0": {"doc_hash": "944720465e1faa77fdf41eecf7233f4f6a8634195508f827cc2035ace92bc84d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_teardown_fixture_error_TestFixtureReporting.test_teardown_fixture_error.result_stdout_fnmatch_lin": {"doc_hash": "567e28da9292c4f533536bd848b448f318df45d6fcabd8f6c988ee4f525f537e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_teardown_fixture_error_and_test_failure_TestFixtureReporting.test_teardown_fixture_error_and_test_failure.result_stdout_fnmatch_lin": {"doc_hash": "1aa3553d1c1c87b4391e23fd415b206e33271d011f9607a23eeb48bdcf11e703"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_setup_teardown_output_and_test_failure_TestFixtureReporting.test_setup_teardown_output_and_test_failure.result_stdout_fnmatch_lin": {"doc_hash": "18692cbe059df37f1be15308d19c3b365d8c5cc1a131eddb13b6e91285855731"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional_TestTerminalFunctional.test_deselected.assert_result_ret_0": {"doc_hash": "bf80fc7b5296b61e6c3eca214b66b4029055aaf5f06a950004030fc05ddc348e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_deselected_with_hookwrapper_TestTerminalFunctional.test_deselected_with_hookwrapper.assert_result_ret_0": {"doc_hash": "1fe501866c0c10e1107d03745051ecb00976e23b436ccb6fe8c4633e0f94d640"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_show_deselected_items_using_markexpr_before_test_execution_TestTerminalFunctional.test_show_deselected_items_using_markexpr_before_test_execution.assert_result_ret_0": {"doc_hash": "1f427c05fb1032495e54f7cdd61c6bbd0268acc16de8d7dac0c8d2a02d5bafc2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_skip_summary_if_failure_TestTerminalFunctional.test_passes.assert_result_ret_0": {"doc_hash": "b79383b179947b0868f60dbad0b40decf4c4fe75c7a365accfd4f78e71df095d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_trailer_info_TestTerminalFunctional.test_header_trailer_info.if_request_config_pluginm.result_stdout_fnmatch_lin": {"doc_hash": "0a379befc6b4e88e9d3947e7a3e2f2333a18bcf4eddcea0f67ff15b9e041f9af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_TestTerminalFunctional.test_header.None_7": {"doc_hash": "2b76505de8ad89174ef03512a2479a13670c677c13ddea042bb8f9caf4ed10c9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_verbose_reporting_xdist_TestTerminalFunctional.test_verbose_reporting_xdist.assert_result_ret_1": {"doc_hash": "56b10994bac774274115460020f80112e9ab3df43106b6044bafaa1ceac6575f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_quiet_reporting_TestTerminalFunctional.test_more_quiet_reporting.assert_passed_not_in_s": {"doc_hash": "c0c9d7f9c3b66b8f94a0ed18c081a442d6d54874c34e6e62a1b66c824f288236"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_report_collectionfinish_hook_TestTerminalFunctional.test_report_collectionfinish_hook.result_stdout_fnmatch_lin": {"doc_hash": "fd1fac287c79c3c4e549393ef4d17ba2ace97631200f09da63395553ee5c2b39"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_pass_output_reporting_test_pass_output_reporting.result_stdout_fnmatch_lin": {"doc_hash": "ae196b969bb5ce576a82c8f19c905b6e2b3b8a7b71b1379fa695473fda98486e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminalreporter_reportopt_addopts_test_terminalreporter_reportopt_addopts.result_stdout_fnmatch_lin": {"doc_hash": "1ce6b75b5154d41b87cf265cd2316a97099d3b718d5307f491a0afaac598e25f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting_TestGenericReporting.test_maxfailures.result_stdout_fnmatch_lin": {"doc_hash": "8dac7093807b660d3dafcabae4651bbe825a67c945b3154c7021312fb244cc34"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_tb_option_TestGenericReporting.test_tb_option.for_tbopt_in_long_sh.if_tbopt_no_.else_.assert_IndexError_not_i": {"doc_hash": "3b4d755e195a2d8568a6050b3712ed705308981ad2ca9664825943a64eba72aa"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_tb_crashline_TestGenericReporting.test_tb_crashline.assert_def_test_func2_n": {"doc_hash": "e5b531f1590fc75d831298388d15542f4e8d85cb5f48bd8c0b60ef4c2a21e368"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_pytest_report_header_TestGenericReporting.test_pytest_report_header.result_stdout_fnmatch_lin": {"doc_hash": "a108c2aa75f4386f0d8a70fe79d92c029f62354d10d258f5d9ff7aceddf50b06"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_show_capture_TestGenericReporting.test_show_capture.None_11": {"doc_hash": "722cab0495af2c13aaf45539e466384199a2d4e047f1510de12f62f9c65dae35"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_show_capture_with_teardown_logs_TestGenericReporting.test_show_capture_with_teardown_logs.None_11": {"doc_hash": "4a4a456e3ef07274625e442c04de0e49ec2f10a4db0d0de2e1ee4283135d98d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fdopen_kept_alive_issue124_test_fdopen_kept_alive_issue124.result_stdout_fnmatch_lin": {"doc_hash": "0ca3107da6ad6842ed019bafa392e9428e71775cae8a31da2893c598ec373ec5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_warnings_are_displayed_test_terminal_summary_warnings_are_displayed.assert_stdout_count_": {"doc_hash": "6adacdfcc387cc49214f97a88737660841be5de034ee523f037ba081fb2505a6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestClassicOutputStyle.test_verbose_TestClassicOutputStyle.test_quiet.result_stdout_fnmatch_lin": {"doc_hash": "62d91abc08e55e8bb1e8399f110e70ffd4f7757c0b57242d3e0b125be402793e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_verbose_TestProgressOutputStyle.test_verbose.output_stdout_re_match_li": {"doc_hash": "42dbec6d82d1d515dac02ea44920d2931e41fdda4632ff416437e6b139392f73"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_verbose_count_TestProgressOutputStyle.test_verbose_count.output_stdout_re_match_li": {"doc_hash": "ae53f2e9a932066c263320db7c5d6e94aa24ac3c1377c6a4c8856ccb7a26bf5f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_xdist_normal_TestProgressOutputStyle.test_xdist_normal_count.output_stdout_re_match_li": {"doc_hash": "fb386251fdeec13f9c4b8c1f3d1b714322f1c3a6d1f481314ed47c05bdb20ec7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown.test_teardown_simple_TestProgressWithTeardown.test_teardown_many.output_stdout_re_match_li": {"doc_hash": "1f0e855b4522c52be80e9049fa6a4db62e6cda6edbc6d35740e7ec3858281686"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown.test_teardown_many_verbose_TestProgressWithTeardown.test_xdist_normal.output_stdout_re_match_li": {"doc_hash": "5ca3938910a9e24058339de5b5ce510876191bd2e45b11deaff6d623fbbf4c11"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestTempdirHandler_TestTempdirHandler.test_tmppath_relative_basetemp_absolute.assert_t_getbasetemp_re": {"doc_hash": "b97d4a351bdda8899086b561f59035930ee7addbe076a1b8aa77e5e4534c76c4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_always_is_realpath_test_tmpdir_always_is_realpath.assert_not_result_ret": {"doc_hash": 
"c7688c91de8bc3af4c5a2d15583672316bd133d4d55dd72306641c58900b0b88"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmp_path_always_is_realpath_test_tmp_path_always_is_realpath.reprec_assertoutcome_pass": {"doc_hash": "6bd8a6bd6bf59a9944afa66e0e2f3b6ba6d1bc5347d1cb331f32f4ab40a2b9fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_too_long_on_parametrization_test_tmpdir_factory.reprec_assertoutcome_pass": {"doc_hash": "9abba3cae6a025f84a54609766a98dece769d5e0e90a934e6d92c6d09a8392cd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_fallback_tox_env_break_getuser.for_envvar_in_LOGNAME_.monkeypatch_delenv_envvar": {"doc_hash": "cb0c515f19df7262fecfeb68a23b6bee8711d33a84a2898968b48f39ef482095"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_fallback_uid_not_found_test_tmpdir_fallback_uid_not_found.reprec_assertoutcome_pass": {"doc_hash": "9df357b2dba4a703a4e5c6e3d6a679cd973810ca70c760c6daa6188ae27244b5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_get_user_uid_not_found_test_get_user.assert_get_user_is_None": {"doc_hash": "eab48172bfe9bb1e76616468f772ac1d2cb3ebcf930fc946d499ac59bd27a3ee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_test_setup.assert_rep_failed_and_42": {"doc_hash": "e275a18117c5451370cba4ad6ad178b033c38b605d9db039571030b1da289c38"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setUpModule_test_setUpModule_failing_no_teardown.assert_not_call_item_modu": {"doc_hash": "1333e55942edbac158619e027bd930b07a208471684958bb1d03adc37faf4e88"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_teardown_issue1649_test_teardown_issue1649.for_obj_in_gc_get_objects.assert_type_obj___name__": {"doc_hash": "e08660dd3e5635232128158ec2972e667e1411c50b178d56ab8da90ba3f28aae"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_setUpClass_test_setup_setUpClass.reprec_assertoutcome_pass": {"doc_hash": "287da69fbeea37856c1ce6d5d58836819aeb6d2ae37a36952fadb80bb26cf955"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_class_test_setup_class.reprec_assertoutcome_pass": {"doc_hash": "f9771ad617c5163728f05d62d5932a910697c4132f60681e4e2e38fca068a88e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_custom_exception_info_test_testcase_custom_exception_info.result_stdout_fnmatch_lin": {"doc_hash": "82dc57871f69e47ae30568ac11bf99b36ea8674745d5a84793557375636dfda8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_totally_incompatible_exception_info_test_module_level_pytestmark.reprec_assertoutcome_skip": {"doc_hash": "4ac2d8ea95d808b8daaddc0a9ffa157fa79698dbb8818c4bb72e3a20c83a44a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest_TestTrialUnittest.test_trial_testcase_runtest_not_collected.None_3": {"doc_hash": "7db1ae4b94f47f18990e67347e2637db368c0c78f7e1037e3104e2b372db2a6b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_error_TestTrialUnittest.test_trial_error.result_stdout_fnmatch_lin": {"doc_hash": "030933f15df6d3d90f9db554175df4cd53dfafba891fcd8059732dde14e30e69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_djangolike_testcase_test_djangolike_testcase.result_stdout_fnmatch_lin": {"doc_hash": 
"17dcdc7f0eee8c4847b032b6c15e144a6732855bcdf8e6c25ced98d4203ac867"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_setup_interaction_test_unittest_setup_interaction.result_stdout_fnmatch_lin": {"doc_hash": "33bdcc016e80a0c59d6cd8f5cf973457ea8242e66a21ee13dbbb889e7f6b22bc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_non_unittest_no_setupclass_support_test_non_unittest_no_setupclass_support.reprec_assertoutcome_pass": {"doc_hash": "c3be48537c26bd90f374f28d13589345fe3362fb05b7f253cd7523599091fb29"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_no_teardown_if_setupclass_failed_test_no_teardown_if_setupclass_failed.reprec_assertoutcome_pass": {"doc_hash": "110d2ca3148998d34bdf68396ed0db0b17e6f414afad317e46a1232cb1d3c940"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_issue333_result_clearing_test_issue333_result_clearing.reprec_assertoutcome_fail": {"doc_hash": "cfef2fdd595df8a0af575af5373aa327b710bb91712346fc8f048b8a4b2a8cc1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_raise_skip_issue748_test_unittest_raise_skip_issue748.result_stdout_fnmatch_lin": {"doc_hash": "47da4c5541339942d6eb9ccd8bf85d43b0c9dfe13a7e6aef589f107c379fa325"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_skip_issue1169_test_class_method_containing_test_issue1558.reprec_assertoutcome_pass": {"doc_hash": "dbf79fdaa86b4776631d5988dfe47293211aa2283e9998a1518fa28ed2b4cba5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_usefixtures_marker_on_unittest_test_usefixtures_marker_on_unittest.result_assert_outcomes_pa": {"doc_hash": "f3f7ac389253461cdf86012e4d9986a088d530505c80d00a3015e20008bcb70c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_normal_flow_test_normal_flow.result_stdout_fnmatch_lin": {"doc_hash": "3e62f80b1d9c7381749a2480f79bd6cd4c678b544774b4e34e806e4ed5173033"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_setup_teardown_warnings_test_setup_teardown_warnings.result_stdout_fnmatch_lin": {"doc_hash": "ccde23d9e176ccd2883be4a8401f52ff026872c5becb69527db74c2db258a108"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_as_errors_test_as_errors.result_stdout_fnmatch_lin": {"doc_hash": "0b7afdfca86d68a7c90a7f87ce84fc597076eee4cfb02dd58fb76e56d2575d12"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_ignore_test_ignore.assert_WARNINGS_SUMMARY_H": {"doc_hash": "4422482fa3bfbd7d6e10c232a27fa3a026533dc4629f3045867d2ce73bb3fca9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_unicode_test_unicode.result_stdout_fnmatch_lin": {"doc_hash": "d26dc617c55ab156c13aa9c95527bc81b860c13d3725b61f0c6c75677a0a84ab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_works_with_filterwarnings_test_works_with_filterwarnings.result_stdout_fnmatch_lin": {"doc_hash": "abed588e705869d444f07ade642d8944ff84bd7b5bdf0e4df730b34f1e68ef0b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_filterwarnings_mark_test_filterwarnings_mark.result_stdout_fnmatch_lin": {"doc_hash": "f69a5e3a34e56b538154274070950d9ea2eb705ea8e6b9684c49f298fe298bc4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_non_string_warning_argument_test_filterwarnings_mark_registration.assert_result_ret_0": {"doc_hash": "687bdf112f9b29a41f1d0a1d377f1a969c4401417ce9a445edfb0d796357052b"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_collection_warnings_test_collection_warnings.result_stdout_fnmatch_lin": {"doc_hash": "28c0ae94d744ea691e26d4807427ccd867cfefc2d35d875444e064e8604e2677"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_mark_regex_escape_test_mark_regex_escape.assert_WARNINGS_SUMMARY_H": {"doc_hash": "15065d2919c2223b214417dcf7e8f93402b0035788f9e0eeddd41d3cb60c7bce"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_hide_pytest_internal_warnings_test_hide_pytest_internal_warnings.None_1.else_.result_stdout_fnmatch_lin": {"doc_hash": "122725212192e6bfe7526bf67ffe32dadf4f9ee3396591231d112028198ccea1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_option_precedence_cmdline_over_ini_test_option_precedence_cmdline_over_ini.if_ignore_on_cmdline_.else_.result_stdout_fnmatch_lin": {"doc_hash": "569a4f69bbfd647a3aed2a36686991d01deb597f8e2c471251fe51c45aaa2475"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_option_precedence_mark_test_option_precedence_mark.result_stdout_fnmatch_lin": {"doc_hash": "2949398d3533af67265f95b65d8d8450a4b045c8e8a19d09e6e19e20afb8efc4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_shown_by_default_TestDeprecationWarningsByDefault.test_shown_by_default.result_stdout_fnmatch_lin": {"doc_hash": "c15d85bdc611b71a79a536c226b978a2973ea75d90d9bd8a4e8b75f2677ec397"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_hidden_by_ini_TestDeprecationWarningsByDefault.test_hidden_by_mark.result_stdout_fnmatch_lin": {"doc_hash": "42084dfe656deed8c9861cc22bbcbdec0b3d113b06cb2ac8bcba966afab9331e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_hidden_by_cmdline_TestDeprecationWarningsByDefault.test_hidden_by_system.assert_WARNINGS_SUMMARY_H": {"doc_hash": "0fe792e5440c8071aed57ea364fcf35f6edf04a105e05671362218a8ae674e89"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry_TracebackEntry.getfirstlinesource.return.self_frame_code_firstline": {"doc_hash": "8c845a5b9f27c564c34ac2fd4079c689a6133190fa89f168bfb9f89f44739d5e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_excinfo_FormattedExcinfo.repr_excinfo.return.ExceptionChainRepr_repr_c": {"doc_hash": "2e15c4121878179628e1052987e20b34a83ba2795495457b1a890304e7a01672"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionChainRepr_ReprExceptionInfo.toterminal.super_toterminal_tw_": {"doc_hash": "e1ba8c13da1aa25ca07393b8cfc56220377427b2a382390f06b919b5ee05e9f1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__Rewrite_assertion_AST__PYC_TAIL._PYTEST_TAG_PYC_EX": {"doc_hash": "290fb72cde32f0e0d03eec64d493f308f7c8490682b8eb2f5847906909538992"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_rewrite_asserts__saferepr.return.saferepr_obj_replace_n": {"doc_hash": "dc00d9e7bd1a480f7fea61c8865a65d754475fe226b1bfbef4658ea1d240ee34"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Call_AssertionRewriter.visit_Call.return.res_outer_expl": {"doc_hash": "7e8b9205d083690fe68eee25ce77437e6ea4a5c0ba25baf46fbeffe9ba308731"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Starred_AssertionRewriter.visit_Attribute.return.res_expl": {"doc_hash": "6f23588581851b1652bab8ecfdf1eb3951f8726e5bfcb321e2d00505107efc59"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__diff_text__diff_text.return.explanation": {"doc_hash": "d44c691a8e69705a53d58836042f5808e022de3a68774d927d603a55a1391930"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture.readouterr_CaptureFixture.readouterr.return.CaptureResult_captured_ou": {"doc_hash": "ca4c65e57b779881c27edfa60c351a59317453e1fad7113f9fa4c2ba25a37da1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture._suspend_CaptureFixture.disabled.with_capmanager_global_an.yield": {"doc_hash": "a482aabd386343eedc53a84946ae7204370f357831f8a9889b9ed3d2bdba1ce8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._get_pdb_wrapper_class_pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper.do_continue.return.ret": {"doc_hash": "f42c70f090d89cb1f2b551077a3657b60453646b92d1db4114cb245910279240"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper.do_c_pytestPDB._get_pdb_wrapper_class.return.PytestPdbWrapper": {"doc_hash": "86fd22c84577b2b3a60e17d504bbee4afc15b0a2d5b196c6fb526b7b36c4afaa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._init_pdb_pytestPDB.set_trace._pdb_set_trace_frame_": {"doc_hash": "7b7fe8b28f4e00a220a279925f3d710baf927ad153af238afa951c6d78ba7f7a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef.execute_FixtureDef.__repr__.return._FixtureDef_argname_r_": {"doc_hash": "19a0a2704f290258c8014608b4b89a94f213e084ddda573e2f3560f2ab4e1484"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager._get_direct_parametrize_args_FixtureManager._get_direct_parametrize_args.return.parametrize_argnames": {"doc_hash": "4f7efaaa994458ddf221930da645e25557e387f2003530b48cd7257e900278ce"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_xml_attribute__check_record_param_type.if_not_isinstance_v_str_.raise_TypeError_msg_forma": {"doc_hash": "c38e52f133e747f117c21bfaab5aa38d5bc3dc3a82a66b609361e8c51089929d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_testsuite_property_record_testsuite_property.return.record_func": {"doc_hash": "76637a6600b825e0ff64b1a75bb2dbd33d9459f8ffd0e7ea06333edc59d52872"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_addoption_pytest_addoption.None_6": {"doc_hash": "a136a308c2fcaea8c0015826c9435688a9c2349f7f33b2cf4adec24aa41f8dec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_ColoredLevelFormatter_ColoredLevelFormatter.format.return.super_format_record_": {"doc_hash": "caf327719d975f4b9ab28c6724a33f9fda9fb1a0c3abaf36f2a8eb6675312447"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._create_formatter_LoggingPlugin._create_formatter.return.formatter": {"doc_hash": "cd1a70c5689ee5f9c783ef3b2a7f0429e180e2811e3da9fa5b9ea157fbffab57"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet._parse_parametrize_args_ParameterSet._parse_parametrize_parameters.return._": {"doc_hash": "10a5af12c080856634c6eadc388a388b906f9a4839d42b6a5b8c10a596c4098a"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setitem_MonkeyPatch.delitem.if_name_not_in_dic_.else_.del_dic_name_": {"doc_hash": "3580bfdb17dd39b9a43d5b7830a8e5c47a3d2eae1a6cba98e5a65b63533b8125"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_TestReport_TestReport.__repr__.return._r_when_r_outco": {"doc_hash": "d4b6604a617387445a9cb34e88eefe32325262ff2aead55c5c17d07f9d122395"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_CollectReport_CollectReport.__repr__.return._CollectReport_r_lenr": {"doc_hash": "4f53f15a45923ff591344f303c53f0a105d74aba8b81bccd92667d744f9831fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_line_with_reprcrash_message__get_line_with_reprcrash_message.return.line": {"doc_hash": "c852fb33f77b1b35a064f23300fd0fd871ded48d06b76b9fe4004fd54cf132d8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory.getbasetemp_TempPathFactory.getbasetemp.return.t": {"doc_hash": "266924e0b2e38cf7d7c09cf375ddc17095a94222c5b1a0ed00a489d2b7eded20"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_os_TestGeneralUsage.test_root_conftest_syntax_error.assert_result_ret_0": {"doc_hash": "4568f05bc94eaef40055c4bc145411383045b62f800aec5fd7ee7adb7022aa4b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_reprexcinfo_getrepr_TestFormattedExcinfo.test_reprexcinfo_unicode.assert_x_\u044f_": {"doc_hash": "8108369be6601387babc284117e87881873d38ab806295c29b5e3bd54d4f76da"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_logging_test_change_level.assert_CRITICAL_in_capl": {"doc_hash": "7f380e9ebe2743a8d8f77742c009924d0a7c961be3e96798904015e54ffc6af1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_logging_test_coloredlogformatter.assert_output_dummyp": {"doc_hash": "ebbec7b943f0d0b15408e5f54b8c72cf6a0fe134b6ce8652553ee368eb89d769"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_test_colored_short_level_": {"doc_hash": "a4d0b73561e104fcb868c163eab784799ca7ac9597fc7c0ac13e95bd9adf9985"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_io_test_nothing_logged.with_pytest_raises_pytest.result_stdout_fnmatch_lin": {"doc_hash": "0a925edcbb92f9b72a7b66d9f0702552a16e476498598a548fa5608c03b4b530"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_collection_live_logging_test_collection_collect_only_live_logging.result_stdout_fnmatch_lin": {"doc_hash": "aa83687da71d04625364c5aa43b3b1166cf25799d8434562283dea2108198758"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_collection_logging_to_file_test_collection_logging_to_file.with_open_log_file_encod.assert_info_message_in_t": {"doc_hash": "0d3ce4d89d8213b1dd6ce3bd31e60f68d62fb7fa59118c086d6339f82e29ea75"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_set_path_test_log_set_path.None_1.assert_message_from_test": {"doc_hash": "4d10686020d8e6e2fc2d7cecbf18d36d641f476e77e0eb25a9299b022f15a75e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_colored_captured_log_test_colored_captured_log.result_stdout_fnmatch_lin": {"doc_hash": "c933979692e08146de7019396d818ba732facfe38057a4e041b8a68ccf61d3bd"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_multiple_packages_TestScopeOrdering.test_multiple_packages.reprec_assertoutcome_pass": {"doc_hash": "d586932f8c836103a509f29640f291633043f7aa60fb2f39dcb4b7e0496dc305"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_idfn_and_config_TestMetafunc.test_idmaker_with_idfn_and_config.for_config_expected_in_v.assert_result_expecte": {"doc_hash": "0b68f3e46de2b7237402ea7c22cdc873e8ba8f7622d70c51120264034c7dcb3a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_ids_and_config_TestMetafunc.test_idmaker_with_ids_and_config.for_config_expected_in_v.assert_result_expecte": {"doc_hash": "681fa6795a04c02e378aed3593dbddccc8c14e5277cd5e12eff7cc1015c6b71a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_subprocess__wrapcall.try_.except_subprocess_CalledP.return._": {"doc_hash": "1181678ac7199be64b91fcc5aa99fe9476de467731be1dafba6a8b02fd581fbf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_recursion_source_decode_test_diff_newline_at_end.result_stdout_fnmatch_lin": {"doc_hash": "8a1d834fda27c9bcc216f968bef6f2da230d1bdd0289fa1ad42cc1129c254b7b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assert_tuple_warning_test_assert_tuple_warning.assert_msg_not_in_result_": {"doc_hash": "b0cd9538b27a0980f5872611e67c308b5ed2fcc0396c18ddb8b399a95db060e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_ast_rewrite.return.tree": {"doc_hash": "8b4dff65e0e890e92331c828af985aaa8056ae53e5183b255da8c81a5ed091e3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_contains_unicode_TestDoctests.test_ignore_import_errors_on_doctest.reprec_assertoutcome_skip": {"doc_hash": "5b91bffa4304cfb60dbde9f8050bba037f13bbde2ff8de3cc545dcd600e981af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_unicode_string_TestLiterals.test_bytes_literal.reprec_assertoutcome_fail": {"doc_hash": "ef0c05acfea06965fc1b4e22564de68c7afb67219b1444fcf9f540096b3fd8b2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_entry_points.py__": {"doc_hash": "b00797b024992104c4a4b7f111430c584a35cf09449019315a54fed959a69811"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_testsuite_property_test_record_testsuite_property.p2_node_assert_attr_name_": {"doc_hash": "d44b00fd585ea18eab3dc96e6aaf170bbb6eda913ee9c0718765aa7a889f404d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_testsuite_property_junit_disabled_test_record_testsuite_property_type_checking.result_stdout_fnmatch_lin": {"doc_hash": "ab900760c54f4fdc5cb3fed12aca97860ba6ee53ad6c76498c797885cfd5f0fb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_escaped_skipreason_issue3533_test_escaped_skipreason_issue3533.snode_assert_attr_message": {"doc_hash": "118cc463ad7da664d9bc8406f6270019876e369098dbbbd80b96cbcb52b95f9c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrize_iterator_test_parametrize_iterator.result_stdout_fnmatch_lin": {"doc_hash": "635ea99f79754d7d49ff8710dac5a39883a29e641a9a2579e74171308fc90dec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_pytest_test_nose_setup.result_assert_outcomes_pa": {"doc_hash": "e05c53c359db07c387aaf431db5c6149cda6cf96cf4456f2221fdb27b3499f46"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_os_TestPytestPluginInteractions.test_addhooks_conftestplugin.assert_res_11_": {"doc_hash": "3b2d2a1b6aa52dfbe88b867a53f41a5dd9c568588e5760fc4b4785bbf2c12992"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_makepyfile_unicode_test_makepyfile_utf8.assert_mixed_encoding_": {"doc_hash": "6be12955648dd5bf6a71d379b1b4e7b2bc46868773f2f582a8eace0690157958"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_re_test_recwarn_functional.reprec_assertoutcome_pass": {"doc_hash": "8533474a63bbac1ef47776d1758d992805c52f4fca1904dc725456cd16175b15"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_pytest_SessionTests.test_basic_testitem_events._assert_colreports_1_re": {"doc_hash": "70cbea5c5dced54f299fb2a72955445a953bf2871d4b631040d3e184bf091bdf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fail_extra_reporting_test_fail_extra_reporting.result_stdout_fnmatch_lin": {"doc_hash": "2289b2f2e471d87002abf34c417f3e0db047f6e15deee2bdb87f41983b4101d4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_skip_reasons_folding_test_skip_reasons_folding.assert_reason_message": {"doc_hash": "d39baaeffb60593081c112bc7d1515fff1a623bc867013a12c1a79a8d66390cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_gc_test_simple_unittest.None_1": {"doc_hash": "023bd0e719601747d6e4f94f825ba2de33b079d2e335e63fd6de374be38cf32b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_new_instances_test_function_item_obj_is_instance.result_stdout_fnmatch_lin": {"doc_hash": "f9ad6ad21a41a5de565c2067363399a9292e8576a1775fa299d0d06903993610"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_teardown_test_teardown.assert_passed_skipped_": {"doc_hash": "d803e4dc8e5c5da284c719ec2f30caa7a1d77ced0aff84406b3edfda3ed22866"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_exceptions_with_skips_TestTrialUnittest.test_trial_exceptions_with_skips.assert_result_ret_1": {"doc_hash": "ec290275ca842c1bfecb05eb9eb93843400c5e159491434fb520ac9dfa46134a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_expected_failure_for_passing_test_is_fail_test_unittest_expected_failure_for_passing_test_is_fail.assert_result_ret_1": {"doc_hash": "033cf7e7b50b8f3218a87f2dfeeea3033ab405ab697748a765c2178f69114384"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_deprecation_warning_as_error_test_deprecation_warning_as_error.if_change_default_is_None.else_.result_stdout_fnmatch_lin": {"doc_hash": "8a7deefa8cd82ef0a104521b3dd2b62c6d75a9110ef6a0d591e6fe1b6dd6ac01"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.match_ExceptionInfo.match.return.True": {"doc_hash": "b706490ba600cd1f63d49f02dbedc3becd8b7908c6e1bece5643f414577c6ba3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook_AssertionRewritingHook._find_spec.importlib_machinery_PathF": {"doc_hash": "da1d5381b4e23d06de5199feab903400c39ef47b92f025155463f1a69f5ab96e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.find_spec_AssertionRewritingHook.create_module._default_behaviour_is_fi": {"doc_hash": "054783e4807a0be23cfacbf899b53da6f2426f64a3c56dddbd5f82c04e2b9b2e"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.exec_module_AssertionRewritingHook.exec_module.exec_co_module___dict___": {"doc_hash": "66b2c8b3966c6836c09a6e17eebbad843f59f3c83050ad917cfffa44f186a9c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._warn_already_imported_AssertionRewritingHook.get_data.with_open_pathname_rb_.return.f_read_": {"doc_hash": "53e115134167493f66294651bb8860121df2beb42fa59389f73f22e568478fdc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_issequence_isiterable.try_.except_TypeError_.return.False": {"doc_hash": "f42d73c3ef4b3cf8755504254cf7a5cc3bf6426d441844b08ca729216b1f5919"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_main_main.try_.except_UsageError_as_e_.return.ExitCode_USAGE_ERROR": {"doc_hash": "e118232a29b46ddeb120de77ec81b8ceba7f50ed6c8a1bc68f4c95d09510ebc0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager_PytestPluginManager.__init__.self._configured.False": {"doc_hash": "f325ce1674cacecbd9b4e4e36fb72ac0a0eeaf7513d6152b5d671d3a8d746502"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.register_PytestPluginManager.register.return.ret": {"doc_hash": "b8ce2b05538d2f542ec6e2e2579e7dbe0ef9b316516ef41a2f5b5578c25163cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.getplugin_PytestPluginManager.None_3": {"doc_hash": "ad4d3bafd2247b8f51d20cda259f484640e90b63378398c954928e27db495b0a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser.if_sys_version_info_2__MyOptionParser.if_sys_version_info_2_._parse_optional.return.None_arg_string_None": {"doc_hash": "c8abffbbecfd3ec9b69dd6ccd06a3c5edc74eb3178f56c1c2bc5545088584d7f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_report_all_failed_collections_initargs_TestGeneralUsage.test_report_all_failed_collections_initargs.assert_result_ret_Exit": {"doc_hash": "db5cb07ebc65ac471a10b8caf1075138da9e98cddbf363048381b6939803cf8a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_package_TestInvocationVariants.test_cmdline_python_package.result_stderr_fnmatch_lin": {"doc_hash": "feaa596f7ae97722099f38b80d9ed79dcb1e18cbd6247bb1fbd3e11077aed006"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass.test_setup_teardown_class_as_classmethod_TestClass.test_issue2234_property.assert_result_ret_Exit": {"doc_hash": "32466615936498571a300708963a5bc39202b93aa87e916293c04c2d8f73902e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_customized_python_discovery_functions_test_unorderable_types.assert_result_ret_Exit": {"doc_hash": "2e418739534172d9ca350433f70c251a6aec700811a58e7b030eea7c411cecc8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_rewrites_plugin_as_a_package_TestAssertionRewrite.test_rewrites_plugin_as_a_package.result_stdout_fnmatch_lin": {"doc_hash": "827be1522492aa3a3272eb2c4da3f8c6108721c733c7ae8e6670c4c52fb97dd3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_cached_pyc_includes_pytest_version_TestRewriteOnImport.test_cached_pyc_includes_pytest_version.assert_found_names_pyc_": {"doc_hash": 
"0496f9a40a0360ecb24cb78a321c8b1d4d09b7df44bdc1955f237f4d297b3344"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails_TestAssertionRewriteHookDetails.test_write_pyc.assert_not__write_pyc_sta": {"doc_hash": "d376f8a2cdcf12448f9ab64e890f8245f19a331bf2118f3f4bf30c274212a426"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_pattern_contains_subdirectories_TestEarlyRewriteBailout.test_pattern_contains_subdirectories.assert_self_find_spec_cal": {"doc_hash": "3478c98c4fa365ce52abe60f1f6a2415c90c92d20f67a7acb11f86c5a9b7ec4b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_no_failures_behavior_all_passed_TestLastFailed.test_lastfailed_no_failures_behavior_all_passed.assert_result_ret_Exit": {"doc_hash": "490ad83f36aaf99b3faccc2afd200c1b7db6eadf4a57bbbcabbc09bd23ba92ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigCmdlineParsing_TestConfigCmdlineParsing.test_absolute_win32_path.assert_ret_ExitCode_OK": {"doc_hash": "4c6bf6bd2db87e937a6daaa0a79f44a9859bcbdaa429a8cca044d2d380222fcc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_symlink_files_test_conftest_symlink_files.assert_result_ret_Exit": {"doc_hash": "e53afcbbdb34529ae4d6c193936bd267320446ebbea97f0d0eefedf78275c0ce"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_inspect_TestDoctests.test_collect_testtextfile.assert_len_items_0": {"doc_hash": "4930b5a63c818d6994e9c95370ec392c9e406d17d5ee54fd3b19cf5b40e6977c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_doctest_mock_objects_dont_recurse_missbehaved_Broken.__getattr__.raise_KeyError_This_shou": {"doc_hash": "47f379dc3975766988caa32bdd440cc6ecd5e9ec2fca6341a5b6941725914e1f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_timeout_test_timeout.assert_result_ret_0": {"doc_hash": "369252568848fcd0ec354fb61d324b381d424bcc84a4910fc5bc6d28af589a4c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_fail_at_collect_test_parameterset_for_fail_at_collect.assert_result_ret_Exit": {"doc_hash": "7c13db1a00331fadb71cc35d4f3f2d055cf1040cf307212721549bd9e594a3de"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_deselect_test_sessionfinish_with_start.assert_res_ret_ExitCod": {"doc_hash": "a48efd36f8628610533d9512b40aa9ff24142029ba53368cd681df9a790f96d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_tbstyle_short_test_traceconfig.assert_result_ret_Exit": {"doc_hash": "eb9c2c0b2410df2470826b15a9f80375bf994bbd4f5b4a9dd2f848f1946484cd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_runTest_method_test_isclasscheck_issue53.assert_result_ret_Exit": {"doc_hash": "1093b4e100581ac6b8a20d61fba912ec13fdf2b586c352cf1b1b61fc2aa13026"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warning_types.py_inspect_test_warning_types.assert_warning_class___mo": {"doc_hash": "f01d1020c4f4a1ad57956ad3373204f56f7eb35ac752464cf2c37ea910a50530"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warning_types.py_test_pytest_warnings_repr_integration_test_": {"doc_hash": "289d7cf01b0d7aff505ce45ada2d352afe933bf6a7aea3f174b0a2cd76c5ff03"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo_ExceptionInfo.from_exc_info.return.cls_exc_info__striptext_": {"doc_hash": 
"dfbd607e0fc9c70279f88092e9d8f778059d009fe98f4e046521623d1c66b29c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.exconly_ExceptionInfo.exconly.return.text": {"doc_hash": "ea5de6c9ef15098bf86951ece27737b3a02855817a82089f13f945afad7612c2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.errisinstance_ExceptionInfo._getreprcrash.return.ReprFileLocation_path_li": {"doc_hash": "57f4bcff84e55ef028c69f31ff2ae231703d8569a5bb940dbcc2385d0e970cc0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_pprint__ellipsize.return.s": {"doc_hash": "2817841cc23f8571524cafcb967376590ab4f28657aec49828c225efae3c3468"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_SafeRepr_SafeRepr.repr_instance.return._ellipsize_s_self_maxsiz": {"doc_hash": "c43c9d37d8eedc2dd7b20a821ae4c29dac2b550c7c6c780b8738433a5b3f032f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__get_assertion_exprs__get_assertion_exprs.return.ret": {"doc_hash": "5a91d2a6e52f6ba840fcb31abb5b937caa45c9939ed96b6b02c661beaf21b84b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter_AssertionRewriter._assert_expr_to_lineno.return._get_assertion_exprs_self": {"doc_hash": "412bfeb53a97031ab5dd11b38bad1c6ad5a3fd3499d9294d75e2e5c9476e0bf8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__Utilities_for_assertio__assertion_pass.None": {"doc_hash": "922b9fa9b99de1065689895a6ae87abed6e6f553ae9d2c5890a974e55bdfaf90"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_sequence__compare_eq_sequence.return.explanation": {"doc_hash": "bef2230896f0da9e61f40f3bb4e3142fe5f2e802eba7a143c6d3a21a7fb90390"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_num_mock_patch_args_num_mock_patch_args.return.len_": {"doc_hash": "fed4ab3d4dffb6c3f4056e0fd3ffda54a1afae6b50625f8f4552c7d8e5656193"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_get_config_get_plugin_manager.return.get_config_pluginmanage": {"doc_hash": "8b2e7379b93464b26be7e7ed14a664c201856fd3685e845f3c5f6be1c1b5c1f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_get_dirs_from_args_CFG_PYTEST_SECTION._pytest_section_in_fil": {"doc_hash": "9ce11bf8cade218736898b49720d85ecc85901401618dc74ea81eda85222586a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__patch_unwrap_mock_aware__patch_unwrap_mock_aware.try_.finally_.inspect.unwrap.real_unwrap": {"doc_hash": "3a597ed355416660c080bd6995577e44f417383610f1a8869b783dc45ea6c992"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node._repr_failure_py_Node.repr_failure.return.self__repr_failure_py_exc": {"doc_hash": "51f1247fcfa2be3b73ce689502ece2d764575fb79e31aec0e709fcc0ba1315bf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_atexit_ensure_reset_dir.path_mkdir_": {"doc_hash": "0e005977832c55733e317723c40de4addfd5308101111df049707b5e64015974"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__call_with_optional_argument__get_first_non_fixture_func.for_name_in_names_.if_meth_is_not_None_and_f.return.meth": {"doc_hash": "3007394e7cceb262965a779d462ac97be6abc63995c0631cc158a9bb36ed3302"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_approx._Delegate_the_comparison_approx.return.cls_expected_rel_abs_n": {"doc_hash": "b0337886735e90e138a6e189b1341b3877ca0bbc492179ef46cc209e3bb747b6"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_RaisesContext.__exit___": {"doc_hash": "b3e8bb44869237d42e30ad13cf14ee45571f32434756c4d276b6dd8086ece7d2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsRecorder_WarningsRecorder.__enter__.return.self": {"doc_hash": "4884ecf8b37c7d12736536df5b1e416b0e72c3061e026964fc579c0a2ebd82ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempdirFactory_TempdirFactory.getbasetemp.return.py_path_local_self__tmppa": {"doc_hash": "51a43a6053a2b141915fd742ca3db96c3790828dee7edb8d1ec23e9e86f82086"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_get_user_pytest_configure.mp_setattr_config__tmpd": {"doc_hash": "f47ba159a71cc50bcafc86d21545142d9d586b912fedbbd3e89df9f98dae0166"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction.runtest_TestCaseFunction._prunetraceback.if_traceback_.excinfo.traceback.traceback": {"doc_hash": "bcf17a8a2fa7bc795b7109b840d24c5335ef301d10277b79ddf5fef8f71fcacf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/test_fixture_named_request.py__": {"doc_hash": "1efdcabbf071c1ab822817e580e0b135a521368a8a494e77bac90eca21e11ea4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_buggy_builtin_repr_": {"doc_hash": "aac3b6fe7a1e64acb3e1c0956f7eab3ee51e23fcac5ce25621b71571b0f4f917"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_TestRequestBasic.test_getfixturevalue.assert_request_in_item_": {"doc_hash": "eac2ea75cbe9dc20c0421ccff0a8cf8bf63138112589b0686a5807e2a71ea955"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_factory_uses_unknown_funcarg_as_dependency_error_TestFixtureUsages.test_factory_setup_as_classes_fails.assert_len_values_1": {"doc_hash": "1703b9de5f8c05ea1eb6c2c2a6ef5dec0435ee4b4af239300173890f92c88d07"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_sentinel_check_against_numpy_like_TestMockDecoration.test_mock_sentinel_check_against_numpy_like.reprec_assertoutcome_pass": {"doc_hash": "6cb038164fc64f6c837cd1a7bf7fbf6aedb2b3f612a724c2aa1385504b692adf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_noclass_TestRaises.test_no_raise_message.None_1.else_.assert_False_Expected_p": {"doc_hash": "294fcb4ab41173b6093b34393540e18330badbe6c24f05dd8908c3d92a384046"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_cwd_changed_TestEarlyRewriteBailout.test_cwd_changed.result_stdout_fnmatch_lin": {"doc_hash": "b65f75763d9c2e16022ef6ebb8b713e0ef15e52d43f3b787224006cc43e1fa8a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_call_TestAssertionPass.test_hook_call_with_parens.result_stdout_fnmatch_lin": {"doc_hash": "e79e90e9ff131e00fe376ce69eb0484d49d4875a78c2e0a7fbfb22549713abf5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_not_called_without_hookimpl_TestAssertionPass.test_hook_not_called_without_hookimpl.result_assert_outcomes_pa": {"doc_hash": "d1bd2ff7e0a10b3a8827adc4dc87973769ec6a8a810ea88e09da10c18920d33d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_not_called_without_cmd_option_TestAssertionPass.test_hook_not_called_without_cmd_option.result_assert_outcomes_pa": 
{"doc_hash": "d96d6275cab16086c633ffffc8c5acaad763eb599006777848e0fdaff56a1883"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_ini_parse_error_TestParseIni.test_confcutdir.assert_result_ret_0": {"doc_hash": "0375256ae6c85bc2b4be13c16f49081cd16e1b74b567d8d51495b442c657d395"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_no_terminal_discovery_error_test_load_initial_conftest_last_ordering.assert_x_function___modu": {"doc_hash": "c78c7870c604b336d8f6437d489e1785aaac4bed9d3f3ff7876dc88074bec15a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_get_plugin_specs_as_list_test_get_plugin_specs_as_list.None_5": {"doc_hash": "2fed6096f733c720cb67f87dda6ee35b38d94f72c6e8c66641b3e2fccedbec55"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_config_blocked_default_plugins_test_config_blocked_default_plugins.None_3.else_.assert_result_stdout_line": {"doc_hash": "396b999d104bcbd3c8d2872c134ed62a79e48dc69aaafee70bb22911b875023e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported.res_stdout_fnmatch_lines_": {"doc_hash": "a4808e7f6efd0dc433cbe49b9b45d725068a5bbc4052070a28b5602b68174592"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs.if_use_pyargs_.else_.res_stdout_fnmatch_lines_": {"doc_hash": "bc9e7b9e405752e6bf8a5be273301fbcbed9a44444865acac8ad968af9c9e53c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest.res_stdout_fnmatch_lines_": {"doc_hash": "c6d1ad0dc25b0e550f3f198063dbf2eae80e8ad17f36e0d84f810258e04208f5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_precision_TestLiterals.test_number_precision.reprec_assertoutcome_pass": {"doc_hash": "52f179bf4196ba051eb7a903aa0bd63f9cd086b6a56a6eb2bba83da7b36f28e8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_non_matches_TestLiterals.test_number_non_matches.reprec_assertoutcome_pass": {"doc_hash": "1bb08b0bd33ed09bd5bf3531fb695694c16ef1d0ff514d022bca3a47200a7b4f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_and_allow_unicode_TestLiterals.test_number_and_allow_unicode.reprec_assertoutcome_pass": {"doc_hash": "b13db451b2d339f0a8e86f004fdf0c392c9f4e7c88ed60ecdef8cb4770eecf27"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_DomNode_DomNode.next_sibling.return.type_self_self___node_ne": {"doc_hash": "56c8e54888c0c58b0ab93e9467c0415d0ec1cd54f4c8b91abd67f437d6553f13"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_root_testsuites_tag_test_runs_twice.assert_first_second": {"doc_hash": "db5ca725d93f855fc3316a0338f70ac3841e402948271dc42d11e6b031a1f374"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_runs_twice_xdist_test_runs_twice_xdist.assert_first_second": {"doc_hash": "a28ad9d446cfdd9518b42182f415a4404fd62744fc0d657f2e6838f008f9c422"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns_TestWarns.test_warning_tuple.pytest_raises_": {"doc_hash": "2ef0c7d87c3dbffe86e3ad3ac3f75b46ff57d9f8994202ad1163a3e0d662ea0a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_stop_on_collection_errors_test_stop_on_collection_errors.result_stdout_fnmatch_lin": {"doc_hash": "28d0c00488d3d92771a13c48be486e168839b101575b194305545aa1fdbc49b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_xfail_handling_": {"doc_hash": "a313efb901579541975ce34aa92d6f8b2d99cf11446d6a175e584efcb24491a7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_os_FakeConfig.option.return.self": {"doc_hash": "d3bc0a2e857066893f9f14130e14220a6fc08b486a46e9c1e0f6e3676e53d22f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestNumberedDir_TestNumberedDir.test_removal_accepts_lock.assert_folder_is_dir_": {"doc_hash": "c53ca9ab5cb95391ed775b10d4210d3c126189bd5a425af73b1b356d9d00a738"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestRmRf_TestRmRf.test_rm_rf_with_read_only_directory.assert_not_adir_is_dir_": {"doc_hash": "bc469bb919c8362e429d8393c94d686e575cc53cf5982c7a6df782e5534c96b2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestRmRf.test_on_rm_rf_error_TestRmRf.test_on_rm_rf_error.assert_not_fn_is_file_": {"doc_hash": "befa48696e63a3e3e2a7169c5527bafe52ab7d21a86861dafbd25db3dfb99f78"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_attempt_symlink_to_": {"doc_hash": "e7f818d1b9056257e76606e94c2d7c79ce72c543a07c8d06ab145d2655a6616d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_error_message_with_parametrized_fixtures_test_setup_inheritance_skipping.result_stdout_fnmatch_lin": {"doc_hash": "fb539ce30360bf99a5b6999f8447d809aed1c0f2e3fc2d5a49557d1bd3169cf2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.for_later_ExceptionInfo.__repr__.return._tblen_format": {"doc_hash": "1f5747971f2371a2d9feb1a46735fd1c195e7afafbfe6ff4378ad925c4184ed1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__call_assertion_pass_set_location.return.node": {"doc_hash": "276e6d5e0d97c6cf16fdb67b729e203847954d87e3a045b66850209c7951c832"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Compare_AssertionRewriter.visit_Compare.return.res_self_explanation_par": {"doc_hash": "3b594f0d10b71fffe8b1af602959d3d34bd367562e60fa61c7ea0e8963ad6529"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_try_makedirs_try_makedirs.return.True": {"doc_hash": "46e024cfee2ca7414662d3295da580dcc6b41739861e4c5633cf40fa62288ce6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_get_cache_dir_": {"doc_hash": "69f29106c2080aa893d3f41a15958c5ad68bf0531e59f40b5065dbed188bf8c2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_verbose__surrounding_parens_on_own_lines.if_closing_in_.lines_lines_closi": {"doc_hash": "36c274a08fa117039ad0799522bf14d808b1cee3753e8eea1652237ffb60b718"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_iterable__compare_eq_iterable.return.explanation": {"doc_hash": "90e8275f51ce8d01a07185cf3b2192101fcae164c37990af80157410154b796c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_set__compare_eq_set.return.explanation": {"doc_hash": 
"6c775b83c5e89e002ea26eb3fa25779e42c2f65157f2d2086a63c76d1e2d3764"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_dict__compare_eq_dict.return.explanation": {"doc_hash": "9bccecaaa6b6adaef2e8fc3b97aaded04d7bf4c075107b92744bf8e665cc4c97"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__readline_workaround__readline_workaround.if_sys_platform_startswit.try_.except_ImportError_.pass": {"doc_hash": "78dc4cc9ed83bb77a980a6e53085542e0cb1464933b5e94de1935eabe3f54f40"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._try_load_conftest_PytestPluginManager._rget_with_confmod.raise_KeyError_name_": {"doc_hash": "76e5456a9f10e4c9b789126d8e2cd33fcec98cdb8ac37d354e41092cc51f7dd9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.addoption_Parser.parse.return.self_optparser_parse_args": {"doc_hash": "128727c52cdba7d2a4b1005a3b12faa6f6a3d415b9a889567f1835c1092390a8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser._getparser_Parser._getparser.return.optparser": {"doc_hash": "c88034b0d774a8186c57af22a57417d55ce455eed20c6cba60f34c17689e0f4c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_DropShorterLongHelpFormatter_DropShorterLongHelpFormatter.__init__.super___init___args_": {"doc_hash": "594ef6d72f4810f0cc83778a91946b829f299221597684b307e6fa5adbcbf6d2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_DropShorterLongHelpFormatter._format_action_invocation_": {"doc_hash": "8480de7012321e306bfe2bde4a9fc96be08a3118c8057a4864278fa7ba2666ec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_runner__get_runner.return.RUNNER_CLASS_type_ig": {"doc_hash": "0840e5f421a9b6c1e7292cb5332ffeed5ab5e649fc10ad0e1148f173774cc572"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class__init_checker_class.LiteralsOutputChecker._number_re.re_compile_": {"doc_hash": "0e6b98e5cd81863f3fb15146ef3a57bb469218938bf0208b135d4d22d1409389"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class.LiteralsOutputChecker.check_output__init_checker_class.LiteralsOutputChecker.check_output.return.doctest_OutputChecker_che": {"doc_hash": "f412d92f14e985f2138a29adf02baea30ed64a9296e9b930c66039b423317756"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class.LiteralsOutputChecker._remove_unwanted_precision__init_checker_class.return.LiteralsOutputChecker": {"doc_hash": "afa3a3b5be3bb848b7a94cc112bacd03dadcfa6571e64c9bd4d29d2107f0736f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_report_choice_": {"doc_hash": "cbefa40d420665486a92c77cd7a03bb9a6cf5facf13733a233eeea4e4591d0b3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_yield_fixture_yield_fixture.return.fixture_": {"doc_hash": "35aadbd774fefa99da8e1b25f64815c8b02d2867dff48cd0a67f6a80e55c90fb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline.format_get_option_ini.for_name_in_names_.if_ret_.return.ret": {"doc_hash": "c829d0a459cdf3178fc12d3be974a20b713700b180944ebef8c77d8115776971"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_param_param.return.ParameterSet_param_value": {"doc_hash": "bc383a5bdb3b15b4dcce0b679c27da2a4c60f4fbd5746ea0afbf931d779b4fdb"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_deselect_by_keyword_deselect_by_keyword.if_deselected_.items_remaining": {"doc_hash": "2b7e9daed4f274c44514a7d65a2598db47772798d27704a7bbd8ee1aa08e60a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Item.add_report_section_Item.add_report_section.if_content_.self__report_sections_app": {"doc_hash": "dca4895d206f2fd459a23c74bc9c23ca66c102a3b4aec63750ceb094c1ba1d07"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Item.reportinfo_": {"doc_hash": "d35a9ea125be26e41e1f635fc2ac13a8bec687ccac33f0f8562cf479fb5d4fa4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_create_new_paste_create_new_paste.if_m_.else_.return._bad_response_invalid_fo": {"doc_hash": "ecbec8f156489dae52b7fd067917b7d62245ec7e07bf7927cce4d076eba8a85d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_on_rm_rf_error_on_rm_rf_error.return.True": {"doc_hash": "a0d3860cb2c323d52280c99997614f561d5ca1f390bf33ce606e09e53f2a2598"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.countoutcomes_HookRecorder.clear.self_calls_": {"doc_hash": "042f6840291667521fcb98f44c99535d49d52b7ca8494bb40ec90404e7814cf9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._no_match_line_": {"doc_hash": "220b551ad45580f726510327736ef5dc00b65915ff4108fbe0b6a02829137d05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_CallSpec2.setmulti2_CallSpec2.setmulti2.self_marks_extend_normali": {"doc_hash": "d8e67497674755189c017fede330b4edbfd079365e06ca2119ff79edaa69f029"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsRecorder.__exit___WarningsRecorder.__exit__.self._entered.False": {"doc_hash": "cdab0b33f53be64fecbe7fbfc4a5c7ce9ac20d381308fd8e10b8728efb884dc7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport._get_verbose_word_BaseReport._from_json.return.cls_kwargs_": {"doc_hash": "f7c04c5d7cad25f898390f293d943c68582bf3a580b06df0169b27101022b9fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json.d__report_to_json.return.d": {"doc_hash": "f2fa25caf956cb1287341e7f7f021423af841322ae3e0ff6e2aa5f8fc47a3e94"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_kwargs_from_json__report_kwargs_from_json.deserialize_repr_entry.return.reprentry": {"doc_hash": "00765d51fa52a693c752865718b729e9da75ee8477acf0f86efcc42f51e47626"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_kwargs_from_json.deserialize_repr_traceback_": {"doc_hash": "b2a96206015415e92b9cdfd1df42594335f3ff89192b32b130f93db5edde10b9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_MoreQuietAction_MoreQuietAction.__call__.namespace.quiet.getattr_namespace_quiet": {"doc_hash": "7b4f30e9a489eeaca0280282136c1f028774d37da3c841744f8756b7d7a8b8ba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_errors_TerminalReporter._outrep_summary.for_secname_content_in_r.self__tw_line_content_": {"doc_hash": "0497591243a074e299346611b62af8ebd5c50514c351ebc9bdaafcbd2782d498"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_stats_TerminalReporter.summary_stats.None_5.else_.self_write_line_msg_ma": {"doc_hash": "852a842ff53a2aa7a33087a0df5d1a934e46c1b3112f1bf978f215b66685a53c"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_from_typing_import_Any_PytestDeprecationWarning.__module__._pytest_": {"doc_hash": "d379e99679c0e39159bae71a27cb2709cb823114f8525739746bcaa5dea7bda8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_PytestUnhandledCoroutineWarning__W.TypeVar__W_bound_Pytes": {"doc_hash": "8f08dc04a575782040ec555221d2c5f24229994ba4a21bf56b94e61dcd802050"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_UnformattedWarning_": {"doc_hash": "03b3330dc3a01c904bf9b2565e9d83f53ec90a10ff9bd94e9f298e8d50b15ff3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_not_collectable_arguments_TestGeneralUsage.test_not_collectable_arguments.result_stderr_fnmatch_lin": {"doc_hash": "245d8378dbeaeb4a2b9c220824775c5d5018e06201e06c23491844236429ed51"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_zipimport_hook_test_zipimport_hook.result_stdout_no_fnmatch_": {"doc_hash": "5b68997296201c95a85fd3d3050f4ff8a8a3da79d25167453b842e729b73e7d7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_warn_on_async_gen_function_test_warn_on_async_gen_function.assert_": {"doc_hash": "11a6dee51884c8a14b98e991b4412b855bea9d079f58c54cb842108a27da43a6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_for_later_test_excinfo_no_sourcecode.assert_s_File_str": {"doc_hash": "128974cfbfffa3066c3b98e50f434a982025ee24e710e216d023df1aa9196dc3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_entrysource_Queue_example_test_match_succeeds.excinfo_match_r_zero_": {"doc_hash": "c0935f1258840e2443d93294bab2c7ed14a81938d5193707261270e6249f448c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_match_raises_error_test_match_raises_error.None_3": {"doc_hash": "fbd92b8995de3e386629dc7e6b070cc047ff78b2fab6334f667f4ef807a94e39"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines2_TestFormattedExcinfo.test_repr_tracebackentry_lines2.assert_tw_mock_lines_2_": {"doc_hash": "b29c30f52af4ee35886e6e123a921884ff6d2da224eaffd201bb941567a43287"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines_var_kw_args_TestFormattedExcinfo.test_repr_tracebackentry_lines_var_kw_args.assert_tw_mock_lines_0_": {"doc_hash": "70086ae48dc1d0f3f2838f3ba3bafaf842a30bfcf535ac3e411823e1dfafba28"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_TestFormattedExcinfo.test_toterminal_long.assert_tw_mock_lines_12_": {"doc_hash": "d447f10897ffdf5127e5b0562b9d70e142f4be8df93c242137810c64259d911b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_missing_source_TestFormattedExcinfo.test_toterminal_long_missing_source.assert_tw_mock_lines_10_": {"doc_hash": "4a38c74bf3365fdf4871f93ff7859496fa6a5a8f5f26381aa0d1c7007f25d8a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_incomplete_source_TestFormattedExcinfo.test_toterminal_long_incomplete_source.assert_tw_mock_lines_10_": {"doc_hash": "caf017aa7fe54ed120d0bb6f0398c291c9a6564311ea72b4ef44c09a6cb06bbf"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_traceback_repr_style_TestFormattedExcinfo.test_traceback_repr_style.assert_tw_mock_lines_20_": {"doc_hash": "f99806b1a251f6ae4ad7f6e2e7b6ec2caa46bfcc75e4dee4c1ac2912f3ef3bda"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_TestFormattedExcinfo.test_exc_chain_repr.assert_tw_mock_lines_47_": {"doc_hash": "3355791efd58303506e98af97adc2e7cbabc968225e7b139b5bd731f0fe01845"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_repr_chain_suppression_TestFormattedExcinfo.test_exc_repr_chain_suppression.assert_len_tw_mock_lines_": {"doc_hash": "0f4a969d534b027463e96cadc8ee3b3e29a02e6695047a5aa11689b5489ace1c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_repr_traceback_with_unicode_test_cwd_deleted.result_stderr_no_fnmatch_": {"doc_hash": "791debc87e498dfa95edd21699d03ebc8a06a31bf867ade354420745beed0aa9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_tw_mock_tw_mock.return.TWMock_": {"doc_hash": "f35bf2c26156b051cdb018a681bf696e79c526c9d1d92243da25069a99de4299"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_pytest_test_maxsize_error_on_instance.assert_s_0_and_s_": {"doc_hash": "4f8a44ce684fe6be79519a3c00a6906fb00d84b38593863066d7962b68cb4ea2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_exceptions_test_exceptions.assert_s2_": {"doc_hash": "842ef84dbbfa9baf28db1cf068bb826e65b93ec6f89d6fe39aaf9ccce4b1e472"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_baseexception_test_baseexception.None_5.saferepr_BrokenObj_Raisin": {"doc_hash": "b4e1eaede6ab010943f0193bb835093ed1d2f6512fe66aeb9ea440ce4c10444e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_change_level_undo_test_change_level_undo.result_stdout_no_fnmatch_": {"doc_hash": "15eb9e96a8e71952a6fc198c62b86c9ce8549e26b34f8d35785e64e3ca0833f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_test_multiline_message_test_multiline_message.None_10": {"doc_hash": "ef9b9bd6c99b31581fca8a49ce2c4690dfe458e0b17f716b02d0634be13f2be4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_level_log_level_interaction_test_log_cli_level_log_level_interaction.result_stdout_no_re_match": {"doc_hash": "e94fd03c0c66db060dad71aab68c9f8aa017b2d97008eaf81b9b0d538b4e5bc0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_level_test_log_cli_level.None_1": {"doc_hash": "428edb168b3dd2f2e707bb79a7874faa9a3c916381b6863c8496498de7426736"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_operator_mocked_doctest_runner.return.MyDocTestRunner_": {"doc_hash": "134077874edbb55725a025ed298ceb35775e9f5aa36a5c8ee7aa23a8c394290d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_repr_complex_numbers_TestApprox.test_repr_complex_numbers.assert_repr_approx_3_3_": {"doc_hash": "9c089e0e64670e94ff919a99126f7601bf04fcbb5c3340a6dee1be2f7b281d89"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_getfuncargnames_methods_test_getfuncargnames_staticmethod_partial.assert_values_arg1_": {"doc_hash": "b92e523b15f02200f95894850716568b6ac382ce1648eedf63e81ae73ed4e295"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_lookup_error_TestFillFixtures.test_funcarg_lookup_error.result_stdout_no_fnmatch_": {"doc_hash": "3b63bd6b7442d090dc0ca71a976018bffdbd19c79740a653ed9cf7495ca45654"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_scope_TestFixtureMarker.test_dynamic_scope.None_3": {"doc_hash": "e4a750696e07f2c7353604c2f82c44d1f151b97cd35388ab50b9c9c6964b8c0e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrized_fixture_teardown_order_TestFixtureMarker.test_parametrized_fixture_teardown_order.result_stdout_no_fnmatch_": {"doc_hash": "f0986cee6ba945b667daf72e705b317c8ada32d141ff6a6583e0b95b85fe9211"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_testmodule_TestShowFixtures.test_show_fixtures_testmodule.result_stdout_no_fnmatch_": {"doc_hash": "a9fcb0085cd8c309a442ac34930c2abaecd8de47141df11895ab1783232edd2f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_with_same_name_TestShowFixtures.test_fixture_disallow_twice.with_pytest_raises_ValueE.foo.raise_NotImplementedError": {"doc_hash": "5bc8d2cf2f2a1062c0439aad5d6edb1f2196771f490f51cd26ab4a5ab4ee9864"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_class_fixture_self_instance_test_call_fixture_function_error.with_pytest_raises_pytest.assert_fix_1": {"doc_hash": "03e60f5cee60ab2c9097d3e1313df4212efd53b9cb044cd7209bfecccc7846cd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_cyclic_reference_TestRaises.test_raises_cyclic_reference.assert_refcount_len_gc": {"doc_hash": "637e2b22e2f789d2f8a54ba6bd10e674b4d2e4346714f2a58ce8b155d229deda"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_different_lengths_TestAssert_reprcompare.test_list_wrap_for_multiple_lines.None_1": {"doc_hash": "5307d2d528fb4bac095ce3185722f2badcb912b88bc56a62924a29caf9d91a3b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_wrap_for_width_rewrap_same_length_TestAssert_reprcompare.test_list_wrap_for_width_rewrap_same_length.assert_diff_": {"doc_hash": "759e17dc6fa288319894cb7c494071f788c487ac94caf7d8c8f244e02abe4f1b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_wrap_TestAssert_reprcompare.test_dict_wrap.None_1": {"doc_hash": "ee4e331d41fd01a43e90f585d05e3f43a26b8f64377b3c8f79e1db14369a2728"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_TestAssert_reprcompare.test_dict_omitting.for_line_in_lines_1_.assert_b_not_in_line": {"doc_hash": "12e4ac2dab9aab40d80e0b9ae1f6aa9540ae6920065b80893aa4e1fafd6bb0a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_pytest_assertrepr_compare_integration_test_pytest_assertrepr_compare_integration.result_stdout_fnmatch_lin": {"doc_hash": "e09c5a6920dcc389b63c2abc2b6cb77316e50ff48a31e435f38b8d989530b500"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertion_options_test_triple_quoted_string_issue113.result_stdout_no_fnmatch_": {"doc_hash": "3f3181198624b8233e9089ee99303f963eb9ae0a67ed9a2b0539a53b31f5638b"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assert_indirect_tuple_no_warning_test_exit_from_assertrepr_compare.with_pytest_raises_outcom.callequal_1_1_": {"doc_hash": "d9610e43274af0b7b5a0067b8c7b756a2d30051cd4035dbb63a0f0f12e51bcf1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertion_location_with_coverage_": {"doc_hash": "65de9ac8d931932025b775c46e7285c316ac73c05eefe0db9ac9161ffe9bc041"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_dont_rewrite_TestAssertionRewrite.test_dont_rewrite_plugin.assert_warning_not_in_": {"doc_hash": "51d4119998f68948d2e36f9cc298a2dc67633d6395fbaa216e670947fc175385"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins.result_stdout_no_fnmatch_": {"doc_hash": "445f412708cfc6b2cc08bd4ad3d9959bc26d72120a81fba100516af6aca79942"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_env_var_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_env_var.result_stdout_no_fnmatch_": {"doc_hash": "29131be4f77b813866017e36827bf81fecc72f4d74a5317e4595a872921e68ec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_issue731_test_issue731.result_stdout_no_fnmatch_": {"doc_hash": "67525f900e920014a94d35245b73d576d699cc3800f4b30fa05a217e0be10dbd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_get_assertion_exprs_test_get_assertion_exprs.assert__get_assertion_exp": {"doc_hash": "05a94ccd2ecccf3998125df590829c9a38ec867ded42ad3d299fc10ac29d1bfa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_try_makedirs_test_try_makedirs.assert_exc_info_value_err": {"doc_hash": "a37e8ba32882c58d551c92523fb7d2cb233367c8db3fc8d4acc80facf94265b7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestPyCacheDir_TestPyCacheDir.test_get_cache_dir.assert_get_cache_dir_Path": {"doc_hash": "2cc342da04db3ff79467fb3abfdc4190b463edc960b9d2332c39b532052898c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestPyCacheDir.test_sys_pycache_prefix_integration_": {"doc_hash": "34f0743d2ae33ea7a7dfe8a52d9498b9862861e4482637f25dd500e5ced951fd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_os_TestNewAPI.test_cache_writefail_cachfile_silent.cache_set_test_broken_": {"doc_hash": "701457c93c6b8090004f34361ad259b66444a6ec037acd87c30a9d8ff61237a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_failedfirst_order_TestLastFailed.test_lastfailed_failedfirst_order.result_stdout_no_fnmatch_": {"doc_hash": "b0649d9b0ec4ca5b8179f2e2fb6993680e2f50e94bf7ac9322d4100c8f8f3a05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewFirst.test_newfirst_parametrize_TestNewFirst.test_newfirst_parametrize.None_7": {"doc_hash": "91db91d9d1ae1812eb8a276a754845fef7f4f835cbc3295067ab04f563a21269"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_disabled_capture_fixture_TestCaptureFixture.test_disabled_capture_fixture.if_no_capture_.else_.result_stdout_no_fnmatch_": {"doc_hash": "1119d8aad5611e8733d71ae741dbfd60d41f8d9b1b1485e95008aa842d985c81"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_fixture_use_by_other_fixtures_TestCaptureFixture.test_fixture_use_by_other_fixtures.None_3": {"doc_hash": "76e5cf7dcc617d30633f5f4cd695544e120e6d14003b1c2309d68140eecfa0ae"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_ignored_virtualenvs_TestCollectFS.test_ignored_virtualenvs.None_1": {"doc_hash": "c6af46514880b7f7f0481655e862ea492a998e797ae92a6453e9af308f3afa90"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_with_maxfail_smaller_than_n_errors_test_exit_on_collection_with_maxfail_smaller_than_n_errors.res_stdout_no_fnmatch_lin": {"doc_hash": "d789edfe841ee1f9178219742c2aa9d7a5d8d501c3781d20cff3c184a7dfad44"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_init_tests_test_collect_init_tests.None_7": {"doc_hash": "3feb281548f1a1093733b771b5288ee5b809c447f0d920885525f77c153c19a7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pkg_init_only_test_collect_pkg_init_only.None_2": {"doc_hash": "a1e70499cc8b008c428483db905cf87c18e410b724e14b5c67769d667ed8c27a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_get_real_func_partial_test_is_generator_asyncio.result_stdout_fnmatch_lin": {"doc_hash": "466c296758493aee6c14a11e7d6e5749b3a7ea703c5a54d7c0fd5b02c507c094"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_formatter_uses_py_get_terminal_width_test_help_formatter_uses_py_get_terminal_width.None_2": {"doc_hash": "c02432d8c5c3b723c9bc159220270ec5475126c27873af3901827791008d1265"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_confcutdir_test_conftest_confcutdir.result_stdout_no_fnmatch_": {"doc_hash": "3cb603648afba401623bfff7026bbc2eb66e45310e05edebba60a6250cdbed3b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_badcase_test_conftest_badcase.assert_result_ret_Exit": {"doc_hash": "e3b1215b0fdc72ff253cbd316b7478da4b20cf887dba9f28446d3e21143449c3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_uppercase_test_no_conftest.None_1": {"doc_hash": "feb1859487f6e4b37a06c4173455cfdbe075c796c43c7f7e56ddc294ae4c307d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_docstring_partial_context_around_error_TestDoctests.test_docstring_partial_context_around_error.None_3": {"doc_hash": "dcd5d8a1ab044b162e5b001e1095cef3c454b34f3aa88ab6ed7118d51bef7826"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_re_TestLiterals.test_number_re.for_s_in_1_abc_.assert__number_re_match_s": {"doc_hash": "6136bc260edb2ee42d964d9591d9fa6827a4aeb0f097300dbbf0abf104ccb1f5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_os_schema.with_fn_open_as_f_.return.xmlschema_XMLSchema_f_": {"doc_hash": "c1f62cf3e49b13b426bdc680dc1b4bdd3bb1b5e5ae3b022c8aeb1ace46205aa6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_run_and_parse_assert_attr.assert_on_node_expecte": {"doc_hash": "bc444acb8ec8a1e63d01e33634557bd373d497eb3e747150f2b7b807a6cbefc8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_parametrize_families_TestPython.test_summing_simple.node_assert_attr_name_py": {"doc_hash": "5512d5bbf9478d21049c756f2f2d2bc985941436afc88284f10b3239ec0fc8d8"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_hostname_in_xml_TestPython.test_timestamp_in_xml.assert_start_time_time": {"doc_hash": "b4e5beec2f2d3421c0f8ab4e0d8a94d563a9c9f63b4f0caf16fb10e6591124d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skip_doesnt_capture_output_TestPython.test_mark_skip_doesnt_capture_output.assert_bar_not_in_node": {"doc_hash": "82042f5ff03a7988ae7c6bd5d9471922dc139196c02b843abd963a98669f0aee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_classname_instance_TestPython.test_classname_instance.tnode_assert_attr_": {"doc_hash": "9851198a86e44c75755fab72ab47cc45842c3d1a30c8c93e01621d5cbde03012"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_classname_nested_dir_TestPython.test_classname_nested_dir.tnode_assert_attr_classna": {"doc_hash": "b109c6493bf1e8093a0f5f532bec46547f215af2fe735e5e11b5be54a00a78d8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_internal_error_TestPython.test_internal_error.assert_Division_in_fnod": {"doc_hash": "795aae9a1705b46d83ce4d745d9435fefbac67e427ddd07033a1fb2a565665f8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_function_TestPython.test_xfailure_function.fnode_assert_attr_type_p": {"doc_hash": "3e774dfd5a5823496ce380255c599a79b7d97aec33f4b2f66a35c1a834cec186"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_collect_error_TestPython.test_collect_error.assert_SyntaxError_in_f": {"doc_hash": "122445c5cc4b9c90d8cc4b02084ec340371bfed317e5b130696e9d43b77e6872"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPasteCapture.test_non_ascii_paste_text_TestPasteCapture.test_non_ascii_paste_text.assert_len_pastebinlist_": {"doc_hash": "f34f933beee4d1d860d99a89354c3cdd956f83d7006f3acacb077aed7bccc688"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste_TestPaste.mocked_urlopen_fail.return.calls": {"doc_hash": "c0549cd3c10b94679c456c4f7b55ff240b81bb2b329af02703ccf8439c2e3bec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.mocked_urlopen_invalid_TestPaste.mocked_urlopen_invalid.return.calls": {"doc_hash": "943af5685aa101eafe8d3c53891b81f0a5d1dc70e456c91d5cfc19e56fff70d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.mocked_urlopen_TestPaste.mocked_urlopen.return.calls": {"doc_hash": "7fb49cb7361b9c3b44c7610add2239f31b679e095477ab643cef4ec7a19c5c2f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.test_pastebin_invalid_url_TestPaste.test_pastebin_http_error.assert_len_mocked_urlopen": {"doc_hash": "b3475816cfdfd253401a779988fb432568eaac2b27bbf268ff7dda75b6313d0a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_with_addoption_TestPytestPluginInteractions.test_hook_with_addoption.res_stdout_fnmatch_lines_": {"doc_hash": "85cc553c500e6f1091bdb3343206d7cc92ca2a1b9833b0bb39a9a29ce9f56675"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_spawn_uses_tmphome_test_spawn_uses_tmphome.assert_child_wait_0_": {"doc_hash": "6647575cf25d8c986a00d4ddc384c135edadd3efbfd707bc6a2a59c422b9627b"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_deserialization_failure_TestReportSerialization.test_deserialization_failure.with_pytest_raises_.TestReport__from_json_dat": {"doc_hash": "c832a9b3223838e4956efa1a6efcd28834c3c0c22fe3334f37d23d7bb9eab8b3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_TestReportSerialization.test_chained_exceptions.if_report_class_is_TestRe.else_.report.reports_1_": {"doc_hash": "de038b260726d0f5ed4f7952d7c60eb2a52447a50fba37201557b90a52ffe56c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions.check_longrepr_TestReportSerialization.test_chained_exceptions.check_longrepr.assert_desc2_is_None": {"doc_hash": "e6f95cc07c9d3554314ed1e3ff10cc3fc68c0c3e729776ef5e37672b109291bd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions.assert_report_failed_TestReportSerialization.test_chained_exceptions.loaded_report_longrepr_to": {"doc_hash": "9884f2a06514daec9f6657dfec40658ff03d2fb29346dd6aef0399c40b90145b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_callinfo_test_callinfo.assert_n_not_in_repr_c": {"doc_hash": "293dac08044bedfcc91de2d8bd599904be2e851302754cf4a70b037df7b78ff4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_outcomeexception_exceptionattributes__strip_resource_warnings.return._": {"doc_hash": "a1aa31fa302ff5363f719ca6269d6fbcb568b5572ca69bbfa3eabce229d01975"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_non_ascii_test_pytest_fail_notrace_non_ascii.result_stdout_no_fnmatch_": {"doc_hash": "b1b8760253c6cf879fe1c413937cd65f7d9db3de9b331023fe147d7666a46fd1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_funcarg_setup_when_outer_scope_fails_test_setup_funcarg_setup_when_outer_scope_fails.result_stdout_no_fnmatch_": {"doc_hash": "95a44e2436d755ab81b348acfc98e94f98618e4a49212c441ef13bc1b94f561f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_broken_repr_SessionTests.test_broken_repr.assert_out_find_reprex": {"doc_hash": "9dc35d51ba93814319ff1854fbb066aa91a137e74fae16d83ac5c53bc91ddbe7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_different_scopes_test_show_different_scopes.result_stdout_fnmatch_lin": {"doc_hash": "30bf6933150590c53a83c73d6a3b44a95396ffd50d424ba652cb9b8db3352044"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_nested_fixtures_test_show_nested_fixtures.result_stdout_fnmatch_lin": {"doc_hash": "7aef3b871142158c922967378ca92fa56508e52a73b8ee5ff0877a23844db9cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_autouse_test_show_fixtures_with_autouse.result_stdout_fnmatch_lin": {"doc_hash": "8c57059c71a2206963f3525488f8bb864f25160fd1b7814c3669d407f7e661f1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameters_test_show_fixtures_with_parameters.result_stdout_fnmatch_lin": {"doc_hash": "ccc0339b65cb3fe10e9b422cb2747bcfffc3e6168064593ab1853a637fa2f728"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameter_ids_test_show_fixtures_with_parameter_ids.result_stdout_fnmatch_lin": {"doc_hash": 
"fb68c1695a70841119d289aedba3cb28137d117713913e4a1b11aedd24372843"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameter_ids_function_test_show_fixtures_with_parameter_ids_function.result_stdout_fnmatch_lin": {"doc_hash": "a39810139b37d3f0decca152f660d2680a92b9694d9db5c326bdd40273a97836"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_dynamic_fixture_request_test_dynamic_fixture_request.result_stdout_fnmatch_lin": {"doc_hash": "ebca5883565166f9de22a74733ea839dd6e4c1e696cd6c92df6d2f1d28a45fad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_capturing_test_capturing.result_stdout_fnmatch_lin": {"doc_hash": "bd9acff707f48056e0fbf64d13cb1fb0f4a713297adedfbe3c68f6f245a7f18f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_and_execute_test_test_show_fixtures_and_execute_test.result_stdout_fnmatch_lin": {"doc_hash": "87535c1c972db5f1322437974ae39e501cf8c4a2073aa87bc36a697e0a003aca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_report_collect_after_half_a_second_TestTerminal.test_report_collect_after_half_a_second.assert_x1b_32m_x1b_1m": {"doc_hash": "514bc462ababc550e58f0ba588e1d3e71efcf5313c9c3c95cbaf04f4213e5d1a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_itemreport_directclasses_not_shown_as_subclasses_TestTerminal.test_itemreport_directclasses_not_shown_as_subclasses.result_stdout_no_fnmatch_": {"doc_hash": "3672961c63ed2e4d503565c3debfc23c8db48e48ad46e5fc36e6ea4268f14a30"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fail_reporting_on_pass_test_pass_reporting_on_fail.result_stdout_no_fnmatch_": {"doc_hash": "bb38ca8543dcc895631c8f3cf535636fb1d3aca77b1f658641c7dd6b53c5b4b2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_skip_counting_towards_summary_test_skip_counting_towards_summary.assert_res_1_faile": {"doc_hash": "c57e5158afec9fd8ed8ee2b7afdf3c7c8a50346e327bdd4590b53bc83f394ab8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestClassicOutputStyle_TestClassicOutputStyle.test_normal_verbosity.result_stdout_fnmatch_lin": {"doc_hash": "74bc210d7d717557f7e91205e43f0d6e4b02238eacfe1a579f658d25e8ee612e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_zero_tests_collected_TestProgressOutputStyle.test_normal.output_stdout_re_match_li": {"doc_hash": "efa6427f14a498523f288dc8a2953996a8d31cf1a77ad32756383ffcf6e91bb8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_count_TestProgressOutputStyle.test_count.output_stdout_re_match_li": {"doc_hash": "29d16d73124085e66113709b016cbb81459a5a84983a40045888dae5fadab941"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_xdist_verbose_TestProgressOutputStyle.test_capture_no.output_stdout_no_fnmatch_": {"doc_hash": "f55373db4f5fc4890a227ae8d563c6dee0b0a2f9362d7b20c91cc3d6af1be470"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_format_session_duration_test_format_session_duration.assert_format_session_dur": {"doc_hash": "1b3ccfd68a2de7fd38445d745a2dc6142fcb7e8ba03247d0ced7618dffb6d7fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_failure_is_shown_test_setup_failure_is_shown.result_stdout_no_fnmatch_": {"doc_hash": 
"9901d67b62953366bb082253532dc51960fc3e6a438a044756802e9b0960a5d8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_adderrorandfailure_defers_test_testcase_adderrorandfailure_defers.result_stdout_no_fnmatch_": {"doc_hash": "e4c4db1c296034e4c6b814496a46c736396ce25561931aea0029c4af6106926c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_handles_init_exceptions_test_testcase_handles_init_exceptions.result_stdout_no_fnmatch_": {"doc_hash": "efe21828dc6354292b594459b8f80ae732ab020a8d82fa0d46e3f93cba44cbc2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py___add_module_names.False": {"doc_hash": "fc3d845b2263feff67d19e07fccce1780f9449b5c743d705308f483a4844c67a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_pygments_style__If_false_no_index_is_g": {"doc_hash": "4aac20616b1cf235ef89f137f403b974a1bf1c520b1df031c3d82ffa4cfa5a39"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_html_use_index__Example_configuration_f": {"doc_hash": "a0aad0e51204fa9e524797b4e9e984fe0504fdab99ec5d2bf6feb07ffd362d2f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_intersphinx_mapping_configure_logging.warn_handler_0_filters_i": {"doc_hash": "9d184532bcefc21eb221e7c628f7fcaa19dd747c211ae720a117484df3d5b1aa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_setup_": {"doc_hash": "544d3bd7c54472d84850f2eba269b9ba152619b886f3a3663c961ed171f53430"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/nonpython/conftest.py__content_of_conftest_py_YamlFile.collect.for_name_spec_in_sorted_.yield_YamlItem_from_paren": {"doc_hash": "fcf9f95f98063a29512dfd0a22d2e51a5e3e3834952b26f83a44744a7a072c91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/nonpython/conftest.py_YamlItem_": {"doc_hash": "35a26f583945b9e6df4dc7a47391586a1fe1588b719b458740c3d14807f7c81e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py___publish_github_release.return.repo_create_release_tag_n": {"doc_hash": "cff3c4bedc5d456a94f414b262f0ed0903833bd7ed79332b8bba57622b4f293f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py_parse_changelog_convert_rst_to_md.return.pypandoc_convert_text_": {"doc_hash": "f67d6fda4a879440149393e775e020c464b421b0b90b2908851a30655822d1ac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py_main_": {"doc_hash": "f5003e6c182d7ebb42620b57eb3b129f131180310eddab13b252957f076a7abc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_regen_check_links.check_call_tox_e_": {"doc_hash": "c45bd4bd75d54132ab38293a9ba67a948dad76e5426982e47018504897f365ef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_pre_release_pre_release.print_Please_push_your_b": {"doc_hash": "9a62c7985b45a05642f24eb54a450cdb302d0c0fc3e463ed578c9268baa07d9b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_changelog_": {"doc_hash": "0b61ef9441cd08a56a46f47a2f3ad8140cb06304a4d1540b1f492fa8a3bd87ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_inspect_if_TYPE_CHECKING_._TracebackStyle.Literal_long_short_": {"doc_hash": "b4321bae42cd8c78b2fe39d7bdd1faf3a564f6cfa42ee586d55c0fb36f2ebed5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code.path_Code.path.try_.except_OSError_.return.self_raw_co_filename": {"doc_hash": "4e12c467f883ef13a77153ef959da21ad01802a690b7de3f286be05251cbeb29"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback_Traceback.__init__.if_isinstance_tb_Traceba.else_.super___init___tb_": {"doc_hash": "8ee566681d9b4ad29f3f35675d19a08b0877aa2438aba5cbf4223928141bc69b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.from_current_ExceptionInfo.from_current.return.ExceptionInfo_from_exc_in": {"doc_hash": "633242ab904a14a811f47b690b89420c59264fd81c3589039c46b69dc2b5eaaf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo_FormattedExcinfo.repr_args.return.None": {"doc_hash": "53994e112527d3e25016ccea60bbbf22a0afa0281cb4e90e10b540d6a5a84212"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.get_source_FormattedExcinfo.get_source.return.lines": {"doc_hash": "4c74bb4ee534736a09ec1e6b5847e40b39dd70c1cc81581c15d5d19d2f6a68e0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.get_exconly_FormattedExcinfo.get_exconly.return.lines": {"doc_hash": "2034289f891389db5fb6f59972887ceba5b119ae746b1c7631e5c62ed7359be4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_locals_FormattedExcinfo.repr_locals.return.None": {"doc_hash": "c98e1e5a0b4ffafd679f87203bbe99ab2fa626ea6832817f05866f7cc6e3dd53"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TerminalRepr_TerminalRepr.toterminal.raise_NotImplementedError": {"doc_hash": "41667fa7a75d5ddb2be9fce068923dc1e2d825ace369dc0030620fe670ab3ea2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprTraceback_ReprTraceback.toterminal.if_self_extraline_.tw_line_self_extraline_": {"doc_hash": "80a7fdd0ffc9c80a247c4047107e5225009bdc7476795891d64f5f474a8d31fb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprTracebackNative_ReprEntryNative.toterminal.tw_write_join_self_lin": {"doc_hash": "ea82540834351c984c348a693d13bce6a74fe5ffb83e647b9cc081036a395189"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprEntry_ReprEntry._write_entry_lines.for_line_in_failure_lines.tw_line_line_bold_True_": {"doc_hash": "d80ed731ce12cc4604d350a01f8d0bd5f1f9b2203989e38e3f057ad7f01d5c03"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprEntry.toterminal_ReprEntry.__str__.return._n_n_format_": {"doc_hash": "c6153c920c65646f36900dfd7098d2feea0af02ec5467d91a1fe56c7ba5e117c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprFileLocation_ReprLocals.toterminal.for_line_in_self_lines_.tw_line_indent_line_": {"doc_hash": "72298dfeaa3088db9f9c40653c5a0cc10885d4b68449259a37196e31a65e6c58"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_safeformat_saferepr.return.SafeRepr_maxsize_repr_ob": {"doc_hash": "30e5355e702fb66f1a6ad6514c8e09cb0c52713187d73264b0ec3eca8a90c0f6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_AlwaysDispatchingPrettyPrinter_": {"doc_hash": "c9187126a7f8e7da791d2e950d7ee72c6f013a41a853d8b06eb14b975b78cda5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_runtest_protocol_pytest_runtest_protocol.callbinrepr.return.None": {"doc_hash": "6247b635fc04a8c1a6daba382346460254e823dbe0e795885feccecbd26dca18"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_runtest_protocol.saved_assert_hooks_": {"doc_hash": "db7147d0dde3172453e59c0748deff196dbd1819876199bbcb1b15ddf3226d4b"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__write_pyc_fp__write_pyc_fp.fp_write_marshal_dumps_co": {"doc_hash": "1145051a46df62cd0f2581af050907a1863ec3d8a2ee83b8fe68b18f8f772ec2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Assert_AssertionRewriter.visit_Assert.negation.ast_UnaryOp_ast_Not_to": {"doc_hash": "04c283014959cce52a66079caeeb6b45e223d229fea2156bc140af954e2f1979"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Assert.None_2_AssertionRewriter.visit_Assert.return.self_statements": {"doc_hash": "a9fa3a5d4341b3d3ce8369f17b9a95fec8b7b68def4b7309198c2990fdadaacb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache._getvaluepath_Cache.get.try_.except_ValueError_OSErr.return.default": {"doc_hash": "bc4e897665742d56982d0f630bb5bf8f465bbc84057cb1621f4dda28059db8f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.set_Cache.set.None_1.else_.with_f_.f_write_data_": {"doc_hash": "ca33eec86204a95ba564721546d59d86822a85585e5acf7d40eb17943c439d7f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPluginCollWrapper_LFPluginCollWrapper.pytest_make_collect_report.yield": {"doc_hash": "ebceffa5095306feca98b2afda3eaea9566bfb1fe0b4b92c0dd0c120e3adf839"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPluginCollSkipfiles_LFPluginCollSkipfiles.pytest_make_collect_report.return.None": {"doc_hash": "23318df998c1fcb8a5c3542081240c8cc9b49be88e221aed0c26a07cbd1dac50"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureIO_TeeCaptureIO.write.return.self__other_write_s_": {"doc_hash": "6abb24ae00bc2f0fb101876d59cffa7c58a31e67481625a57242d7235022a59b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_iscoroutinefunction_iscoroutinefunction.return.inspect_iscoroutinefuncti": {"doc_hash": "788b62351dbde67c2d9a29f5eca07786dffae7f6d601a15190c208bef475d971"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_is_async_function_getlocation.return._s_d_fn_lineno_1": {"doc_hash": "12845f3aa4616ed1cb9352743a9d883ef415f713c243b342f7faa598a3240dff"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_safe_isclass_": {"doc_hash": "347b5691dd610076b8ac9c663fd029bf91d5f92a0137872338eae196d86aac05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.None_4_PytestPluginManager.consider_preparse.while_i_n_.if_isinstance_opt_str_.self_consider_pluginarg_p": {"doc_hash": "8ed3d7235a813bbf247675d8464bdb093f1482a06dad301f6b9ca3ddf33fda5d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__get_plugin_specs_as_list_notset.Notset_": {"doc_hash": "b0e306ac8e850f6e80b931182373713552429f3ed221c14cd11e4a5c7fa0b4a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__assertion_supported_create_terminal_writer.return.tw": {"doc_hash": "26421e9d8bd232055b9e1f015bd165625591e7d77a4c7b331afd1d25d63df335"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.parse_setoption_Parser.parse_known_args.return.self_parse_known_and_unkn": {"doc_hash": "2258d270f5738b82d8bef5d2c73f16fab402c7b78d1e135fd2e7f6bbc6de637a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.parse_known_and_unknown_args_Parser.parse_known_and_unknown_args.return.optparser_parse_known_arg": {"doc_hash": 
"e9a5568d7900faf9ede54a0a918d3f9afaad0ed75e5ab5be97cd258d2973aed6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_OptionGroup_OptionGroup.addoption.self__addoption_instance_": {"doc_hash": "3403215d3a8e5f31371f2aef116e67db4dd356879c2802991c28ace4c62a3465"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_OptionGroup._addoption_OptionGroup._addoption_instance.self_options_append_optio": {"doc_hash": "44446e343856ae63c98f9248e60ee5afa4c3bf162fb52d753aeea67a2cc5ba9e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser_MyOptionParser._Type_ignored_because_ty": {"doc_hash": "f97326e16c7a6abce6995f5eb7700238647dea3249f89d6ebf132be864e343e8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser.parse_args_MyOptionParser.parse_args.return.parsed": {"doc_hash": "f8628047c26d9f42ab7bf6d15171d74f50b5bdce4ff6a3d5d341775fecae988d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_wrap_pytest_function_for_tracing_maybe_wrap_pytest_function_for_tracing.if_pyfuncitem_config_getv.wrap_pytest_function_for_": {"doc_hash": "ce605ab240402d6a9ca89271b5f013a65294f6a1538e17e22534776c6a1f3c3e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestTextfile_DoctestTextfile.collect.if_test_examples_.yield_DoctestItem_from_pa": {"doc_hash": "6e1fa89a68b48cc3cacedf6fdcad4433b20fd1d176cdb26569665e7e0fd3add2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestModule.collect.if_self_fspath_basename__DoctestModule.collect.for_test_in_finder_find_m.if_test_examples_skip.yield_DoctestItem_from_pa": {"doc_hash": "46d2cd55e3955ca8acfa0c68e82eb6cc4c8c679462a9f2ec4442c45c295fc701"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest.config_FixtureRequest._fillfixtures.for_argname_in_fixturenam.if_argname_not_in_item_fu.item_funcargs_argname_": {"doc_hash": "99151532c84fcfbd48832d5a806d41e891d090e289b0b04d572419a423f2e374"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_pytestconfig_pytest_addoption.parser_addini_": {"doc_hash": "2724d1eefd103edf28d7148e074ebccb9f00041f3c4a04f10c7d0f19e011eb7e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_8_pytest_collection._Perform_the_collection": {"doc_hash": "f1b4b3edf9732ac6095b7c27fef9354953c1e89e40a16dc7e49ea13af9af5c4e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_report_teststatus_pytest_report_teststatus._Return_result_category": {"doc_hash": "eda27d706ecea55ee227f281c6a1886c0a213e55c9f7665dd87e69e6d1787550"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.record_testreport__NodeReporter.record_testreport.self.attrs_6.temp_attrs": {"doc_hash": "7ff9ea22dda73589aac3a2a4f3b469969f55b1a4c389260b767aeb360de8f7f9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_get_log_level_for_setting_pytest_configure.config_pluginmanager_regi": {"doc_hash": "b1b9f1eea9ee04ff4181d9859a12b642e8481342921130e04ace8efe1df7ec5a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_runtestloop_pytest_runtestloop.return.True": {"doc_hash": "763133a7ac8c2191138dd48146fe770b57aca9568d9586f7521b1cf60a53cf19"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_ignore_collect_pytest_ignore_collect.return.None": {"doc_hash": "03e4950507bdb76692983af1a7faacf9554cdb69794c0c1e4d909c4f8277b5cd"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_deselect_by_mark_pytest_collection_modifyitems.deselect_by_mark_items_c": {"doc_hash": "f5099fb265cf9ad7af61ffdff2a2bdf1fb0d2c9ab9992673783741a39cf38a91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_configure_": {"doc_hash": "4b04dff70b884169815a7b839ed5921e89b35a5d12cd63d24e5a1f0843cdbefc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_Mark_Mark._has_param_ids.return._ids_in_self_kwargs_or_l": {"doc_hash": "3055ffacb9a107b80015cf732ce50c2379f505016564effbbb82508e37905f2a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MARK_GEN_": {"doc_hash": "d482dfd6e971c455754cca5ec0f4b82dfc0dc4207de89db04e543be5d1537178"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node_Node.__init__.self._store.Store_": {"doc_hash": "807fa45ddd7a20f9a193ecc27e47b4f965e2d05173a889518cc926a71d8fae9d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.from_parent_Node.__repr__.return._format_self___c": {"doc_hash": "efcb54dc21ffab2b059c7531dda0e328b16ec8c13616c87eef61e045a6cd04a9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_File_Item.runtest.raise_NotImplementedError": {"doc_hash": "3f9a4b4f374de03ca90c78d0d6a79c5a93d98a664e7b11269777964ae4d7f339"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_OutcomeException_OutcomeException.__str__.__repr__": {"doc_hash": "85a740afb93b1a681fefb77342a9794e86ef6ed134729d4d70d21413306819db"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_Failed_exit.raise_Exit_msg_returncod": {"doc_hash": "4622d0c61a6bf5579e3edf3bf2b5dc2ce3225d3b6a0cec50ff90ff5c9f8d36a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_skip_skip.raise_Skipped_msg_msg_al": {"doc_hash": "c327315e9707925cb1190113d259859649a73520ab4db59c111b1e8d0f6d1b9c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_xfail_xfail.raise_XFailed_reason_": {"doc_hash": "e2a66616439a847480bcfc80db2266e167cbd6974a31c4eee00d24050db8edf8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_importorskip_": {"doc_hash": "4d33c00dc6529796dce955d61491e4b02e2c96069aea885edf254fd091a2a255"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_configure_pytest_configure.if_config_option_pastebin.if_tr_is_not_None_.tr._tw.write.tee_write": {"doc_hash": "69fad3e36c47b67f6a8b127a3bea2bd0bc3e012ffdf15515e5505fa080f96887"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_unconfigure_pytest_unconfigure.if_pastebinfile_key_in_co.tr_write_line_pastebin_s": {"doc_hash": "d764bbaf18c36ccbc456bfb973f07e10e007ee0707ed63d22cc4fb6261cb27b8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_make_numbered_dir_make_numbered_dir.for_i_in_range_10_.else_.raise_OSError_": {"doc_hash": "1bdaf6a1696bbd78a1ac5b291bb790f58f0071ae100d43755ca643bb37b1818a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_create_cleanup_lock_register_cleanup_lock_removal.return.register_cleanup_on_exit_": {"doc_hash": "de4eef6c55fb1130b5eb5bc861c673f1a6f8ae548c90dab1e9d7c339b0b3db3a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_maybe_delete_a_numbered_dir_maybe_delete_a_numbered_dir.try_.finally_.if_lock_path_is_not_None_.try_.except_OSError_.pass": {"doc_hash": "025dd5020176d25d08de660c1a376fe26fd8e0ed2f0690e15794c40415f5d20a"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult_RunResult.__repr__.return._": {"doc_hash": "d91ffa1bc85170c1754c5d007ea34cf59e5169b8c3dcdd6f348cafd0b1bba29f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineComp_LineComp.assert_contains_lines.LineMatcher_lines1_fnmat": {"doc_hash": "3dc9294732c4f4de1bc18ced9ac64ba896d51fe80f27808b30e9bf1e380c191d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._match_lines_random_LineMatcher._match_lines_random.for_line_in_lines2_.for_x_in_self_lines_.else_.self__fail_msg_": {"doc_hash": "87ccf4e8a06fd35d92275c0f156a815a53642b2c2618c01f0c9c27b7df78fd3c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.get_lines_after_LineMatcher._log_text.return._n_join_self__log_outpu": {"doc_hash": "cc61865a8913a8b3d9224aed980daf39eb5b8d2324960d070c1ab14b188402ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.fnmatch_lines_LineMatcher.fnmatch_lines.self__match_lines_lines2_": {"doc_hash": "4822dc21f91b2a497d07096e9131333284b7f2ead4f54d87967655f6e106bf03"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.re_match_lines_LineMatcher.re_match_lines.self__match_lines_": {"doc_hash": "f3bbd1bf96e531e15fb13e2e8aaa247005fc427e58908594b1bee71dfe4d3d29"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._match_lines_LineMatcher._match_lines.self._log_output._": {"doc_hash": "863506be4b4fe17fab4caed38f225906bb3fb050b11a60f36a9fab4a8f7b6bb1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.no_fnmatch_line_LineMatcher.no_re_match_line.self__no_match_line_": {"doc_hash": "d30fce9c22a00c70ae0df97ff72fffda36154235de48f579cbb7d2bc415d3203"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_async_warn_and_skip_async_warn_and_skip.skip_msg_async_def_funct": {"doc_hash": "c423ca1c1a8249c63ddcea54a117568d303ddf7e833995956242080d649da335"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_pyfunc_call_pytest_pyfunc_call.return.True": {"doc_hash": "854803a0a569bc3e1e1ff3e894e567a03309b00cbc7c54daf881cf774e2277d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector._genfunctions_PyCollector._genfunctions.if_not_metafunc__calls_.else_.for_callspec_in_metafunc_.yield_Function_from_paren": {"doc_hash": "25b0e067e00f882ae6716d2fd52009fe6cfb5c3fbcf89535ad9a382bbfe7039d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class_Class.collect.return._Instance_from_parent_sel": {"doc_hash": "08e4b2df280414e04f17022264fb175ed5c98a7f9b733511fa6d4ba836e10f93"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_CallSpec2_CallSpec2.id.return._join_map_str_self__i": {"doc_hash": "898e649cb9dfc8a398b63f8700513372a913d2c4815fe37ac176e1c6d493136d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._resolve_arg_ids_Metafunc._resolve_arg_ids.return.idmaker_argnames_paramet": {"doc_hash": "0ef95068f1e7081f8ba3393ee73f63523ea1b828ba7463f2311522e792b49672"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._validate_ids_Metafunc._validate_ids.return.new_ids": {"doc_hash": "01d91c6a9edc3b2ba22b277df5aca5b48c828528c90e535de60dbb3c6e47422f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__find_parametrized_scope__find_parametrized_scope.return._function_": {"doc_hash": "b62a5e6b6b2582c9672c783c13b31ef8504a0738eb19cb75e82c9e9d072891b5"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__ascii_escaped_by_config__ascii_escaped_by_config._type_ignore": {"doc_hash": "23be04d3aa8242d9d428dd3e6647e2cfea1583f63cdd96121f7b809c7e6b6e7a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__idvalset__idvalset.if_id_is_None_.else_.return._ascii_escaped_by_config_": {"doc_hash": "b44644104c02809e7c30bae3f4a5bf6a51cb0c91d12ce2cd6236befa5521b54a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_idmaker_idmaker.return.resolved_ids": {"doc_hash": "481ab6198a8211837f5e5857ed8f3b8ae8af5e70897be1a882d8e545fa692cec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_show_fixtures_per_test__show_fixtures_per_test.write_fixture.if_fixture_doc_.else_.tw_line_no_docstring": {"doc_hash": "4f543af2906b78c1c46494fb747ca98cf792d2fe9545e0e0b939eb83ddda2c7d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json__report_to_json.serialize_repr_crash.if_reprcrash_is_not_None_.else_.return.None": {"doc_hash": "969e373b2195a9bb0423f7cdf60885eb27b652d4b874c2f498ed7c31f3081e83"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_report_teststatus_call_and_report.return.report": {"doc_hash": "1ad22831b86eb2988d1d2b3ee2068ccd7b980428008ba45ab3668e6d8070aa45"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/store.py_from_typing_import_Any_StoreKey.__slots__._": {"doc_hash": "cfde884e2694c3720328cfaa3a75101f3f1798c3df661a6f5a3d787878620b40"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/store.py_Store_": {"doc_hash": "333c015a330013504d91a7b93815b637aa5a07cc2b2fec81ae0495fcb7232edf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._is_last_item_TerminalReporter.pytest_runtest_logfinish.if_self_verbosity_0_an.if_self__is_last_item_.else_.if_past_edge_.self__tw_write_msg_n_": {"doc_hash": "4775ca19b46dfad32326ab6214e5cdc445721b02fb39047c5252ed92cbc5d2e6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._printcollecteditems_TerminalReporter._printcollecteditems.for_item_in_items_.for_col_in_needed_collect.if_self_config_option_ver.if_doc_.for_line_in_doc_splitline.self__tw_line_form": {"doc_hash": "43db3484e95e50ac471a1571310adfb17e83c5cdfccb663c2651e34dfb21f21e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_failures_TerminalReporter.summary_failures.if_self_config_option_tbs.if_self_config_option_tbs.else_.for_rep_in_reports_.self__handle_teardown_sec": {"doc_hash": "204adcac3c89ec91700934b5c110a1334fdf506198bbab982fac3127a972d5c9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary.REPORTCHAR_ACTIONS_TerminalReporter.short_test_summary.if_lines_.for_line_in_lines_.self_write_line_line_": {"doc_hash": "dc586339cb8e32d5500ebb0727566a69072572357b0432cae5c936ad9adaba38"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_main_color_TerminalReporter._set_main_color.self._main_color.self__determine_main_colo": {"doc_hash": "9d0efa0597d75e9dccc52247a68d10cdaa43c1fbb30ed6ae3bc1d2a02076d2a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory.mktemp_TempPathFactory.mktemp.return.p": {"doc_hash": "a29e82bb52ec621c7fdfd1419dba447c65cb6dd82ae0f0e7ddf4ca62dd9cb5e3"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_UnitTestCase_UnitTestCase.collect.if_not_foundsomething_.if_runtest_is_not_None_.if_ut_is_None_or_runtest_.yield_TestCaseFunction_fr": {"doc_hash": "21400dc6eee526020014bc44716fd7911ed72c4ab46bea722b56ff8b4c8c33b2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction_TestCaseFunction.startTest.pass": {"doc_hash": "aa44ea3324a48e7ed7ef792f575423dd65cdc01aa6fb3a03d6698fff413708f2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction._addexcinfo_TestCaseFunction._addexcinfo.self___dict___setdefault_": {"doc_hash": "16dc8deb38079adb6909de5e355dab3ec9f9235abace657522795d8470ec6ebe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__main__.py__": {"doc_hash": "b882673043a2a4b521fc77632be54fe4027f08535f3af1df3fb76b0e0913835e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/collect.py_sys_": {"doc_hash": "c6f4ac91199102870e73eefa90a1f6ee935ddd648fb659f2c179e7213d416be8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_better_reporting_on_conftest_load_failure_TestGeneralUsage.test_better_reporting_on_conftest_load_failure.assert_result_stderr_line": {"doc_hash": "e040d12b9f9fecff86e02714fa6bea25ff5a75b3d77ba2c379947329c924f69f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurationsWithFixture_TestDurationsWithFixture.test_setup_function.result_stdout_fnmatch_lin": {"doc_hash": "6ebde8f4a5dd163834b150634b17d5fae1083146483637706ae949c6166fc978"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_pdb_can_be_rewritten_test_pdb_can_be_rewritten.assert_result_ret_1": {"doc_hash": "6556a27b6474ea4557721442e4753a9f62f66140fbf875e29d4b332d1aba0012"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_TestReprFuncArgs_TestReprFuncArgs.test_not_raise_exception_with_mixed_encoding.assert_": {"doc_hash": "9de0c985cc4631107f40dbe05152ea5b38c6ee7426b35ac1355372bccc8be8d2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_ExceptionChainRepr_": {"doc_hash": "ae2cced1d13e005a5a2eeaf29242f041d5fef9b3f638bf777fb62e5ca779f30b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_recursion_index_TestTraceback_f_g_h.test_traceback_only_specific_recursion_errors.assert_RuntimeError_hel": {"doc_hash": "5b0d4581b74db4b1ff426395729badca919d9810c72fae9f53b4aa12f15e377a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_no_recursion_index_TestTraceback_f_g_h.test_traceback_messy_recursion.assert_excinfo_traceback_": {"doc_hash": "8752c72a8d066e7fc9e7cc80c6bf7638c5daf09bbfda7a70f56cbda413b02635"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_failing_fullsource_TestFormattedExcinfo.test_repr_source_failing_fullsource.assert_repr_chain_0_0_r": {"doc_hash": "26668df0a4459c31d0e57e0ea8094f366bb9619a12251a3291e49db76426add3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_with_invalid_cwd_TestFormattedExcinfo.test_repr_traceback_with_invalid_cwd.assert_raised_3": {"doc_hash": "92483dad36ae1ef147796688aae04865f231a2a2e933e3b1f2583582a1dc2aa2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_findsource_test_findsource.assert_src_lineno_": {"doc_hash": 
"27f7a5239ac066135815f32a8f81d8505b2d3e09befb8fd6ef24a65484e89baf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_re_pytest_collection_modifyitems.yield": {"doc_hash": "31b440cd28b2e23cb2d96481b6f92f50ce7153f45551d3f74af799fa3b0a9cc5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_asynctest.py__": {"doc_hash": "6bac9fc29b5af8d648bef04b51438c03985c529b961b452db35dfd1ed7bb919b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py__": {"doc_hash": "3c21b4ff723a4b69baaaacc8ca11b1bf30a184c241d1af75a4d7c1d61f4a7563"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py__": {"doc_hash": "240f718ffb44f96af16adcb7bd7fd4ceebfd568b5602fe1408562684a648ee2a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_skip_duplicates_by_default_test_skip_duplicates_by_default.result_stdout_fnmatch_lin": {"doc_hash": "f86db7efbe207108f15e2b3b00b592792bbe88ff3077b5efb4f08ef9227d0388"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_parameters_without_eq_semantics_TestFixtureUsages.test_parameters_without_eq_semantics.result_stdout_fnmatch_lin": {"doc_hash": "a87057276186f11cd0e69eafa2ebf676197636f33f0043058d915ca900e41ad3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_non_relative_path_TestParameterizedSubRequest.test_non_relative_path.None_6": {"doc_hash": "8cd79f7a645738935d48bfc2ed117364a1738144df1bdee1ca5cd00e3f9a637c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_indirect_fixture_does_not_break_scope_test_indirect_fixture_does_not_break_scope.result_assert_outcomes_pa": {"doc_hash": "da9ffbdbc9d1e332444fc9629f5234eece44447f60bf92a7648e3ec47bac2c52"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_yield_fixture_with_no_value_": {"doc_hash": "b1750121f1f0e5876940dd3aa3b1ccc7a1e7b51e38fd4d80bcb33a607ac034be"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_error_TestMetafunc.test_parametrize_error.with_pytest_raises_TypeEr._type_ignore_arg_type_": {"doc_hash": "f383314d3f0cf297f8dc955aad7b1dced99b1ec99ac09b4515503f40f2f85358"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_error_iterator_TestMetafunc.test_parametrize_error_iterator.with_pytest_raises_._type_ignore_arg_type_": {"doc_hash": "0617128f4168c76d00578f7eb60d02dcf56693eed3e11c0a85da2fb405d2f78e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_bad_scope_TestMetafunc.test_parametrize_request_name.with_pytest_raises_.metafunc_parametrize_req": {"doc_hash": "2a6101e2a7b9ea7f429da556b4572fdc8a0b2051172671f4556712d95eb80118"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_and_id_TestMetafunc.test_parametrize_with_wrong_number_of_ids.None_1.metafunc_parametrize_": {"doc_hash": "657776b657f3b05c85b10ccf404981f7f6ba0722d59b1b180bde6420aed7417a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_iterator_without_mark_TestMetafunc.test_parametrize_ids_iterator_without_mark.assert_ids_4_6_4_": {"doc_hash": "d367f63d7efece8c20d2f52566e49abc35b1e17e260874ef0728a5a974b91a03"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_empty_list_TestMetafunc.test_idval_hypothesis.escaped_encode_ascii_": {"doc_hash": "be4fe444b7d03db1bc54966118bf39a8d7cab62695aa033bdf1cf45b77865b8c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_unicode_idval_with_config_TestMetafunc.test_unicode_idval_with_config.for_val_config_expected.assert_actual_expected": {"doc_hash": "3589f51a6039f22d817bf62d69fb50b0d3f10f1281e36671ee906fdfd7924ce5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_returns_non_string_TestMetafunc.test_parametrize_ids_returns_non_string.result_stdout_fnmatch_lin": {"doc_hash": "8ce3f03220991cfa3413cd0732793761333fbfc485daa710832d038d91ed3176"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_ids_TestMetafunc.test_idmaker_with_ids_unique_names.assert_result_a0_": {"doc_hash": "a1e03efb2033715c0c0416ccc9508c1816f0584b90e53608a6d6b855bd783755"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_TestMetafunc.test_parametrize_indirect_wrong_type.with_pytest_raises_._type_ignore_arg_type_": {"doc_hash": "f8148fd539e42e98ae81304a51c03028bb34eacf105a777b5a2313ee00731635"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_generate_same_function_names_issue403_TestMetafuncFunctional.test_generate_same_function_names_issue403.reprec_assert_outcomes_pa": {"doc_hash": "8e145050d308636569aa88b3f7a83b6ec80be145384a167c7307cea831dfc040"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_misspelling_TestMetafuncFunctional.test_parametrize_misspelling.result_stdout_fnmatch_lin": {"doc_hash": "469d058acf97f127c563f52a7b10ad137fbb0aa46df2c2f6f7c069e0594c4680"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_simple_xfail_single_argname_TestMarkersWithParametrization.test_simple_xfail_single_argname.reprec_assertoutcome_pass": {"doc_hash": "05010af9d1d9922f3c142f39ec519785d206153609d1344a8abd238ddbe360af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_arg_TestMarkersWithParametrization.test_xfail_with_arg.reprec_assertoutcome_pass": {"doc_hash": "71813194147fd346f3f84fe5e52ad98718de94e72ca83e1dc9330be179e56a06"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_called_in_generate_tests_TestMarkersWithParametrization.test_parametrize_called_in_generate_tests.reprec_assertoutcome_pass": {"doc_hash": "1895bc69413d7a3788a1bfc2e089bf67e1b3a514e82b3ac90bbe6d866587bc6c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_ID_generation_string_int_works_TestMarkersWithParametrization.test_parametrize_ID_generation_string_int_works.reprec_assertoutcome_pass": {"doc_hash": "ea60bb20041988aab34597167f3334c3907009ce09588bf35657be8055994e34"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_pytest_make_parametrize_id_with_argname_TestMarkersWithParametrization.test_parametrize_positional_args.result_assert_outcomes_pa": {"doc_hash": "845c313ac978b7d07b723e19d7db6a6ca5817c922a16b8a1ca4596a318380bd7"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_re_TestRaises.test_raises_repr_inflight.with_pytest_raises_E_as_.raise_E_": {"doc_hash": "0cdfaf5d223a776c0aeae0a24d53004fadd9f6856b694139462791f7312ce38f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_match_TestRaises.test_raises_match.None_3": {"doc_hash": "0fc46c22dddc60de27e350e907f6e99566e450e349bf203312273a45fed0192d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation_TestImportHookInstallation.test_conftest_assertion_rewrite.result_stdout_fnmatch_lin": {"doc_hash": "aad38807dc55b73a3003d9e1ec9278f6d588e4e83c0e11350d35bcc9b5f4b1a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_rewrite_assertions_pytester_plugin_TestImportHookInstallation.test_rewrite_assertions_pytester_plugin.None_2": {"doc_hash": "95916798ffcf98788567e24c9ebc0943f43ed5eb2b9e54158e44aea186790479"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestBinReprIntegration_callequal.return.callop_left_right_": {"doc_hash": "7431e6ec8a20aaee60a51063cd78237f8a3bd40b5ebbbeb19338e4263417433a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_dont_wrap_strings_TestAssert_reprcompare.test_list_dont_wrap_strings.assert_diff_": {"doc_hash": "32ef846262ab71554ff60917e6c0dbcfeaf8b550f13ae41424f44b5f35c7f58f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_bad_repr_TestAssert_reprcompare.test_list_bad_repr.assert_expl_1_": {"doc_hash": "c133c5be765ba9cc06b179a69f3c07f9faabcdbbbfcc2167a48c04f74ad95381"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_one_repr_empty_TestAssert_reprcompare.test_mojibake.assert_msg": {"doc_hash": "2a16e4add4a50f9e5d9294dd0d2cc86eded04f2beea2dcd0f1e6998ec1209401"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_python25_compile_issue257_test_reprcompare_whitespaces.assert_callequal_r_n_": {"doc_hash": "2c697e346ceaa91505a7fc8cfa1aef92bbdad9ba4885028eaaab0f2946b51115"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_failedfirst_order_TestLastFailed.test_failedfirst_order.None_2": {"doc_hash": "72a7b78a46282aa218b9d71de67036d1e318e3bed163a836be09b5802baee102"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_usecase_splice_TestLastFailed.test_lastfailed_usecase_splice.None_4": {"doc_hash": "8c397ddecdbf52db8e04b5339bbd6448d334e1aa4bbe4a605ecb5943cd2d895f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureManager_TestCaptureManager.test_init_capturing.try_.finally_.capouter_stop_capturing_": {"doc_hash": "0e1a40fa1963aa51aa271f526c7fc0484667b44bed5938ab3f13708f2138e8be"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capturing_unicode_test_capturing_bytes_in_utf8_encoding.result_stdout_fnmatch_lin": {"doc_hash": "1ef8bca85a1f52e7a8c4ff601113cc799f49e3ef2bdf1e24ce32e4cbe168b8ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_collect_capturing_test_collect_capturing.result_stdout_fnmatch_lin": {"doc_hash": "5e6af6ec051625d74a2ef0caf26a407a63f01ef3e96651b60bebefe595aa4dc3"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capsysbinary_TestCaptureFixture.test_capsysbinary.result_stdout_fnmatch_lin": {"doc_hash": "bd7376f846516fe049f899bf11c0c8a8be0217b0e43f3b3ac7cd7298d4a4c488"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_conftest_runtest_setup_test_capture_badoutput_issue412.result_stdout_fnmatch_lin": {"doc_hash": "b121a7e5065756978ceef7d75226488c465c73b50d523981286bc4b337de737e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_early_option_parsing_test_capture_binary_output.result_assert_outcomes_pa": {"doc_hash": "5fdf170c9d1dcc37586be03c8e4f34399fce50865b25aaa9f14176c6b1c42b9a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture_TestFDCapture.test_stdin.assert_x_b_": {"doc_hash": "973d981c902e20dd8cbd18fcbc2697790cbda71ae5c67587d577265e06422dbb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture.test_writeorg_TestFDCapture.test_writeorg.with_open_tmpfile_name_.assert_stmp_data2": {"doc_hash": "691ec5c031b1158250da35ac0979401cd0c46d276a7533d9fcedf50e1d8a2ca8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture.test_capturing_error_recursive_TestStdCapture.test_stdin_nulled_by_default.with_self_getcapture_.pytest_raises_OSError_sy": {"doc_hash": "a1328c13f605e19079997ec73c2e00e7b92215603d1a13d35b52af72291e1c88"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestTeeStdCapture_TestTeeStdCapture.test_capturing_error_recursive.assert_out2_cap2_n_": {"doc_hash": "466db5bc8158b18276759a72bca2f6f6fc76282147d0dba1d824981d1d4a0a2d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestNodekeywords.test_keyword_matching_is_case_insensitive_by_default_TestNodekeywords.test_keyword_matching_is_case_insensitive_by_default.for_expression_in_speci.reprec_assertoutcome_pass": {"doc_hash": "84f0c669e5d2bb26bdd01af6afa5be517555b38ddf83f1c94d3205aca5c3a693"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_with_maxfail_bigger_than_n_errors_test_exit_on_collection_with_maxfail_bigger_than_n_errors.res_stdout_fnmatch_lines_": {"doc_hash": "42b7a0fe4d8f02f4fb21a21a5851d1d0d872663b4e49f098d8208d875cc46efb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collector_respects_tbstyle_test_collector_respects_tbstyle.result_stdout_fnmatch_lin": {"doc_hash": "0abb4f0cf2de75930378fbca685e591bc85b7ec8b50b8abe45648ad08025ef09"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_does_not_eagerly_collect_packages_test_does_not_put_src_on_path.assert_result_ret_Exit": {"doc_hash": "0fff023b5b231e342048ff0dfa24fb62a86c0a878149888aa589a54c7ab8993c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_real_func_loop_limit_test_real_func_loop_limit.with_pytest_raises_.get_real_func_evil_": {"doc_hash": "70ff6948cf41b331da3bda4c658d2f069792308d5dd0c1e2688952e916f828c7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigFromdictargs_TestConfigFromdictargs.test_invocation_params_args.assert_config_option_capt": {"doc_hash": "2215e0a9f00fb231410938c36e6da1eea1a5466d9a838ef3d3dd651a52ef69c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_disable_plugin_autoload_test_disable_plugin_autoload.if_should_load_.else_.assert_PseudoPlugin_attrs": {"doc_hash": 
"97aac218ba5d8899f52856c992c6e3ff89574dda9b086cc0aad8627acf256368"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_plugin_loading_order_test_plugin_loading_order.assert_result_ret_0": {"doc_hash": "0b30766ccc2731b9212bb758cb20d1974998e632733d78fcafc83c043dc99aa2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_addopts_before_initini_TestOverrideIniArgs.test_addopts_from_env_not_concatenated.assert_": {"doc_hash": "119b424053ab42000f4839c86f2901580163bb2b8db1aca04398ecd61cd88422"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_os_custom_pdb_calls.return.called": {"doc_hash": "d76d0e717e10e4410e072be689bc01c35941958ffab266c882453e97ce5458c5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB_TestPDB.flush.assert_not_child_isalive_": {"doc_hash": "00d26700a154bc19088dcf7b2d679ee0f59635e8a27c240c6ea68616956ed50b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_unittest_postmortem_TestPDB.test_pdb_unittest_postmortem.self_flush_child_": {"doc_hash": "9952270affa713d04542be2b306a12659bc1203c63b02f26692ea5633f50bcd9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_unittest_skip_TestPDB.test_pdb_unittest_skip.self_flush_child_": {"doc_hash": "330d25a71760a6463c8681d384f1a69068a233bf90353c08a1bd006c5f2425a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_stdout_and_stderr_TestPDB.test_pdb_print_captured_stdout_and_stderr.self_flush_child_": {"doc_hash": "90a76027159be837fa01ed669691137517a99f33998254b0f0f56771a827790d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_dont_print_empty_captured_stdout_and_stderr_TestPDB.test_pdb_dont_print_empty_captured_stdout_and_stderr.self_flush_child_": {"doc_hash": "3be84c7cb072addc784e17d4afe66aa3714b445bbfad4c9f41c50001df33440d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_logs_TestPDB.test_pdb_print_captured_logs.self_flush_child_": {"doc_hash": "d440e64700d69c6e8d0e8eb691d3850724498124ad05d9147cc2432663937101"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_logs_nologging_TestPDB.test_pdb_print_captured_logs_nologging.self_flush_child_": {"doc_hash": "0bbc6ed548fa88ce6341fac30abe7dc51b58edd2c648f2f78273f4c11346d1d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_exception_TestPDB.test_pdb_interaction_on_collection_issue181.self_flush_child_": {"doc_hash": "ae2a86322d8363b182b5d5d01d469b2f9fd7de79be2ec172918ca28267bafa87"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_on_internal_error_TestPDB.test_pdb_interaction_on_internal_error.self_flush_child_": {"doc_hash": "9ad2d6cf32456dab517ac73a2c3bb38bfb8fe36d2229c4a37ffcf4bd4cdba20a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_capturing_simple_TestPDB.test_pdb_interaction_capturing_simple.self_flush_child_": {"doc_hash": "0cf6b5f02486055e53defd6ad03c2f4d44e7e62fe61072e96bd6066715cfec46"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_set_trace_kwargs_TestPDB.test_pdb_set_trace_kwargs.self_flush_child_": {"doc_hash": "6790e2330472a113e6f00eec0e6c58f2eb9da255f8d33f9a82f32b0d3adad096"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_with_caplog_on_pdb_invocation_TestPDB.test_pdb_with_caplog_on_pdb_invocation.self_flush_child_": {"doc_hash": "343c3193ea44e005b83e72ef028ae687eee48ab80799a6c050b49bda7ce7c48e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_set_trace_capturing_afterwards_TestPDB.test_set_trace_capturing_afterwards.self_flush_child_": {"doc_hash": "e6421b3db47d816ac48ab4f074e79c009e33505d66d366cd69cb97e4cd783749"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_doctest_TestPDB.test_pdb_interaction_doctest.self_flush_child_": {"doc_hash": "69725bf25c0f3e3589ae425205356fd82bddd19ec44c1914316e119d1bdb1613"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_doctest_set_trace_quit_TestPDB.test_doctest_set_trace_quit.assert_UNEXPECTED_EXCEPT": {"doc_hash": "a9ad027f4cdeda3e1f98de9258c410c16b28592a0403194dca7d05940975a1e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_capturing_twice_TestPDB.test_pdb_interaction_capturing_twice.self_flush_child_": {"doc_hash": "66aca6d979bade8874523fc79e3e336592eba667350457d039684e8d6820a6f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_with_injected_do_debug_TestPDB.test_pdb_with_injected_do_debug.self_flush_child_": {"doc_hash": "9a9b2c6649b498e5949e9a9dc21363d5ebb36e5256e2725708b124891b2db2db"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_without_capture_TestPDB.test_pdb_without_capture.self_flush_child_": {"doc_hash": "b169550889faf3828db8a410f438abdf096f8ad622bb14be25ff35ef1c3ff27f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_continue_with_recursive_debug_TestPDB.test_pdb_continue_with_recursive_debug.assert_1_passed_in_in": {"doc_hash": "348c4cdf362fbe59cc8c60342989b0579427e7f3d3eb6869081028502701eba4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_used_outside_test_TestPDB.test_pdb_collection_failure_is_shown.result_stdout_fnmatch_lin": {"doc_hash": "63bba5cc847f79437b80e2a98bfa1c4d134ac471c860f32b3141b348ecb8c527"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_enter_leave_pdb_hooks_are_called_TestPDB.test_enter_leave_pdb_hooks_are_called.self_flush_child_": {"doc_hash": "1ccccb1d30336c78eb329a22515ae374eaaeef6211c67b90e92d123bb9b26474"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_with_set_trace_TestPDB.test_pdb_custom_cls_with_set_trace.self_flush_child_": {"doc_hash": "1790d0e18b51df4b205f64250ea09d7fb143af4f895ef793cf939b1767e2b7d6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_pdb_custom_cls_TestDebuggingBreakpoints.test_pdb_custom_cls.assert_custom_debugger_ho": {"doc_hash": "daa686340ee5f0acb19b65a57ec3d1783b2b2f9d5e27681642cf8ae94be191e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_environ_custom_class_TestDebuggingBreakpoints.test_environ_custom_class.result_stdout_fnmatch_lin": {"doc_hash": "747edeb62426f713c53c3b91a747f0e1df29766dab81bf57c547a6ae9ecedba4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_sys_breakpoint_interception_TestDebuggingBreakpoints.test_sys_breakpoint_interception.TestPDB_flush_child_": 
{"doc_hash": "bc2b7e8ec38e006333c3d4a0970ac5d955f979d4a9f830ad3f6d4c858d16803e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_pdb_not_altered_TestDebuggingBreakpoints.test_pdb_not_altered.TestPDB_flush_child_": {"doc_hash": "78bd411ec15cff35961573b84d68fef70923cf7afadc061ac58ae7a89c28176a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestTraceOption_TestTraceOption.test_trace_sets_breakpoint.TestPDB_flush_child_": {"doc_hash": "f0ba56b0755039c3c5a390d838f879a38c4eb71bb4ad755e90b9319fc02f7731"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestTraceOption.test_trace_with_parametrize_handles_shared_fixtureinfo_TestTraceOption.test_trace_with_parametrize_handles_shared_fixtureinfo.TestPDB_flush_child_": {"doc_hash": "c6cb0b921a5714dfc4b03bebf5c4b8408e13fab27cb0a203123dfaf0299a3737"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_trace_after_runpytest_test_trace_after_runpytest.assert_result_ret_0": {"doc_hash": "fa67980e030ae92642ebdabeb446ea4b0bb2708ece3a56461300600b7b43b6f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_quit_with_swallowed_SystemExit_test_quit_with_swallowed_SystemExit.TestPDB_flush_child_": {"doc_hash": "717eea2347a49a39fb94149817f6b16b88a9fa3ee7a34fb54e84e428d2356a5f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdb_suspends_fixture_capturing_test_pdb_suspends_fixture_capturing.assert_PDB_continue_I": {"doc_hash": "db29a8c9d08d15219814ad78e3994cac28881b0edeaaba4f19248c23c93b8ff0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdbcls_via_local_module_test_pdbcls_via_local_module.None_2": {"doc_hash": "6c49f694826f630b332f79c8e3c4390cc996f60e861df1842929bc8a382736ef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_raises_bdbquit_with_eoferror_test_raises_bdbquit_with_eoferror.assert_result_ret_1": {"doc_hash": "7fd6b2d75628964e164dcb1b9a8a8f1404ea7fac9842501d680341dfb27754f8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdb_wrapper_class_is_reused_": {"doc_hash": "0982a2cce5cefc2f7791632bd7f26b361374afedbf44b924a07f024eaf51712d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unexpected_exception_TestDoctests.test_doctest_unexpected_exception.result_stdout_fnmatch_lin": {"doc_hash": "f48c1c5b9070280f2359c1243ab1f4a3ff8bc84a1fc7109728ba5b4fd0ae6bd1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_outcomes_TestDoctests.test_doctest_outcomes.result_stdout_fnmatch_lin": {"doc_hash": "47f9c0300ad6a336e71f5d499c87b39c48ee62785e9b8e00f9cdfa7be55069c9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_linedata_on_property_TestDoctests.test_doctest_linedata_on_property.result_stdout_fnmatch_lin": {"doc_hash": "47773423c89e63c4b60f1338d86ae7ae724a00b8466a4e8609dd64896492e087"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_no_linedata_on_overriden_property_TestDoctests.test_doctest_no_linedata_on_overriden_property.result_stdout_fnmatch_lin": {"doc_hash": "b230df7837d481b236e5af0a626e68da73c297e3e376b7e08a3c3f87ca218e80"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_error_diffs.py___TESTCASES": {"doc_hash": "cd6a2d90a3d87c925cdc78c3be8b5da590d198d079bf51d61bd1940c3ecfc29e"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_error_diffs.py_if_sys_version_info_2__": {"doc_hash": "ee6c25e6010df9485b08da804357483a4adc4d2d9c1c3389c3ec8261127de5f1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_cancel_timeout_on_hook_test_cancel_timeout_on_hook.assert_called_1_": {"doc_hash": "d302e4f97bdd56cc3c0c6dc3c566d80a9e979e54f711b02e21b963dec847ec03"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_function_TestPython.test_failure_function.if_junit_logging_no_.None_2": {"doc_hash": "eff5b1ea10c0a556378191ab7a7cfb8f46cae1ddb63837185429b3eb162efe41"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_escape_TestPython.test_failure_escape.for_index_char_in_enumer.assert_s_n_char_in_t": {"doc_hash": "b46d35bf734d1b1420a37b8dd9f722fd990edca271dce2d29f46f05bf1e43cd9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfail_captures_output_once_TestPython.test_xfail_captures_output_once.if_junit_logging_in_log.else_.assert_len_tnode_find_by_": {"doc_hash": "303a551035677ff6ffc06b95e88de3e3a7ed098c084068acfa8107451d03aadb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_pass_captures_stdout_TestPython.test_pass_captures_stdout.if_junit_logging_syst.assert_": {"doc_hash": "21dbbc289f18a3964cbd449678f9d8b1009cd6a2ad92621112ca4afaf37aa0ef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_pass_captures_stderr_TestPython.test_pass_captures_stderr.if_junit_logging_syst.assert_": {"doc_hash": "1a3dba9af6258b8f022d58e7f7f68050a8f76630a42081ee0293a227e2c7716a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_captures_stdout_TestPython.test_setup_error_captures_stdout.if_junit_logging_syst.assert_": {"doc_hash": "857e89b4305d609072a198f397f44993b9a4e6c76a94d646a8044836a0936710"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_captures_stderr_TestPython.test_setup_error_captures_stderr.if_junit_logging_syst.assert_": {"doc_hash": "9462031bfd715ed95612069c2e65a06833bdcb13d6d69a170521419b859fccc4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_avoid_double_stdout_TestPython.test_avoid_double_stdout.if_junit_logging_syst.assert_hello_stdout_tear": {"doc_hash": "7fbdc99e3c465adf2ce795f8c451298c491006b8f3a48cf2af58bc27306fde3d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_nullbyte_test_nullbyte.if_junit_logging_no_.assert_x00_not_in_text": {"doc_hash": "28ca1012d7d91e9b45c43551299430a05c4c99fc53738f04d58e0cead7500104"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_nullbyte_replace_test_nullbyte_replace.if_junit_logging_no_.assert_x0_not_in_text": {"doc_hash": "07b0ff4cfe77266847ae9e0dcdc0e4998c8e699606e81393b63529c2b2e04490"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_property_test_record_property.result_stdout_fnmatch_lin": {"doc_hash": "b777a83b33f08fc1eb213c4b163cb046b8a6d9bccda35dbf8fedda87c82e4cc9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logging_passing_tests_disabled_does_not_log_test_output_test_logging_passing_tests_disabled_does_not_log_test_output.None_3": {"doc_hash": "0138014b7d2a7a351979fad1bc892cc433323911fdbc9629889ed5be551dccbd"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logging_passing_tests_disabled_logs_output_for_failing_test_issue5430_": {"doc_hash": "fe29c2c7179d8f31dbe85502155fcbad43844ec480365b3c1dc3bc90d4fd51eb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_keywords_at_node_level_TestFunctional.test_keywords_at_node_level.reprec_assertoutcome_pass": {"doc_hash": "280a332c4aae24bb8f66e63ef00bc9b10cfe10ff59e4c58beef601b01f96776a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestMarkDecorator_TestMarkDecorator.test_aliases.assert_md_kwargs_thr": {"doc_hash": "c4f020994cb6c69acedeab2f9ed019ce75899c8c9e128d5bb30e229d4194d786"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_parametrize_bad_markname_test_mark_expressions_no_smear._assert_skipped_k_fai": {"doc_hash": "fee1a23a56468a63068f6ad3c533f9494e5d9ec4457266897b20db141b01adfd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_addmarker_order_test_markers_from_parametrize.result_assert_outcomes_pa": {"doc_hash": "540caf999ed59ef79ef44ca0aad357fc4dbab25fe0e43e95a7d4563f00d7efa4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_pytest_param_id_requires_string_": {"doc_hash": "50f9ac59e96d699541f552bc9a2c0f9fff499c7e8e631d52e49deeb9690fdc07"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_SkipTest_during_collection_test_skip_test_with_unicode.result_stdout_fnmatch_lin": {"doc_hash": "0517fc9a560103d2a2d82a5f0405d346180782f8f6b93918ecbaa5930dcf0b1e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_raises_": {"doc_hash": "61063dac14780f3881108ee14d47b5ce181ee34c92410cef45e25349bdf070dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_argparse_TestParser.test_custom_prog.None_1": {"doc_hash": "178c93187878c5cde368de048e4bd533c834b7f6cbef21b75af0786168639560"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_TestParser.test_argument.assert_str_argument_": {"doc_hash": "990173c95ac354cee040656b6e7f89cf259d7deda0cd48a184881c07fe65c476"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_processopt_TestParser.test_parse_known_and_unknown_args.assert_unknown_y_": {"doc_hash": "08560d69054d4c7f72ec9f93b282bab9fa53b4240fb9736ce301d8daf2dd92ac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_will_set_default_TestParser.test_parse_special_destination.assert_args_ultimate_answ": {"doc_hash": "c56a6aee00b7724bf91dddc3e9a11c55a9ea995c05353cf0f7dab46b47fe0f08"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_runresult_assertion_on_xfail_make_holder.return.apiclass_apimod": {"doc_hash": "b33f2288117c9d4e22d8a4a7b565c6b635c3d44232cc0ae22d82149b4caecf29"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_with_nonlist_test_linematcher_with_nonlist.None_3": {"doc_hash": "d7aad7bc52db3379a44fe3ce6bb035776e10908cde756fac53f91167ea571bc6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_match_failure_test_linematcher_match_failure.None_3": {"doc_hash": "09c605e523fb6922a7595258fa2fb77914bba8b7c43f0a134ffb35ef0738393c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_consecutive_test_linematcher_consecutive.None_1": {"doc_hash": "36e31b4a68c452d711762d66824bd7ca0a8e890327b740f14d81f335c10e5488"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_no_matching_test_linematcher_no_matching._bad_pattern_does_not_ma": {"doc_hash": "ea5daae143570a37685a7e351060c519134baedac69a21e75fd7fc17bd923b2d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_run_stdin_test_run_stdin.assert_result_ret_0": {"doc_hash": "86762b76e1d4673186085eed0a959f9c21b6da6f30b3e4be1aa7049669992c82"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_stdin_pipe_test_popen_stdin_pipe.assert_proc_returncode_": {"doc_hash": "4486438e2ef001110c59e82239c13d42bde1df15b7ee384af85a22b17a990f38"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_stdin_bytes_test_popen_stdin_bytes.assert_proc_returncode_": {"doc_hash": "873eefebd3a1beaaaa71ccf9bf395c6326667954973711c5cd9d451ef90cee9f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_default_stdin_stderr_and_stdin_None_test_popen_default_stdin_stderr_and_stdin_None.assert_result_ret_0": {"doc_hash": "ef351e87397320e4dac55baea7a96898e00f0fad25a6a9a8979e00592099d669"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_run_result_repr_test_run_result_repr.None_1": {"doc_hash": "5a5cc562f396dd8c8baea8fa22bb91a73b74c5a2407cfa0e6f857248ebfabe4b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall_TestDeprecatedCall.test_deprecated_call_ret.assert_ret_42": {"doc_hash": "4f78ef98a7d75822ce795ec508774aee896d2946db8bd7ebcfd567fab8411f66"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_preserves_TestDeprecatedCall.test_deprecated_call_preserves.assert_warn_explicit_is_w": {"doc_hash": "fc856ed887e3d70c90cdc9a0561c8e4fcda5dc99b0bb2436008663a2cfe1cd6a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_explicit_call_raises_TestDeprecatedCall.test_deprecated_call_no_warning.with_pytest_raises_pytest.if_mode_call_.else_.with_pytest_deprecated_ca.f_": {"doc_hash": "4f8bbe3acda2bf6e534682eba36ea099cc1a5815c404b41b4fdd565147f814f6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_double_test_TestWarns.test_double_test.result_stdout_fnmatch_lin": {"doc_hash": "e00b336f424cee9b567ad792fc82dc315817ad9d46a2a491df99380bde31813e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_match_regex_TestWarns.test_match_regex.None_2.with_pytest_warns_FutureW.warnings_warn_value_must": {"doc_hash": "dcb297a23d74b0b2038e68dbd36fa0923069302a92a100c7afc78af7cd159564"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash_TestReportSerialization.test_chained_exceptions_no_reprcrash.reports.reprec_getreports_pytest": {"doc_hash": "3cc9cf94a659ba21b50a301153f4894d1c2825e8bc117bdfbb374c4aa0d0f2ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_systemexit_does_not_bail_out_TestExecutionNonForked.test_keyboardinterrupt_propagates.try_.else_.assert_False_did_not_ra": {"doc_hash": "94ced8028de529bc4d30909e319129dbc6a44984a17b17e0093aa3ab540c2f3b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_module_level_test_importorskip_custom_reason.None_2": {"doc_hash": "14a72e4b1170b3bc9268a0da13884d10abeb8e22e7bebf22f9cb6a5f072185cc"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_cmdline_main_test_unicode_in_longrepr.assert_UnicodeEncodeErro": {"doc_hash": "3ace401c78b95f554d5d7643e42f511c19ca4edfe8717b133abd7bbfe8942685"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_store_except_info_on_error_test_store_except_info_on_error.None_6": {"doc_hash": "3d918e4d97c271c5eaf0b0b5248deca0ad0e8d77c75532010ab049ccb76326e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_test_show_multi_test_fixture_setup_and_teardown_correctly_simple_test_show_multi_test_fixture_setup_and_teardown_correctly_simple.assert_teardown_count_": {"doc_hash": "f70c542131fb1931d7fbf994732a62522b0cae9d60e2f26dfd3d9023efdd466e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_test_show_multi_test_fixture_setup_and_teardown_same_as_setup_show_": {"doc_hash": "7dee4d2f3538cb74a40aadc61af8e37b3c3ddd72d1886dce8234c4696618c2eb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_store.py_pytest_": {"doc_hash": "cf2957399ccc8e985c74bd2d302df73041bb8aef46f98638b89bebc80d7064ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_itemreport_subclasses_show_subclassed_file_TestTerminal.test_itemreport_subclasses_show_subclassed_file.None_3": {"doc_hash": "6f2758a422735f4621566cbb74e7c40cd6829ebaf2930d993cb2d5eac2c31963"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_report_teststatus_explicit_markup_TestTerminal.test_report_teststatus_explicit_markup.result_stdout_fnmatch_lin": {"doc_hash": "7f1f7368f1b0e8ade8e8542c0ee193094118aa6474dc0579cae012495f48ddbb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly_TestCollectonly.test_collectonly_skipped_module.result_stdout_fnmatch_lin": {"doc_hash": "3754abbe425aef223ba4957bbb3dcf482d4fbd82288bae654da5505f409ad936"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_displays_test_description_TestCollectonly.test_collectonly_displays_test_description.result_stdout_fnmatch_lin": {"doc_hash": "832c0c2db4b48f89e26a43cc70de5972a7c61b364d73eb410f71453327441b82"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_failed_module_TestCollectonly.test_collectonly_fatal.assert_result_ret_3": {"doc_hash": "a7f665247fb1725d776e966310149e828bdf8f5f6011549b3fe9c2a04bdf5f88"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_showlocals_short_TestTerminalFunctional.test_showlocals_short.result_stdout_fnmatch_lin": {"doc_hash": "f153c67955562dd8a3b7611855b2c452ffcab07c57cd27666d31431728a2cdd5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.verbose_testfile_TestTerminalFunctional.test_verbose_reporting.assert_result_ret_1": {"doc_hash": "67cf97127c6b808a9ed2950a097d6a6915893520f25a28d1bc55f939b7f074a5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_summary_f_alias_TestTerminalFunctional.test_summary_f_alias.assert_result_stdout_line": {"doc_hash": "6a77e2a9aef138d60bb47e0ef27aa7534d5659e7a13d9ebf9d49d930dc4592c7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_summary_s_alias_TestTerminalFunctional.test_summary_s_alias.assert_result_stdout_line": {"doc_hash": "c6d30bec40b1af8b8187dfa66e836356a88d287d2135cd02ebf5e2e9fc93bb27"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_color_no_test_color_yes_collection_on_non_atty.assert_collected_10_item": {"doc_hash": "75c88284beabda3cd805e9242a84610fdf8fae27180c6256e0404807cc148171"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_getreportopt_test_getreportopt.None_14": {"doc_hash": "6a227c6dab5c72f84969a45d9d58fdf1d6f1f9a88cd35ce6a6e26cad0622fdec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_maxfailures_with_interrupted_TestGenericReporting.test_maxfailures_with_interrupted.result_stdout_fnmatch_lin": {"doc_hash": "2e06842c65bf0b8de4dbaa2a32d252d595731ac6dc07b6832d73ed05bf7ce6e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_colored_progress_TestProgressOutputStyle.test_colored_progress.None_3": {"doc_hash": "4c1b1ab06be5f2ed4ccbdf06b9d16bb8eca1d3371dec4dde92319e3d9e65d99b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCodeHighlight_TestCodeHighlight.test_code_highlight_simple.result_stdout_fnmatch_lin": {"doc_hash": "68325bd868e1cd647a376f85bf80a5fab6e6c734ca6d1eae323edfea09ee7d79"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestConfigTmpdir_testdata._": {"doc_hash": "703ff163775025ea8ee00345112a698354559ba105e63ef1bedac0b6c3703480"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_mktemp_test_mktemp.if_is_ok_.else_.result_stdout_fnmatch_lin": {"doc_hash": "2a73a308a439fd2931038336a3ae0022bf763b64f7f3b630456a3e15671df83d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_cleanup_functions_test_cleanup_functions.result_stdout_fnmatch_lin": {"doc_hash": "e8946f603ebabbbce39e676e8a17bec12851ff0c85030e1016db5c7e22917169"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_BdbQuit_test_exit_outcome.result_stdout_fnmatch_lin": {"doc_hash": "4d37cc47b27086fb6a388a0101c10ae2c3678b5a1585333c64b2fe1203f315e8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_trace_test_trace.assert_result_ret_0": {"doc_hash": "fa3b0ed7909e4f2ac121557d7a219a70b67b33b8a42e797f28d79a6e773b40e7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_pdb_teardown_called_test_pdb_teardown_called.assert_teardowns_": {"doc_hash": "5a64585acba702d46c6044707768c0aabe2bed2011589005dc9873097001b1fb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_os_pyfile_with_warnings.return.str_test_file_": {"doc_hash": "7bd0a6e9f191f5435306e47b4f7e9a781c4a422e9c66b0386432e9beee311524"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestAssertionWarnings_test_warnings_checker_twice.None_1.warnings_warn_Message_B_": {"doc_hash": "3f036c169905dd731bc688981af2258aa2ce4d756b8c424ab956fd0c33438b7c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_group_warnings_by_message_test_group_warnings_by_message.result_stdout_fnmatch_lin": {"doc_hash": "9a1d9d213b8bedf3364eaba3df54f740a64986a79ad3e8ba28e1aec762d7352e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_group_warnings_by_message_summary_test_pytest_configure_warning.assert_str_warning_messag": {"doc_hash": "d094ffdf19cb941c368673db0d80483061b24f617ed0259896c48bbc5a21919a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel_TestStackLevel.test_issue4445_rewrite.assert_lineno_4": {"doc_hash": "a56c2d29a301fd322e1a4da5ab5f011f2cc72c5415eff2c4d5d988c9f9c6e72a"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_preparse_TestStackLevel.test_issue4445_preparse.assert_func__preparse": {"doc_hash": "937f64aba2688bcc6c8c8f6194092fb68e93dd946751a7efb3c25f390c3541ca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_issue5928_mark_generator_": {"doc_hash": "678076348c6ce44027f533f5b41e5f179ff860edfbdca32aaa1d6841d93bf21c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/__init__.py__Python_inspection_code_": {"doc_hash": "f6bc2b0f8222dc30489156844ced080e20e230c841bc3ad1d3fcb40767d11c45"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code.fullsource_Code.getargs.return.raw_co_varnames_argcount": {"doc_hash": "6e32617852664a00db20c1c275eba907e652530e7bd21dd7c55c37219ed4b1b7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.getsource_TracebackEntry.source.property_getsource_": {"doc_hash": "f066b2faf58d037516f97bf2d248a6e8dbca8b05ddd4145f835ccd1ac6ac7c27"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_traceback_entry_FormattedExcinfo.repr_traceback_entry.if_style_in_short_lo.else_.return.ReprEntry_lines_None_No": {"doc_hash": "3efce173fb676cfec92edb146f179d5654bdd81d27d56a99d9fe68fbe5951446"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/__init__.py__": {"doc_hash": "b350075488fbf50558d972af138b5f49d4b084cf7cfc299e72c87d8bef97b2c7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py__Helper_functions_for_w_should_do_markup.return._": {"doc_hash": "5dd7b091d7064eaf049c96460f587804b5c095a3f88ad2dbaec88a5d1a33827a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.markup_TerminalWriter.markup.return.text": {"doc_hash": "bb204730c27e99c8f4b8cf567eddd29bc86080a7270208c6a7715d2e6bd854b4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.sep_TerminalWriter.sep.self_line_line_markup_": {"doc_hash": "23d6b17b42c8dbc7669d4c97573eff7b7de07241fb12edec38fb53a52b823133"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.write_TerminalWriter.flush.self__file_flush_": {"doc_hash": "7c8e81cf596f0f4e76a4c5864db5c2725814ad7ff9738941a9e1d8ec00e94e36"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter._write_source_": {"doc_hash": "ff9be4300234a16bf27ad1fadd71c1e36d74bfa9d7897a42d6c1a0b2a7ed9791"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__py36_windowsconsoleio_workaround__py36_windowsconsoleio_workaround.sys.stderr._reopen_stdio_sys_stderr_": {"doc_hash": "dd55580142583adac0b36cca9763a4fce72234192d89935252f69f77cfe69f52"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__IO_Helpers__EncodedFile.mode.return.self_buffer_mode_replace_": {"doc_hash": "4012ef8218b5c8628c785f6c58fca3a71e878dce185bae48dbe04e1fda41546b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_DontReadFromInput_NoCapture.__init__.start.done.suspend.resume.lambda_args_None": {"doc_hash": "af72db98656f7a689eaab43d0fb0bd7d60c37b434e7a4fc901c3a0ff026e6513"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCaptureBinary.done_FDCaptureBinary.done.self._state._done_": {"doc_hash": "1f7592b207f2a98e8b142c1dfb8460f364aa8de890bcc11dd8eec67ac876bb80"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCaptureBinary.suspend_FDCaptureBinary.writeorg.os_write_self_targetfd_sa": {"doc_hash": "6dfa7a566d000e53958ca15e1b2cef1e9d99ad258f573fed063d7a38bf38340d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_MultiCapture_MultiCapture.readouterr.return.CaptureResult_out_err_": {"doc_hash": "1c4ac4ade4f24eb3b1714e919f98239591b4f817d9592c21e56f824b28c5434a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__The_fixtures__capsys.capman_unset_fixture_": {"doc_hash": "2eb4b12d7cb5cfc4597b41a644e9c353ee6347a4b40a750907473e5d9f89798b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capsysbinary_capsysbinary.capman_unset_fixture_": {"doc_hash": "6676f013d9f7acadb8a268c029c7fb6ddd0fb8c571573c2f77997b9db9213451"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capfd_capfd.capman_unset_fixture_": {"doc_hash": "100f069637ee7ac22c032af0608b0c13b477db88f035056eb1cdb0ec19603804"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capfdbinary_": {"doc_hash": "87a6a0225126ffd1e518abb40c997f451f453293962e33d1f320b7a2f498104e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_console_main_console_main.try_.except_BrokenPipeError_._Python_exits_with_error": {"doc_hash": "498b1dcd6c1431329a43e39341fd77befb2f0b055e84f5d293523daa39df30e0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._importconftest_PytestPluginManager._importconftest.return.mod": {"doc_hash": "d4e5ae88f991b9fb2e2ac9cb7d65f289fb766486fb3171763ca4d4497171b997"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._check_non_top_pytest_plugins_PytestPluginManager._check_non_top_pytest_plugins.if_.fail_msg_format_conftestp": {"doc_hash": "02540f6e28ab5ca42095549828e4fd0736cf9f5c68f773d5c7c68ec9a63e97d3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_pytest_addoption_pytest_addoption.None_11": {"doc_hash": "4d9c5a2e7d7d1d00d3253ab5dbd051030f43c3eba4342b2a171830e929298e20"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureHandler_LogCaptureHandler.handleError.if_logging_raiseException.raise": {"doc_hash": "4253d61dbdca657c6a444056b26dfd6c2957506b3c108bc3dc728a7b6414d888"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin_LoggingPlugin.__init__.self_log_cli_handler_setF": {"doc_hash": "303d3a4efb8a1724583c6c94b9d0b862b5c2326f6a318c319a4221afa8fe79bf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.set_log_path_LoggingPlugin.set_log_path.if_old_stream_.old_stream_close_": {"doc_hash": "4c8f14e07e18cf67a8da82872682787e86e12a18c2d9521ef57521c86305417d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._log_cli_enabled_LoggingPlugin.pytest_collection.with_catching_logs_self_l.with_catching_logs_self_l.yield": {"doc_hash": "58ba2b88a2bcd2264afa273e05125082e69b4265596bca9828aa669152068d2a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.pytest_runtest_setup_LoggingPlugin.pytest_unconfigure.self_log_file_handler_clo": {"doc_hash": "1573d3b83171d74311e35219bdad087f558da3f2ccbcbb95d4cb135d12168eab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__FileHandler__LiveLoggingStreamHandler.set_when.if_when_start_.self._test_outcome_written.False": {"doc_hash": "c04d2757d077595482f5d6f85d9ef7cc827c0ac30595b7d88bd74c64e30fcfd7"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_KeywordMatcher_KeywordMatcher._names.attr_ib_type_AbstractSet_": {"doc_hash": "9cf1bcda39e7806121f8215186c7276f9e46ef9333bed4b0e36eecb58c1e20bc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_KeywordMatcher.from_item_KeywordMatcher.__call__.return.False": {"doc_hash": "68788ed252d5c8ce2731c0ad27ed269cf937f3372816a21a6a9774e55990ad1d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_MarkMatcher_MarkMatcher.__call__.return.name_in_self_own_mark_nam": {"doc_hash": "0b4165c16799b0db4b0ac6b4c7247f2cf50d9c2d3f31508698ee1dc813c46426"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Scanner_Scanner.lex.yield_Token_TokenType_EOF": {"doc_hash": "1f959c0e4b820389085485a3004aa69278604d09e93f7fb5718d167a068815fd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Scanner.accept_Scanner.reject.raise_ParseError_": {"doc_hash": "c6acae75c829d51631b136ac19177c590895e647d7f0803c2e616a8699be8218"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Expression_": {"doc_hash": "8ed6b813c231cf21567c74ea3cbe5d1973eb86865df6320e4aa4c8ce3ce49964"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package_Package.__init__.self.name.os_path_basename_str_fspa": {"doc_hash": "402d598d0ecaee41741c82428378f0fb5920dbeb25232b5184751265200fc5e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__showfixtures_main_write_docstring.for_line_in_doc_split_n.tw_line_indent_line_": {"doc_hash": "33e2dd97a67159430f04fe8c74432904aeab70efdc52ce328a06db7064b67fb0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_fspath_result_TerminalReporter.write_fspath_result.self__tw_write_res_flush": {"doc_hash": "1985b356fd5fe4d67af443b148945e8d8a765deea120c1643f515cacec928228"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_runtest_logreport_TerminalReporter.pytest_runtest_logreport.self_flush_": {"doc_hash": "6ce2b70c0e31986c2a3d5bcb23fa4a963ec2e0164b50a81e89cf04feef80f814"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_collection_finish_TerminalReporter.pytest_collection_finish.if_self_config_getoption_.if_failed_.for_rep_in_failed_.rep_toterminal_self__tw_": {"doc_hash": "e662f0da2e230bb37715dc8f32a15b665e26cef7c1ab90d711a9035ea5e41d9c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_tee_stdio_captures_and_live_prints_test_tee_stdio_captures_and_live_prints.assert_this_is_stderr_": {"doc_hash": "7822bf2f9110a5b3a21f119d2ad54c85ae34b1b584014b4eedd2c858fe99af00"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_no_brokenpipeerror_message_": {"doc_hash": "f2636e785d06d5ea8efa9cdc2cd834ada13978bbe6591e737384e8f925c6ff0b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_format_excinfo_TestFormattedExcinfo.test_format_excinfo.assert_file_getvalue_": {"doc_hash": "24aa3452076bdd12ce059eda9f9ff8fa7458942488fb0634208799e718dfda78"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_comments_test_comment_in_statement.for_line_in_range_1_3_.assert_": {"doc_hash": "8a83895e10849d217ba9056af4cde6e74b96a60ccf293e60c74173eb0f6c5e21"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_single_line_else_test_multiline.assert_str_source_ra": {"doc_hash": 
"68168929f0551cc03387cec8dd53b1b066c6fdd1b61ef4ef8ba1639f0af97fea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriter_TestTerminalWriter.tw.with_f_.yield_tw": {"doc_hash": "f024762b7ef3a3d27b9aeaac865b2178b623600487401209c2c22d4d4e079596"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriter.test_line_TestTerminalWriter.test_attr_fullwidth.assert_len_lines_0_l": {"doc_hash": "6910a62b3ced196d1f61f0add5d622a4693719860f02c9480f1e30b97071d83b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriterLineWidth_TestTerminalWriterLineWidth.test_combining.assert_tw_width_of_curren": {"doc_hash": "5acf229b756da077637b2a3aa9d52edd0ff8c4f3f27ff7436e7b0668b23b2632"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_code_highlight_": {"doc_hash": "58e5c635666bcdc5e753567aaf0fa9622cad0a650fafb2798cf8ab982dfca945"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_colored_ansi_esc_caplogtext_test_colored_ansi_esc_caplogtext.assert_result_ret_0": {"doc_hash": "f3a0b75290b031466fe277bf8cfe242f7a9aa9fa48d34341743640592dae8bf9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_logging_emit_error_test_logging_emit_error.result_stdout_fnmatch_lin": {"doc_hash": "7275102b36d4ad99a6c41ba5aacb3801b394975905a37c3458f232d20dacef89"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_logging_emit_error_supressed_": {"doc_hash": "b1d5b0a3ce8c91f7cb7ade449d332d38494721c39742aaeccba11569bdf47fc2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_with_square_brackets_TestFunction.test_function_with_square_brackets.result_stdout_fnmatch_lin": {"doc_hash": "1c1d75abd1f1f418543f040f132a6d5ee192d270e1b3ce97f0c97fc5f9a2bb5a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_syntax_error_with_non_ascii_chars_test_collect_error_with_fulltrace.result_stdout_fnmatch_lin": {"doc_hash": "ae82439d18865c7e3f027a7e19dd20ac5514054881c5b4977e7415e6fccbc879"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_iterator_": {"doc_hash": "89220c9c34fc9f5091f3b1f261d0dab2ddc4c5eaa72245cc086c030c9cf120b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_reload_is_same_and_reloads_TestAssertionRewriteHookDetails.test_reload_is_same_and_reloads.result_stdout_fnmatch_lin": {"doc_hash": "b3919b02416c7d75c69bb2e23e36d42fd43527b383abd54aab95cda6b7119e14"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_known_failures_not_being_selected_TestLastFailed.test_lastfailed_with_known_failures_not_being_selected.None_8": {"doc_hash": "c859d869ab7752ff6f860d3751c2330f4c631bec908214271bf72e6a34803c85"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_args_with_deselected_TestLastFailed.test_lastfailed_args_with_deselected.None_3": {"doc_hash": "bf04bc1ba5e3bf0f5e90fc8a61ecb2100aae3efc003dff3a3e8e9b91bfa43da8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_class_items_TestLastFailed.test_lastfailed_with_class_items.None_2": {"doc_hash": "27146ff3d1f120e2427e084ccebf3d3f6dc60855f7722832710a3c12db964fdd"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_all_filtered_TestLastFailed.test_lastfailed_with_all_filtered.assert_result_ret_0": {"doc_hash": "ada95605f69a2095bede88487b9bb29101f79c9dcc8d119f49e383fc4395a2b8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewFirst_TestNewFirst.test_newfirst_usecase.None_9": {"doc_hash": "1ee54dd67e1e568e90f15268e3da56227c1f4c4d0854374fadc1ee89a467b1e7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_contextlib_TeeStdCapture.return.capture_MultiCapture_": {"doc_hash": "af072fdcf51f6656f89741f40ff723c3466d99c97bc8ee96650eb8c916e95a49"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_with_fd_reuse_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_with_fd_reuse.with_saved_fd_1_.with_pytest_raises_OSErro.os_write_1_b_done_": {"doc_hash": "c4dc8e04c5682a9615c610f03be10b41896bc2753c8334a4d2b052f9910739be"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_without_fd_reuse_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_without_fd_reuse.with_saved_fd_1_saved_f.with_pytest_raises_OSErro.os_write_2_b_done_": {"doc_hash": "07859504a57cba0032970ddeea4807dca0b490df498943d4fa88065f13ad1d99"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_typeerror_encodedfile_write_test_typeerror_encodedfile_write.assert_TypeError_write": {"doc_hash": "1bfd36c440327cdb1e85651ad7dfd2b291bb0507e00670e0bfb4d6b281d8d628"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_stderr_write_returns_len_test__get_multicapture.pytest_raises_ValueError_": {"doc_hash": "1ac497a4660b82370d0d2fe8fd7a1ac002aac5729c99b3ffefb2f5978884c76e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_logging_while_collecting_": {"doc_hash": "096d9be2b2b48012799b236314d3c57231dfe5bc51ec7edc310cac4bede05ed1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni_TestParseIni.test_getcfg_and_config.assert_config_inicfg_nam": {"doc_hash": "1c0c01bab4b2eb4313ac79c311de6874ea35ec7989ee83467544daaaee2f5a77"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives.assert_msg_not_in_res_std": {"doc_hash": "190b8976db8e919c5f0ee24d076548c74612cedeccfe80cf98e01c466669b3a7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_prevent_ConftestImportFailure_hiding_exception_TestPDB.test_pdb_prevent_ConftestImportFailure_hiding_exception.result_stdout_fnmatch_lin": {"doc_hash": "aa6d11f592fab2a37f47b2d7b7fe4d3456c3d35b27bbb46c7df3397774888cf9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_warning_on_unwrap_of_broken_object_test_warning_on_unwrap_of_broken_object.None_1": {"doc_hash": "2e836d2204b057744c82c2f6994b79a9099603016cf98d7c898126fd6fefd1a9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_pytest_test_version_less_verbose.result_stderr_fnmatch_lin": {"doc_hash": "3fa9187d99a55f1c7f9e5ff75f3e41b838469f2bbab03b10f0d900183cb195f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_help_test_help.result_stdout_fnmatch_lin": {"doc_hash": 
"e8dca95001f872542327bb049e3635a4bc5fc00647d4283eb765de43c20c0c5d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_hookvalidation_unknown_": {"doc_hash": "d43092ebeb864eb839d3fbbaef922d3adabb83afa1350a0a6a0b243fc227480c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_wrong_arguments_test_keyword_option_wrong_arguments.assert_expected_error_in_": {"doc_hash": "0fbfc196dd04095e814a7727033650bdaf278bc9cc14f97179ad9e7017d4f58b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_no_magic_values_TestKeywordSelection.test_no_magic_values.assert_len_deselected_tes": {"doc_hash": "0ebfb2099f96f2886550fe18aca71d474a1d96497750cce432d882ffdc3044f9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_no_match_directories_outside_the_suite_TestKeywordSelection.test_no_match_directories_outside_the_suite.None_2": {"doc_hash": "e3a05c6711bfc8563c0d31bb947b5e847d1a7ded5f7d865cfb10776de6984211"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_from_typing_import_Callab_test_basic.assert_evaluate_expr_mat": {"doc_hash": "8cd4949d36aad2e1b70279f395762aed452285dbdfcda9532e0aa755630d1068"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_syntax_oddeties_test_syntax_oddeties.assert_evaluate_expr_mat": {"doc_hash": "3ce980968f89f441351e260be43f16d1bc22ff091c5a742e4c8a6a5f41a23a1b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_syntax_errors_test_syntax_errors.assert_excinfo_value_mess": {"doc_hash": "030fa2b64563b2359f719e38f3f927f65abc621e6692c2e99d1d542ae6b2c527"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_valid_idents_": {"doc_hash": "4435de4c27060a027c556bab3281a134042f769bd40b9f66a3217960631a3751"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_captured_text_TestReportContents.test_captured_text.assert_teardown_capstderr": {"doc_hash": "f9642227ec15d410c03d3112a931d73f5c9d3aa1e81d375ee136f93b74b4e584"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_no_captured_text_": {"doc_hash": "fd4406896bc658dde7bec58f5f94a0854af07156f3ef92ea8139fe940c8d8a36"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_sys_test_show_only_active_fixtures.result_stdout_no_fnmatch_": {"doc_hash": "876f611471c8d06a690935ae87a6ef533eb29f46a9f29e4f2391bbd304f16dd5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_setup_show_with_KeyboardInterrupt_in_test_test_setup_show_with_KeyboardInterrupt_in_test.assert_result_ret_Exit": {"doc_hash": "2b07762b7e146a1b90bf5ebb072b6cec8c39a12311adf99743a413e2b43d8668"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixture_action_with_bytes_": {"doc_hash": "476dece115d90ab0a51b263d2e7a9d6c7ef47d1177fdfbbec36619569da5d6e7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_summary_list_after_errors_test_summary_list_after_errors.result_stdout_fnmatch_lin": {"doc_hash": "151eecbacb3592dc6dd4f968e60d5fd81172189804c8da65b600b4466c678539"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_importorskip_": {"doc_hash": "282b53b7ee18323635548d20745ff54b3ccc3296d496d5665635531dfb04d964"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_pdb_teardown_skipped_test_pdb_teardown_skipped.assert_tracked_": {"doc_hash": 
"584ad41ef323381d9a6fc723e969394a4be289c4118fde3ccaeed386784d6e4d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/towncrier-draft-to-file.py_sys_": {"doc_hash": "18d21f9eb5016bf3ff2f7d5077f472d1daa2614af5053bd6c71f2d91b8fe9374"}, "/tmp/repos/swe-bench_pytest-dev__pytest/setup.py__": {"doc_hash": "28535343d412b586b9c208aff060acadf7eed5fbe06e5b44457805a662b99925"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py__This_class_is_abstract__ExceptionRepr.toterminal.for_name_content_sep_in.tw_line_content_": {"doc_hash": "0d3bb906f7c122352de88210cb15a9f6970185b6b2e1c75d873dfbdcfba20362"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter_TerminalWriter.width_of_current_line.return.wcswidth_self__current_li": {"doc_hash": "9113d8dce6436585bbf234f1f9989fcca53f78ccd9bd3f3a1a0562fce3ef48fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/wcwidth.py_unicodedata_": {"doc_hash": "22b676fb1547b37cfec2d1ce2d530b220343ca20853179b97de2ac052c382511"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_addoption_pytest_addoption.parser_addini_": {"doc_hash": "644ff6a608ddab9e57f21ff9f7a3d72a08d88535fa6ee07aafb03a6a8961e0a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._is_marked_for_rewrite_AssertionRewritingHook._is_marked_for_rewrite.try_.except_KeyError_.return.False": {"doc_hash": "ab3773d7ae7092af63b4b285e428c1770f106ad96723e991a95ad8551f7a4b3d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.mark_rewrite_AssertionRewritingHook.mark_rewrite.self__marked_for_rewrite_": {"doc_hash": "7e1265f6335ed38fea1853b6fddc4aeb7a049f436b2a76ed32d3e3395003356e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_if_sys_platform_win32_if_sys_platform_win32.else_._write_pyc.return.True": {"doc_hash": "2d13f084244b5489055fa955d34b5447326ae2f0df208c0ed8a15bbac9fce2de"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__rewrite_test__rewrite_test.return.stat_co": {"doc_hash": "4c20234cf43d152df431cff1d0406e60cf73252fe1d44ccd9e52136065769aa5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_any__compare_eq_any.return.explanation": {"doc_hash": "be45b9d71244f2c6c0e9b0a070fb8b4b784481c58b35e508a37e7ad3ea6fd6f2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_NFPlugin_NFPlugin.pytest_collection_modifyitems.if_self_active_.else_.self_cached_nodeids_updat": {"doc_hash": "c6a035033f2b36c940fc51eadea1880f29a9168b438bef51b2aa55efd466ede3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_NFPlugin._get_increasing_order_NFPlugin.pytest_sessionfinish.config_cache_set_cache_n": {"doc_hash": "4928a593dce9dcb4a42cd10ee71baa5321f1f523d7e1a8a72a84b6d3121f4cd4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_cmdline_main_cache.return.request_config_cache": {"doc_hash": "c0fd10a5bf2119ec4632e5284270f243d0b81eb6d5b7d7a7f8a3021052ced258"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_report_header_pytest_report_header.return.None": {"doc_hash": "4a87db1bce6fb18dee5eb7723a288735385ea6954940d4408a38e52534dcb7c8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_pytest_addoption__colorama_workaround.if_sys_platform_startswit.try_.except_ImportError_.pass": {"doc_hash": 
"ebb89ff19ec6a3a34eac31707294ed6a2df6e1f14c69f2b05da2727f7c8c8ab1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__CaptureManager_and_Capt_CaptureManager.unset_fixture.self._capture_fixture.None": {"doc_hash": "323546a618248fb300c1f6bd62d86a627d0c33c26742c6090582bd2c2810042a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.consider_pluginarg_PytestPluginManager.consider_pluginarg.if_arg_startswith_no_.else_.self_import_plugin_arg_c": {"doc_hash": "b4890ddee69421f585b6bbd44ec842895d05446ac161edc261d13118691236ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.consider_conftest_PytestPluginManager._import_plugin_specs.for_import_spec_in_plugin.self_import_plugin_import": {"doc_hash": "3f99f6a459947969294f707e6063a8feee870b521250e406c4d08dcecd847202"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_os__parse_ini_config.try_.except_iniconfig_ParseErr.raise_UsageError_str_exc_": {"doc_hash": "e7e5edf10f509c634852734644ce04a0b9ac4c7815f51903eda0ba225ed3ba10"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_load_config_dict_from_file_load_config_dict_from_file.return.None": {"doc_hash": "15bdeb771ccaac20a46abcc9e39dc5950a1ea14c0300aeeef658d6be89b35803"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_locate_config_locate_config.return.None_None_": {"doc_hash": "ff4754fe85b79195f415dcba7f031fe0338b67a1905f76781a15b7a046f9ad91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_pytest_unconfigure_pytest_collect_file.return.None": {"doc_hash": "88c487cbd7ecf778ea0fcca7ecf17232090af460966e77da11ce697fa1ab8dc7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__is_setup_py_MultipleDoctestFailures.__init__.self.failures.failures": {"doc_hash": "4392c1d05b627207b19d6d07a57ed3cf5562edf430884b4f744e325d58a61129"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_runner_class__init_runner_class.PytestDoctestRunner.report_failure.if_self_continue_on_failu.else_.raise_failure": {"doc_hash": "9b8b8daaded04787b0fd541d836e55b68fd1af6d9ddec674df181586f86c2da3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_runner_class.PytestDoctestRunner.report_unexpected_exception__init_runner_class.return.PytestDoctestRunner": {"doc_hash": "4f9356216631882a6b4b469b53436deb310439a4c674d3df82cf473e6c11d4e0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem._disable_output_capturing_for_darwin_DoctestItem._TODO_Type_ignored_b": {"doc_hash": "69f6e3b68ea90f1de2175fd577c1a3d83c93176cc7877814627aba887738009f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_getfixturemarker__Key.Tuple_object_": {"doc_hash": "5fdd085d4330ea65a3fa7f37878e596521a6f97941fafa533b14861fdcdc44ae"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_get_parametrized_fixture_keys_get_parametrized_fixture_keys.try_.else_.for_argname_param_index_.yield_key": {"doc_hash": "25d0ec4160dd1c4d7b307f1f05562226d75dcad5e0268b1feb02c6f318646257"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fix_cache_order_fix_cache_order.for_scopenum_in_range_0_.for_key_in_argkeys_cache_.items_by_argkey_scopenum_": {"doc_hash": "65c3dccde02bb168e462ce70903c9b14a6eb5c80387365f91b81480b7dfe3072"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest.getfixturevalue_FixtureRequest.getfixturevalue.return.fixturedef_cached_result_": {"doc_hash": "78a396e88a54e6e8ea6241980ac4d37758375d2cec36bb3ae701f7fdcadfd202"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._getscopeitem_FixtureRequest.__repr__.return._FixtureRequest_for_r_": {"doc_hash": "ce79f1c78950435e4cc7efb04fb7264caaa5ccb6375ac915a5a321e7fb821df6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_SubRequest_SubRequest.addfinalizer.self__fixturedef_addfinal": {"doc_hash": "bb5e1af7cafdc1ae1fbf7488c6825a6465697fb8a3d11ce248425f28bfaad30c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fail_fixturefunc_call_fixture_func.return.fixture_result": {"doc_hash": "a002ab2f91c036b50064df31cd8e88d87817142f3f2ada7168a074cb5ba8c719"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__teardown_yield_fixture__eval_scope_callable.return.result": {"doc_hash": "f3384ef26b0675cfc1b4c97321000a1fda5837a0929ad63fd11310f48c3d225c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_pytest_fixture_setup_pytest_fixture_setup.return.result": {"doc_hash": "472654e65a1c447d3d05bfbd556adec68d9bf18b4f19675b3466e05641bcf928"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__ensure_immutable_ids__params_converter.return.tuple_params_if_params_i": {"doc_hash": "02fae66adeaed0e6ef5e0cb8d12a328eb3e84ae01a99cc980f6c155ffd2ae445"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureFunctionMarker_FixtureFunctionMarker.name.attr_ib_type_Optional_str": {"doc_hash": "370c5f32447a2ee24206ac4d34b4010b02b69ab79db4fbeae18234ec2ddc1a96"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureFunctionMarker.__call___FixtureFunctionMarker.__call__.return.function": {"doc_hash": "9274589fc503b8fc00a860ec43ad95a611edc62a463771d2df3320ec6b7e153c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_showversion_pytest_cmdline_main.return.None": {"doc_hash": "36875e853453936646aad126549e445e52f275519faa64f6ffdf21a5a9f797a2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_terminal_summary_pytest_terminal_summary._Add_a_section_to_termi": {"doc_hash": "b262baee80450d8c96969d94813dd8b4381707bcb232e9ee20b5d5cb75d239cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_warning_captured_pytest_warning_captured._Deprecated_Proce": {"doc_hash": "dc5846369d0eee3bd16bd7b2fcbeeb6604fb4d07ffdc5d0716f32dfe97bdb172"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.update_testcase_duration_LogXML.pytest_sessionstart.self.suite_start_time.timing_time_": {"doc_hash": "a46640a9adad45118408f511c686ab44b8e70ec4c25fe36eb04ca197c8824e30"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline_PercentStyleMultiline._update_message.return.tmp": {"doc_hash": "3ce1bd0070ec308a8eb615ccc737dff3a2e648bb3b0555b3bb39393aecf9d20c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline._get_auto_indent_PercentStyleMultiline._get_auto_indent.return.0": {"doc_hash": "252c49aa94c9bfc5b7126f53da644c28ed6222b8acec9f0068e3934fa269ca5b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__HandlerType_catching_logs.__exit__.root_logger_removeHandler": {"doc_hash": "76cfe7e9883c1aa3c240ce4aedce324b2ae20612f8316429a554b2828b3d0bd1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.messages_LogCaptureFixture.clear.self_handler_reset_": {"doc_hash": "60ed05b2846538a9e70cea56a9f0126253d1a4e89255ee7a0b70916678830def"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.set_level_LogCaptureFixture.set_level.self_handler_setLevel_lev": {"doc_hash": "af265e1ed5bb4c74b386294c52819688fc5ed19f98cf6aec7004be8feb1f571c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.at_level_LogCaptureFixture.at_level.try_.finally_.self_handler_setLevel_han": {"doc_hash": "05a6a7d5f3002f1e68c866d0b73e7420c71697bacb5e3f7f8ecb016bd218c839"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_validate_basetemp_validate_basetemp.return.path": {"doc_hash": "c6ebc0357bb54f8df3aa1f63fe4e53e28b87733478c461a2aa4038e4eb5dca39"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_collection_modifyitems_pytest_collection_modifyitems.if_deselected_.items_remaining": {"doc_hash": "80a460120ab431c5f2bc2f9831c6d7bd415945677a5cc406c2c27ef8715aa7a5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_cmdline_main_pytest_cmdline_main.return.None": {"doc_hash": "590b089c3cba3fc98ae12358280cd58d5469a08c3aff3e5c207aded9a486aa9e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py__True_False_and_None_ar_MatcherAdapter.__len__.raise_NotImplementedError": {"doc_hash": "63950787c67c8205afb69d9a905e56b0aed59a58d95cbb4e7464e8e63275ee38"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_collections.abc_istestfunc.return._": {"doc_hash": "e4123af1c8c2e8a9bc9b48c4e212aab8d14659135d3c8495674048e2aa7c7b1c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet_ParameterSet.param.return.cls_values_marks_id_": {"doc_hash": "c1a07c5e27ef74d80168b83665e11ed8d075dba6887d0e428783b12c3e32ffa4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet.extract_from_ParameterSet.extract_from.if_force_tuple_.else_._type_ignore_arg_type_": {"doc_hash": "1f4ef4b1fc19fe9e38496a125f79bcc5684c19e719d8bb7012dfde44e0932fef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkDecorator_MarkDecorator.__call___8.return.self_with_args_args_k": {"doc_hash": "cb9f7c86146e0312ad86e85bacc6a9ff04762d74e1997610d4ba72482b3ced1e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_monkeypatch_monkeypatch.mpatch_undo_": {"doc_hash": "cf7bc4aa955d1423e8811385f2b68a211dada482e628c1659889d074a7fc136a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setattr_3_MonkeyPatch.setattr_3.setattr_target_name_val": {"doc_hash": "b0e8fd602b59603bb53079c95d3b375c2cc78dd208937243df44021d6ae9734c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.iter_markers_Node.iter_markers_with_node.for_node_in_reversed_self.for_mark_in_node_own_mark.if_name_is_None_or_getatt.yield_node_mark": {"doc_hash": "c412fc829b1695fb76203e727912f064a62ab2ae83a37490269d96f850279019"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.get_closest_marker_Node._prunetraceback.pass": {"doc_hash": "a207c5775ad7b7cfc7cb1a22131b039a33b5ca18bf78a270d6c6a0135795522c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_get_fslocation_from_item_get_fslocation_from_item.return.getattr_node_fspath_": {"doc_hash": "def0a0de07c3d230dceb30a1c3185180b26ee4971cebfbe2e1fcbe1c0f07b753"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_ensure_extended_length_path_ensure_extended_length_path.return.path": {"doc_hash": "aae63b4e2dc033b9ee04fb969ce928a89a22ac9af3a2e811b2fd36e6e41efb7f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_get_extended_length_path_str_parse_num.try_.except_ValueError_.return._1": {"doc_hash": "d8d8d31a474e1aba3864e20eab260833ad82b052ab95610e721dd2fa405f33ba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_try_cleanup_cleanup_candidates.for_path_number_in_zip_p.if_number_max_delete_.yield_path": {"doc_hash": "539abee6bb8d163bcbea4e3f72e248242a520e74547b6317c144f6d6acd25d87"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_fnmatch_ex_fnmatch_ex.return.fnmatch_fnmatch_name_pat": {"doc_hash": "0d3050651388987416752294cd1aeac485c76e270863a987b37a6fbdb27d17c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_parts_ImportPathMismatchError._Raised_on_import_path_": {"doc_hash": "61eb8530107442a8eba88eb3505f2cf6cedce127fb176a0e9c1683d40ee6bbb4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_addoption_pytest_addoption.None_5": {"doc_hash": "4179b9d7d7c56e62f48a6a380259a630e8c1d8ed196bb09233dcffb2bde1d710"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_collect_file_pytest_collect_file.return.None": {"doc_hash": "6891ef4d75193300a8723ea26b2088e0b4f6ecb205334332ef9255277bc002cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_path_matches_patterns_pytest_pycollect_makemodule.return.mod": {"doc_hash": "17fe6fba2aa3b20274b6604e784cca37a08b3902290c95f77df33ab7e7484e46"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector.collect_PyCollector.collect.return.values": {"doc_hash": "fa2d2bc256d230c395d4ce2fa520d7fcb6fdb2026a4050ddf68f1cfaf9dff980"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc.parametrize_Metafunc.parametrize.if_request_in_argnames_.fail_": {"doc_hash": "7f6aa8344ce58987af7fc1aabb146ca9018e4260b3c2f3cb0495a5014a661e56"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc.parametrize.if_scope_is_None__Metafunc.parametrize.self._calls.newcalls": {"doc_hash": "8c5dedb18b2fabc8ecce099a6c74be54bdb2b117cf8491e6c186abddde19da67"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__This_doesn_t_work_with__RaisesContext.__enter__.return.self_excinfo": {"doc_hash": "f35fac7ddcdc684d2e98c02f68172eeaebd98f67a24dfb3481ffcda0d04fdd0b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_from_io_import_StringIO__R.TypeVar__R_bound_Base": {"doc_hash": "cdc89abd882a23dc5f7d1a12ad141bb503919e15fb1ff6b3ac3b7f107a62b3cc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.head_line_BaseReport.head_line.return.None": {"doc_hash": "46ce6c61a80a92390bc207e1b0d9545e302bdb493575d6df445e3dbd7ccb7272"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_CollectErrorRepr_pytest_report_from_serializable.return.None": {"doc_hash": "287a7c3e237135043c8b2b678ef26207df812965a73f6e6d3270d9044e48b8f5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_sessionstart_pytest_runtest_protocol.return.True": {"doc_hash": "77208d90c2699f63f898c37f64a5da2b1109d501252bc77f6270f7b4bde4e653"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_runtestprotocol_runtestprotocol.return.reports": {"doc_hash": "6e14117339d5550f13bcd1fc15e452e63bcb3b7a92c13717347bb29257896424"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_show_test_item_pytest_runtest_setup.item_session__setupstate_": {"doc_hash": "f44117eaa865b23051b7a5515a7890d7f26664c607273643cec105c40b140931"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_runtest_call_pytest_runtest_teardown.None_2": {"doc_hash": "44804221967092d29e3a99a426a102d9905d0abcbe685a8395f6439f32c4c85a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py_from_typing_import_Genera_pytest_addoption.None_1": {"doc_hash": "c57c0436beffc625f0a1dcfa59214de54b8660cbfb13fcb9b9868dc6c4096f7d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py_pytest_fixture_setup_pytest_fixture_post_finalizer.if_fixturedef_cached_resu.if_config_option_setupsho.if_hasattr_fixturedef_c._type_ignore_attr_defin": {"doc_hash": "a7ee0db5ef482ae94871ca394037b1a947f309d8f7db6683c5e74ad38319103f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setupplan.py_from_typing_import_Option_": {"doc_hash": "65e285e4ebc140498e6529d2a6c2a21433073caee2400a74785063d4511f626f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_addoption_pytest_addoption.parser_addini_": {"doc_hash": "2f8f4ed8fac0540a7056f8e070344ddba3a8f664dceaab2a669af1e6deeebcb0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._determine_show_progress_info_TerminalReporter._determine_show_progress_info.if_cfg_progress_.else_.return.False": {"doc_hash": "85bbc6f78595053bbb4c9a0e0a4db6c0936cad9d78c5eddabdb8e578dbb2db8b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_sep_TerminalReporter.pytest_runtest_logstart.if_self_showlongtestinfo_.elif_self_showfspath_.self_flush_": {"doc_hash": "efb72748b26a3c012214b10fb3bde9ec163969f7ae7e38d0015aba4b224be23c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_sessionstart_TerminalReporter.pytest_sessionstart.if_not_self_no_header_.self__write_report_lines_": {"doc_hash": "9b66f742a8a75b81c803c5d0176ebea2b6f84d312bf763aea113e75438a22841"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_terminal_summary_TerminalReporter.pytest_unconfigure.if_self__keyboardinterrup.self__report_keyboardinte": {"doc_hash": "ec0071d657f93dd5d2dbbda992c7517ce9f0fd5956b11468c284b2460748f45f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_passes_TerminalReporter.summary_passes.if_self_config_option_tbs.if_self_hasopt_P_.for_rep_in_reports_.self__handle_teardown_sec": {"doc_hash": "a63a99c9023249a1377a0400c56e8fce8de4bc687ec55358d5c977a4c2145682"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_teardown_reports_TerminalReporter.print_teardown_sections.for_secname_content_in_r.if_teardown_in_secname_.self__tw_line_content_": {"doc_hash": "3cd7c8bae6576921d675508722012f92dd41b9f658f1ab5f3a757f790ae89161"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/timing.py__": {"doc_hash": "1e82d677b5cb2e0e0dabbc6a524592550642e04e14fb374341fc40846792db98"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_pytest_pycollect_makeitem_pytest_pycollect_makeitem.return.item": {"doc_hash": "983f9171535b1e8448011f8160019a0f4a067d4c5b15b2ae7dd3427578a55244"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_UnitTestCase._inject_setup_teardown_fixtures_UnitTestCase._inject_setup_teardown_fixtures.if_method_fixture_._type_ignore_attr_defin": 
{"doc_hash": "0b31dc136f5de97b12ab73cd539cca2cd5460921e6feb7b10471ce50178de658"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_pytest_runtest_makereport_pytest_runtest_makereport.if_.call.excinfo.call2_excinfo": {"doc_hash": "6ae1724585809d752a89eea8de7ac7a5c83bf0464b886b929cfb506f682ab270"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_catch_warnings_for_item_catch_warnings_for_item.with_warnings_catch_warni.for_warning_message_in_lo.ihook_pytest_warning_reco": {"doc_hash": "c17d8c17edc947d9d1954e47561ba90b2c3722ba25895777bd25b7c34062e569"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_assertion_rewrite_TestGeneralUsage.test_assertion_rewrite.assert_result_ret_1": {"doc_hash": "43381eeda93a3cdb5384f8f5673f572c5f450205e2cb18aff5ec5c431e9210ba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_nested_import_error_TestGeneralUsage.test_nested_import_error.assert_result_ret_2": {"doc_hash": "ff120252abe133df8b268717da825dff8c544d7b7b677e88919b3b3bf726fb85"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_namespace_package_TestInvocationVariants.test_cmdline_python_namespace_package.None_5": {"doc_hash": "9c14480da3a20006b62d2fb90851cada04a3b1e6f81787ac66f8227ce2c3da21"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_invoke_test_and_doctestmodules_TestInvocationVariants.test_cmdline_python_package_symlink.result_stdout_fnmatch_lin": {"doc_hash": "0177cf74cb6749eec266f65bb5b24becf46ad9806cb8841304bb6f8b05f41dd7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_exconly_test_excinfo_repr_str.assert_str_excinfo2_": {"doc_hash": "9e7f1cc512dab2f16c68f4579b9c66c8bd29129abfe5041ff338753af3732831"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_truncated_TestFormattedExcinfo.test_repr_local_truncated.None_5": {"doc_hash": "c2a159b9c3c0c8cbed69a8ae1d0d0d2ad1da24f26595b0729b99658cc37f9fd1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_short_TestFormattedExcinfo.test_repr_tracebackentry_short.None_8": {"doc_hash": "704308fea105ef4a40313db77159499fbb18e85189d008ebd36610f11cc0c1a5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_and_excinfo_TestFormattedExcinfo.test_repr_traceback_and_excinfo.for_style_in_styles_.assert_repr_reprcrash_mes": {"doc_hash": "b22e6ff459c82f8f8b08c0655b90021f21f1c89356e64c1b066fa24e142ba7a2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_mock_timing_": {"doc_hash": "8a5b523d9baf4f690a0bf8fae197cb15fe78cac3b0c07aaa67b9524836ad446d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py_from_dataclasses_import_d_": {"doc_hash": "3255e66e1f1573a0e91c759192b8c7d8888b52bbd71cb530bb6fea56fb4bb85f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py_from_dataclasses_import_d_": {"doc_hash": "b8440fc3e02da310c1e520d82ac78ff7666f596c6b39b140001309d978592923"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_asyncio.py_from_typing_import_List_": {"doc_hash": "e128a93e072ccbf72818eff62756d3f43a4b818752cd46e8e0b1224c2f97ee48"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_wcwidth.py_pytest_test_wcwidth.assert_wcwidth_c_expe": {"doc_hash": "297fa85527975a88a4d2b8eac3ced7e34c0da852e29826c19f91d24932c3f7ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_wcwidth.py_test_wcswidth_": {"doc_hash": "6d17d30911e5f70d68f2de45f871e617e1d427fbfeeefa6395fb6a2ca9aaf8fb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_captures_for_all_stages_test_caplog_captures_for_all_stages.assert_set_caplog__item__": {"doc_hash": "3c142bdae9d8919f87d899eb50b01378cbc05bdbe9eea2b59d63c627f0a324e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_ini_controls_global_log_level_test_ini_controls_global_log_level.assert_result_ret_0": {"doc_hash": "8ad3e5b31aee63438a949f33d3a490922fd0a88907cfc433b330847a7fed23a0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_can_override_global_log_level_test_caplog_can_override_global_log_level.assert_result_ret_0": {"doc_hash": "2c61f3f7b875a748a88b58694a0c2d7df0641692ed5b1adbd70686bb9a4eeaa2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_live_logging_suspends_capture_test_live_logging_suspends_capture.assert_cast_io_StringIO_": {"doc_hash": "c01a00fd45ef2b2e6684cc2597f2cf8501367c5cf5f1b8c350b08f02c73297fb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_show_traceback_import_error_TestModule.test_show_traceback_import_error.if_verbose_2_.else_.assert__pytest_not_in_s": {"doc_hash": "8522951ad10cd0804bab821c515332af4eeba5d107171c7731a0eba6108b7093"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_with_native_fixtures_TestScopeOrdering.test_func_closure_with_native_fixtures.assert_FIXTURE_ORDER_": {"doc_hash": "8cb21b0e3ff5a6910dcfa17543ef4ba1a5ea8356e9e333645374d6143e68afbb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_from_typing_import_Any_TestOEJSKITSpecials.test_funcarg_non_pycollectobj.assert_clscol_funcargs_a": {"doc_hash": "75fabc4c392b4525c92e1052695906d5c8a3215a6003c3fa4c31ad2753b345ca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestOEJSKITSpecials.test_autouse_fixture_TestOEJSKITSpecials.test_autouse_fixture.assert_not_clscol_funcarg": {"doc_hash": "69a702b2e24c69c9296de85d45fae408964b133a57672beae74198d169c61eb3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_test_wrapped_getfslineno_test_wrapped_getfslineno.assert_lineno_lineno2_": {"doc_hash": "3a0e9e0faa4f769ba6913a83999eca234b90393f8415a7d0364beaa02395741c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_iterable_full_diff_TestAssert_reprcompare.test_iterable_full_diff.assert_n_join_verbose_": {"doc_hash": "efee97bcfbe7aa3c95c8c25b26ee0ce9aae2fe0423b474900375959df4198d66"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_tuples_TestAssert_reprcompare.test_repr_verbose.None_7": {"doc_hash": "9ebe64b95dc5a30137007eda48ea45ec10adc85b95635d04f1d317471e8bf435"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_TestAssert_reprcompare_dataclass.test_recursive_dataclasses.result_stdout_fnmatch_lin": {"doc_hash": "28870a375712fd1a795c1abdcd8e0ff9d8834105c516e4fedb1065f88beac6d1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_verbose_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_verbose.result_stdout_fnmatch_lin": {"doc_hash": "35ff3295bcabf98cadaa6849d47d1ee432b2dac729c503af738cb5e2222a58f5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_recursive_TestAssert_reprcompare_attrsclass.test_attrs_recursive.for_line_in_lines_1_.assert_field_c_not_in_": {"doc_hash": "7a00dc40eb503bb9a29d1ffaca6c6296c5c955c399fe407024ef6a364625e014"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose_TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose.assert_field_d_a_": {"doc_hash": "cc5ea093d22bacb91c66b68a8b81acce99a00bb2be63da8bac07ff01eb49eb56"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_getmsg_getmsg.try_.else_.return.None": {"doc_hash": "9caf73bf900d8a9a8082aaf542cc8977be25b2567855c2ddd1996dce317d3350"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_name_TestAssertionRewrite.test_name.if_verbose_1_.else_.assert_lines_assert_": {"doc_hash": "66fe9b43979086d7e826d16e5250213a0d760e2f7d28d11aa5c729cac1ac0e36"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assertrepr_compare_same_width_TestAssertionRewrite.test_assertrepr_compare_same_width.if_request_config_getopti.else_.assert_line_": {"doc_hash": "dbeab18fa1b102fd57a83ec1cf35d8fb595b5b366f0597ec3f7cd6268d5a8eaa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails_TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails.if_request_config_getopti.else_.assert_lines_": {"doc_hash": "ef82a2d3bc06b905db15b545714c0369a437b1c9836590e5b79c74577aa96573"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_boolop_TestAssertionRewrite.test_boolop.getmsg_f11_must_pass_Tru": {"doc_hash": "9661c3046992bcff578506ce56a441be22b272e98280ef767bf6cfbc1d00ba9a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_short_circuit_evaluation_TestAssertionRewrite.test_unary_op.assert_getmsg_f4_ass": {"doc_hash": "79d28dce67cf7d2d82cff6b671342d72ba71cb24cecaa140853b1a5955a6cec6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_attribute_TestAssertionRewrite.test_attribute.None_1": {"doc_hash": "5c74b553f7e63b492515bf261f63fbcafa614126be705b280ae166a6d20cba23"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_comparisons_TestAssertionRewrite.test_comparisons.getmsg_f5_must_pass_True": {"doc_hash": "a75c5bff33cf4a28c12f05c71080e2654951cc2d19aea6731fe48556a0e011f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_custom_reprcompare_TestAssertionRewrite.test_custom_reprcompare.assert_getmsg_f2_ass": {"doc_hash": "2b76ad4364f4efa07464e894a622bf0352fef1f5a7ed93f4b53693c45c91bb49"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assert_raising__bool__in_comparison_TestAssertionRewrite.test_formatchar.assert_msg_startswith_as": {"doc_hash": "5f6b443866fcef6f78edf38fab003e6d5be1efba3a89c1eee9c112e3d2bc5d49"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_fscollector_from_parent_test_fscollector_from_parent.assert_collector_x_10": {"doc_hash": "e9b29ee76a02af1a0c54156b1526fdf39b234d0e5d056402440826b799d56acb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib_TestImportModeImportlib.test_collect_duplicate_names.result_stdout_fnmatch_lin": {"doc_hash": "5451463c14a56e268f46222b2f12e3126cdccf0114210152df6f81c6034fc06d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.test_conftest_TestImportModeImportlib.test_conftest.result_stdout_fnmatch_lin": {"doc_hash": "eea8ba3cceef8f90b518881e9a46f18d128ff743a7dd689d9d1a8f3adee25b7e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.setup_conftest_and_foo_TestImportModeImportlib.test_modules_importable_as_side_effect.result_stdout_fnmatch_lin": {"doc_hash": "6eee97e341850d1ccddafbf391d5fc0776e1d4d63393842472d5acdcd922ff8c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_enum_test_is_generator.assert_not_is_generator_f": {"doc_hash": "deb22d3d11516d1ba685b3332a32a259b4d01013967fa4f1d25476943bf5febd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_getconftest_pathlist_TestConfigAPI.test_getconftest_pathlist.assert_pl_1_somepath": {"doc_hash": "2f8d9a6a3429a2ddf026c714789b3ea18006863e47da85ea3f3e7b0534c0c6a6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_linelist_pprojecttoml_TestConfigAPI.check_config_linelist.assert_values_": {"doc_hash": "519517cb8eafa93dbe37a9644d2a104f98926563f7f6d92dc96aba4729664b0e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_options_on_small_file_do_not_blow_up_test_options_on_small_file_do_not_blow_up.runfiletest_v_v_": {"doc_hash": "eb71215d90f6c7f814d57f12106c7fc2043639bd49dc4bf15735a3d362fdca3b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_ini_TestRootdir.test_with_ini.assert_ini_config_x_": {"doc_hash": "632fa8045b78438ae68aadf00f4751948a0e1d9e13d03d7f3840852d3f789bf5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_specific_inifile_TestRootdir.test_with_specific_inifile.assert_ini_config_x_": {"doc_hash": "99a67f373e2c8c19bf99b61aa926c16bcbfe6c4dc12c651b81c3b2c83f4d9a38"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_invocation_args_test_invocation_args.with_pytest_raises_TypeEr._type_ignore_arg_type_": {"doc_hash": "7bb69948d442ce7b30416537059133f8ae8b7524e644d99e89a8a37c2b2ec225"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_value_access_not_existing_TestConftestValueAccessGlobal.test_value_access_by_path.assert_": {"doc_hash": "ce74704d15581f5552e7a03aa16195065345ace2a899ccdeaf125a3829b56db0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_symlink_test_conftest_symlink.None_1": {"doc_hash": "baec568235bb2538b64b11fadc10b3f7131c016bebfdc5a5d614f8bed0f3f55f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_custom_debugger_hook_custom_debugger_hook.None_2": {"doc_hash": "c9316cb1282f8beb5455feefc6204b41d20b92a2f330775559fca8bf51befcb4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_TestLoadConfigDictFromFile.test_valid_toml_file_TestLoadConfigDictFromFile.test_valid_toml_file.assert_load_config_dict_f": 
{"doc_hash": "ef95fe114f88c8cb5bd83d3830e643f3017c4eccfb9f0928d902407a03d9e7a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_timing_function_TestPython.test_timing_function.assert_float_val_7_0": {"doc_hash": "45f4a80d05212100af03de90f587649160924e0fc2540cfcac8d8ac2abd18fa5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_mangle_test_address_test_dont_configure_on_workers.None_1": {"doc_hash": "c9dfe089c3108f885a7c786ce26028bf560a3437bf48441f2a7034eb8b4cc3fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_link_resolve.py_os.path_subst_path_linux.try_.finally_.pass": {"doc_hash": "a28f3bc79f67d278e26fd69ee87b86911ff09d8b716520191f472d469176a0d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_link_resolve.py_test_link_resolve_": {"doc_hash": "7eacf3ee663589f20acb07797ba70fef0546f8fb677eca1f99e55b70a1031f5f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_argparse_test_wrap_session_notify_exception.if_returncode_is_False_.else_.assert_result_stderr_line": {"doc_hash": "b94f095d106eb72e88659e25dc46027fe15e58d84734ca86cb94c79d20d5882f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_wrap_session_exit_sessionfinish_test_wrap_session_exit_sessionfinish.assert_result_stderr_line": {"doc_hash": "acf33b4d4f10a76c1c60c6e7bfd5f76ad3e715b9cbce056c231c6ba12c5b2439"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_os_test_setattr.with_pytest_raises_TypeEr._type_ignore_call_overl": {"doc_hash": "27e61622ef34e46e9491c77f0cf2a008010a2e09441364fa3ac6bbdd782961e3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_TestEnvironWarnings_TestEnvironWarnings.test_setenv_non_str_warning.with_pytest_warns_pytest_._type_ignore_arg_type_": {"doc_hash": "ee77d02a26a939e2eef1c1489cf14d9fb29a2056b57fbf783982331478d8e12e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setenv_prepend_test_issue185_time_breaks.result_stdout_fnmatch_lin": {"doc_hash": "a44a23911ddddfdf60340ddf32defabdb65569fbb226691eb7ac689506eb1d8f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_syspath_prepend_with_namespace_packages_": {"doc_hash": "229a4258a2df262332c27e48cb026f5a6be1a937782ae72e2dcb419969bf062e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_failure_with_changed_cwd_": {"doc_hash": "a7b7af4504ad6434b1f18de7ede21cc2d3b3f84e3564036344cbe494c14ac87a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort_TestFNMatcherPort.if_sys_platform_win32.else_.drv2._d_": {"doc_hash": "bf74a1e9949619b65f28efe1d78815546c05d95ee65ca169ca25ae07147f2b35"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort.test_matching_TestFNMatcherPort.test_matching.assert_match_pattern_pat": {"doc_hash": "7883a35b9145f58a048cbc76ebacf336e2d230233cac5673e05dc27af1aaf1f9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort.test_matching_abspath_TestFNMatcherPort.test_not_matching.assert_not_match_pattern_": {"doc_hash": "9dc77a0de1ccebc4c49048581746ec6196df6b45003f4933c434cce0ef2b5715"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_messy_name_TestImportPath.test_import_after.assert_module1_is_mod1": {"doc_hash": "853b5f51db80621df794bbd64eeda8874ca14a403cc378ef20305325ceb7548c"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_check_filepath_consistency_TestImportPath.test_check_filepath_consistency.assert_issubclass_ImportP": {"doc_hash": "89de4c8810734bfc234cb5983dfce49487d8adc21ef2391bb2b8517d3d4da7da"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_no_meta_path_found_TestImportPath.test_no_meta_path_found.with_pytest_raises_Import.import_path_simple_module": {"doc_hash": "166a5cc1daf9cb4db8a5ff4c452a89d08f6b18f723ca7726b3910d5f28c732e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_long_path_during_cleanup_test_long_path_during_cleanup.assert_not_os_path_isdir_": {"doc_hash": "06cc280e732aac12f012a25893d3f3d14077f5057a964f67f245783c7a59b44a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_get_extended_length_path_str_test_get_extended_length_path_str.None_3": {"doc_hash": "e602ea63df7298edd198e5f2230936c7c751cae4b1fe03dbc20904a2f13fde67"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_module_TestPytestPluginManager.test_consider_module.None_2": {"doc_hash": "525659e8e0530b67123123703de19e3c98ff59c74bfdae65d548af41ea916d66"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash.check_longrepr_TestReportSerialization.test_chained_exceptions_no_reprcrash.check_longrepr.assert_fileloc2_message_": {"doc_hash": "c585428f22a46463dc19a8db10fa7e4409fa68fac61eeec9fdc89f5879f0edd3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_header_trailer_info_TestTerminalFunctional.test_no_header_trailer_info.if_request_config_pluginm.result_stdout_no_fnmatch_": {"doc_hash": "c1672c2639e6a0e12cdf6514163224f6895a1e62fe5d87ef7e8e9f2112fb8d2f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_header_TestTerminalFunctional.test_no_header.None_4": {"doc_hash": "56717995758dd19cb7c4fd92d7e23afe935cfd5a8abb3dbd87acfa7cbc0a27e3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_summary_TestTerminalFunctional.test_showlocals.result_stdout_fnmatch_lin": {"doc_hash": "da5ffdf4c50bfbacc453348a4fc0d65d070a0d01f0ae8460a9abb45f8eea88ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_warnings_header_once_test_terminal_summary_warnings_header_once.assert_stdout_count_": {"doc_hash": "4307a6fb389cb3fec23480b614ce8f3079d70d19fde5689449e411d67aa8b54e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_no_summary_warnings_header_once_tr.return.TerminalReporter_config_": {"doc_hash": "38cf2ed94cb3e6d1b0c82c1258cfa9e52b77608166012fee4248e7262d693e99"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_line_with_reprcrash_test_line_with_reprcrash.None_19": {"doc_hash": "8e2286eade8656c6a229c266bb055f24a79a9a2b495a9370510bfd97d00e36e0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_line_with_reprcrash.None_20_test_line_with_reprcrash.None_21": {"doc_hash": "3a12f9307139039e710d8812a72df16b319c7f0e5e18ab853c425cc53b43ccdf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_collecterror_test_collecterror.result_stdout_fnmatch_lin": {"doc_hash": "55dcc9ab3ae635ab0c28d20f85f95ba2b35a7a341de5e22313780f4edcd29ac7"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_no_summary_collecterror_test_via_exec.result_stdout_fnmatch_lin": {"doc_hash": "f6a9948aefba445bc4dcdc3ec59f9bbef2ac61e896a75ab08300394ac2dd39c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_warning_captured_hook_test_warning_captured_hook.for_index_in_range_len_ex.if_collected_result_3_is.else_.assert_collected_result_3": {"doc_hash": "f4d01b4f4be48b42a17b74f6548c4e07aad5922152c05d2b4e4b315609c15b35"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_ConftestImportFailure_filter_traceback_for_conftest_import_failure.return.filter_traceback_entry_a": {"doc_hash": "dac5060abe0f16923b89efe23c477c1631f453302c456f721d5882e5afb07ab8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._consider_importhook_Config._consider_importhook.self__warn_about_missing_": {"doc_hash": "df7382e76c0780722abad44f601f99c859270efbd182923805fa5c42c20c0fab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_conftest_options_getpluginversioninfo.return.lines": {"doc_hash": "0c4c8315fae1f6580b2e048c97747e3578e60246e30e4dac4dc38b671d7d5844"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_report_header_": {"doc_hash": "0d85624293b06898fde9b30337ba11b51e59dbee4357b99749166bb5b78dd19c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_collection_modifyitems_pytest_collection_finish._Called_after_collectio": {"doc_hash": "cf3e753f35e4f1460009256a31d0b277592bdbd93cab6682019c13a0451ba361"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_ignore_collect_pytest_ignore_collect._Return_True_to_prevent": {"doc_hash": "c797384a687a021a6915cd13d95b7aa94ea10b77cee7d211d3816dd13bd9b15c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_12_pytest_pycollect_makemodule._Return_a_Module_collec": {"doc_hash": "acd04b549497ece97d6aa71eb5d99550b8bc6c88d7be3c64b78a19c12542a7de"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_make_parametrize_id_pytest_make_parametrize_id._Return_a_user_friendly": {"doc_hash": "6b78168d6cd2d013235dca0a9af703d2133d2d7fd6f1cf1648386e1675d0bc7c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_logstart_pytest_runtest_call._Called_to_run_the_test": {"doc_hash": "3d9e6b9241ffb304812273a415511166d1eb7137ed0be69ec7471e7b60ca437e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_teardown_pytest_runtest_teardown._Called_to_perform_the_": {"doc_hash": "6d4fdc47dc268ff2055d3d2fd2009e68e85b3c737457bf4689ffa023137e7a22"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_makereport_pytest_runtest_makereport._Called_to_create_a_py": {"doc_hash": "370c67c2d5f3724730fb5cdac33c879d9196bdbb087a8de3cc62b33141ce5bb9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_fixture_post_finalizer_pytest_unconfigure._Called_before_test_pro": {"doc_hash": "39d2f695cf7393eba8b410caaf6f77da9ed3195c7c2d29b68666e6639122adfe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_24_pytest_assertrepr_compare._Return_explanation_for": {"doc_hash": "a94061deafe736a9bb156ee0886cc2970c3afcff7328d670a7f063a473282df4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_exception_interact_pytest_exception_interact._Called_when_an_excepti": {"doc_hash": 
"abb2e56a75f693280e2f788a1ca4712f415372dc0f0f082a05395a9da862a1fe"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_enter_pdb_": {"doc_hash": "c458463a193fd054e61871e6c438d12e135bd113dc8e936d4ad611ca0324391a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_cmdline_main_pytest_collection.session_perform_collect_": {"doc_hash": "f143f5c5bbb1ae79eb07ddaed9aea34054e140ae6ec124f75e032e1859ef13da"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_get_empty_parameterset_mark_get_empty_parameterset_mark.return.mark": {"doc_hash": "adf50b4d4386760a00a8cb828ef6a48c9efdad120faaf1b01dba6795ba9de7a6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_get_unpacked_marks_normalize_mark_list.return._x_for_x_in_extracted_if_": {"doc_hash": "89b5ea8ba239ac0e58118832c22e07b144a51a0e3a2f94b302b56249db9c634b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkGenerator.__getattr___MarkGenerator.__getattr__.return.MarkDecorator_Mark_name_": {"doc_hash": "5128f7a91983b4b454ed42fa751935de24737b747785ab3a6dc4b32d3d7c09e0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.parseoutcomes_RunResult.parseoutcomes.return.self_parse_summary_nouns_": {"doc_hash": "05846ed4312a135b1129f829dfa37891891b7377d457bc68f7f0011b892e39d6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.parse_summary_nouns_RunResult.parse_summary_nouns.return._to_plural_get_k_k_v_f": {"doc_hash": "8dc541f625dc0214cec8c3efeaf02f6f649d688c69c88238e0c300636494a595"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_deprecated_call_3_deprecated_call_3.return.warns_DeprecationWarning": {"doc_hash": "e3767505f8a6790ab766d1255f5990779a27a6f41e7ec20625d4ab18d61b79ee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_warns_6_warns_6.if_not_args_.else_.with_WarningsChecker_expe.return.func_args_1_kwargs_": {"doc_hash": "c7d986433e9e565b0bbb32337645777aa776e6f8faecb7f7ba9301cdc816e2df"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_evaluate_condition_evaluate_condition.return.result_reason": {"doc_hash": "2e1183a4be4cca496dd89cbeac74cb4574737555e7ceb21a78d46b1754747ea5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_Skip_evaluate_skip_marks.return.None": {"doc_hash": "8d687398548e914bee46eee88c40177e5f102a6face21e795ce65146d6ab0d44"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_Xfail_evaluate_xfail_marks.return.None": {"doc_hash": "97eb7b391d687b97f240d075762ea7de2c62bd0639b0fba789ae78bfe79e4876"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_no_python_sourcecode_test_excinfo_no_python_sourcecode.for_item_in_excinfo_trace.if_isinstance_item_path_.assert_str_item_source_": {"doc_hash": "642d9990fbbbd7076c48f3b00b31ec999243623a763c1878f892a88104983b8d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_negative_tolerance_TestApprox.test_negative_tolerance.with_pytest_raises_ValueE.1_1_approx_1_rel_abs": {"doc_hash": "d16c1de44929118eb6acba89ff932a22dadf1d7b0a9a34b3a6cd23f4b988fa86"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_warn_missing_test_warn_missing.result_stdout_fnmatch_lin": {"doc_hash": "6c0186a82a86ef576d024bcde6d4a924419d41676556d378b3aabf57bf111b67"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_call_failure_teardown_error_TestPython.test_call_failure_teardown_error.snode_assert_attr_": {"doc_hash": "13f2bc93dde35d297539af89417fe62471021bccd40ed2b7f509cce4a888ffac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_reevaluate_dynamic_expr_TestFunctional.test_reevaluate_dynamic_expr.reprec_assertoutcome_pass": {"doc_hash": "2f44070e84aefe050c252216a421239fcbdea4c39f63bc9463f14ff6a1563278"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_sys_TestEvaluation.test_marked_one_arg_with_reason.assert_skipped_reason_": {"doc_hash": "aaa0123df1d88d01a35cd4aa2ee511d2d59fb09d0e62edc8570e2fb28582b71a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_one_arg_twice_TestEvaluation.test_marked_one_arg_twice.for_i_in_range_0_2_.assert_skipped_reason_": {"doc_hash": "f1667539efac528ddbecc604ccb7ee3b8ffe3e24f0e8cf8d84cf6669e7be0aa4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_skipif_with_invalid_boolean_TestEvaluation.test_skipif_class.assert_skipped_reason_": {"doc_hash": "841894fb4f53f5b9e4fdb383f29638ff8cc18ac3d2c27eee13afd862260dfcba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_default_markers_test_xfail_test_setup_exception.result_stdout_no_fnmatch_": {"doc_hash": "a92f257c910901722a323027af092ea93ad4c8cc69915b42deedc90f7a49f5b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_pytest_TestFailing.test_not.assert_not_f_": {"doc_hash": "878eb2661c945e30865419d108e2f333f38a4a5a4d0b1383344c60741393c7b8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestRaises_test_dynamic_compile_shows_nicely.module_foo_": {"doc_hash": "0bf58598217ce0aae7528748caaea32db576687fd61199bfc4f545ee5d76d105"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_getfslineno_getfslineno.return.code_path_code_firstline": {"doc_hash": "b4c1b9cc489d64b4270b269f2c33576e2f397392f9f0d8e84bad993eb87e184f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_ast_Source.__str__.return._n_join_self_lines_": {"doc_hash": "4e44eb852c95ad9e43e6b3cfb809b4bb5bdca5d40d361206406095c779080f2f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py___deindent.return.textwrap_dedent_n_join": {"doc_hash": "5d0a3010e58bab8b9191c356c731467ef2afe760b14dee51c3881c443696f768"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture_LogCaptureFixture.handler.return.self__item__store_caplog_": {"doc_hash": "1bd8a1d45749675db8d46701345af5aeeddfe58137f70d0eb302b0344891049d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.get_records_LogCaptureFixture.get_records.return.self__item__store_caplog_": {"doc_hash": "3147627c2e3f80e5517c681e09f127d9df0b3115afaaf54ee3a06f96296b5876"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.pytest_runtestloop_LoggingPlugin.pytest_runtest_logreport.self_log_cli_handler_set_": {"doc_hash": "9810ea485909e938ad42ed8c00e840de9c5d805e73de471937f019c523ee6e9d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._runtest_for_LoggingPlugin._runtest_for.with_catching_logs_.item_add_report_section_w": {"doc_hash": "f5bcd45f8c88fcb87a0d262854bee915c6832c07031aba2c63b47ebe368f1e2a"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_cleanup_numbered_dir_resolve_from_str.if_isabs_input_.else_.return.rootpath_joinpath_input_": {"doc_hash": "47f162fb921372cb29c71942fd029f1aa12371de003f98827261dfe0cc00b8c3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_pytest_addoption_pytest_configure.config_addinivalue_line_": {"doc_hash": "8af00881be9e5ab0a374cc388e2e1e5f5571f506836a6efb7aa2945341a11780"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LsofFdLeakChecker_LsofFdLeakChecker.matching_platform.try_.else_.return.True": {"doc_hash": "775596828912ebc32790c11d38bd4690670d0e9e32f5583e14de93fe186e7558"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_pycollect_makeitem_pytest_pycollect_makeitem.if_safe_isclass_obj_.elif_collector_istestfunc.if_not_inspect_isfunctio.elif_getattr_obj___test.return.res": {"doc_hash": "115397bedbb99aa8ce7aa2ad0de0921bfd573c8eb080b5aa9b9e5856acf91982"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin_PyobjMixin._getobj.return.getattr_obj_self_name_": {"doc_hash": "5e8fa7839095988b53b41a00c8e4abc9ddab2010e3baf3fed2d84ce8329cd86b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin.getmodpath_PyobjMixin.getmodpath.return._join_parts_": {"doc_hash": "d56b0a28bc4456a700ec088b83fd30c8df3d7a6678568f0564921b43c42a4486"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Instance_hasnew.return.False": {"doc_hash": "f9caf310961785db65df3117a7a4f87a00f747bfd789c90e9346123134da1366"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function._prunetraceback_": {"doc_hash": "22e69ec5f1d06aa4b53a7dc59ed16d197363c2a0fa2c9c9aae75307dc82b1e5f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_frame_leak_on_failing_test_test_usage_error_code.assert_result_ret_Exit": {"doc_hash": "9a7b96e24aad7e03fab1c5ce28b4d0951b13aebacfc2620a907dbb020c59dacb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_warn_on_async_function_test_warn_on_async_function.assert_": {"doc_hash": "690e06349fbd78a5692c5238c59c561e08a56980dc42144145f13e513e5f4394"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_entry_getsource_in_construct_TestTraceback_f_g_h.test_traceback_entry_getsource_in_construct.assert_source_deindent_": {"doc_hash": "dc65517a36f299754d8da443490693298c8edf52077588400625bc1f560cf2b0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_excinfo_TestFormattedExcinfo.test_repr_source_excinfo.assert_lines_": {"doc_hash": "a63d6b713a2e7c7857ca3cb88a3f17db1ff1a12006c1a2f58533e88cdb10a62e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_not_existing_TestFormattedExcinfo.test_repr_many_line_source_not_existing.assert_repr_chain_0_0_r": {"doc_hash": "f2a7769f64a1fa6ac4ba34bbdf4e7615afe10bcf969010b024b1c2ec22c9f602"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py__flake8_noqa_test_source_strip_multiline.assert_source2_lines_": {"doc_hash": "bedae1050c307d00429a3122f82ffb36eeea4a91ddc5807835f499a0f58d479b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestAccesses_TestAccesses.test_iter.assert_len_values_4": {"doc_hash": "e61d9b5e3127642e2cc98d38b0910a40c5aa1a3de5a62e10f3e54bd12bafc97f"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing_TestSourceParsing.test_getstatementrange_triple_quoted.None_1": {"doc_hash": "5f7a3da187b95aa3edf4c2bb3908a711dd3342cd32a935ed2656051295243923"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing.test_getstatementrange_within_constructs_TestSourceParsing.test_getstatementrange_within_constructs.None_4": {"doc_hash": "97658bb82707dfa3d64628ef9faed5083df4942bea948107e2893a77073513a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing.test_getstatementrange_bug_TestSourceParsing.test_getstatementrange_with_syntaxerror_issue7.pytest_raises_SyntaxError": {"doc_hash": "1cc455d5b0be62c7864d3f44fa0e2b02251031de157ab08ed5d32c8fded730fd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_getstartingblock_singleline_test_deindent.assert_lines_def_f_": {"doc_hash": "802418377b29fc46532cc1ba30a9c4e6b0b33b423887ebd79d012bf38bdbf6c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_source_of_class_at_eof_without_newline_test_findsource_fallback.assert_src_lineno_": {"doc_hash": "b0c7d4da1d8a1c7481241004a9de16c70cfe230ab37f6e7f19780e787d3cb8d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_source_with_decorator_test_source_with_decorator.assert_": {"doc_hash": "1b5aaacfb03486e0cc30ad3766576edd5d92a6142bbd8c07bfcc9b63a514bac7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_io_test_terminalwriter_dumb_term_no_markup.with_monkeypatch_context_.assert_not_tw_hasmarkup": {"doc_hash": "d8b1c5f952c4b9e7250348c03216c0863200db386068a74f37ba9c3b98aec9db"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_terminalwriter_not_unicode_win32.int_sys_platform_win3": {"doc_hash": "e042cc30aa82bbfc01035d7042c2e799a4e1ca5215fa416230ed034da3810bca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_attr_hasmarkup_test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR.assert_color_not_set_": {"doc_hash": "b881028c7f7b2b8e58bf696dccdf0f404206d20231010dce57a15d7539cb5078"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_captures_despite_exception_test_caplog_captures_despite_exception.assert_result_ret_1": {"doc_hash": "be47ae00ec94a3248958a46f800c600aa51c4070316fefbeddf4bab8f731480a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_log_report_captures_according_to_config_option_upon_failure_": {"doc_hash": "9e5d1b0b1d63d85ced05d6f15ed70b7dd858efc2d6f1aed7539a6f0286319d58"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_originalname_TestFunction.test_function_originalname.assert_originalnames_": {"doc_hash": "d57ac997c418cb3dfa888e260f979fd88899657610854f15a7488c699ac6945d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass_TestAssert_reprcompare_attrsclass.test_attrs.for_line_in_lines_2_.assert_field_a_not_in_l": {"doc_hash": "dcb5a0a07055e00ccf8a9b04f634a73937b41e0b249363b78cf37c5838c6f6b7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_verbose_TestAssert_reprcompare_attrsclass.test_attrs_verbose.assert_lines_3_fie": {"doc_hash": "6946d891c7f1c44ee36c56960d2898124fddd4606af17e424e494100f51d5b8c"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_os_TestMark.test_pytest_mark_name_starts_with_underscore.with_pytest_raises_Attrib.mark__some_name": {"doc_hash": "de62cd3ba4875c1a92c48b31bf423302f15e2aed55bb45df06252e19a810b948"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_run_with_skip_mark_TestXFail.test_xfail_run_with_skip_mark.result_stdout_fnmatch_lin": {"doc_hash": "a7c982b74190ca9e30a1a54832f9317adab8c9f915e8e4bc5ae7980d967208f5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_evalfalse_but_fails_TestXFail.test_xfail_not_report_default._": {"doc_hash": "989e5de05080ddd9e27e513b855b44c86786eb2c7dc33644a73b38fb35597589"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_summary_stats_test_summary_stats._Fake__is_last_item_to": {"doc_hash": "37f430dad6d81c2d0ce648c8798bdc5fccd0134bb07c62b8cf72186b8db035f2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_summary_stats.fake_session_test_summary_stats.assert_color_exp_color": {"doc_hash": "c96f56f8451ab18c971336318cb7bfe10440746494f81e7ce9263d828a0c2dc9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/unit_test.py__": {"doc_hash": "3d1178aa589368cc5a8a300a5344ff134d0c427b0d79e2ea060d876c86381df1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/xunit.py__": {"doc_hash": "51c8536ae2c29d10ac4e93513104544d40e048b62f8f4a3b0d2109d7e223a026"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse.py_pytest_": {"doc_hash": "73b353fd819ceef22e9b00c36185036c41e44509b7c8b89367ac48b7b03a0a35"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py_pytest_": {"doc_hash": "501aefa3a7509a5a1c9c23c190d4faf342222dfd4c4783f2ca3bd893b13963ae"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py_pytest_": {"doc_hash": "3fef8afafcd769768742ee4422956149399baecdc24f3b5863d9c6e673f57bed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.py_pytest_": {"doc_hash": "94599d1f231e0db55269c9cb0a5bb0666786e14a76acae882ec8b53d99003cf6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_scope.py_pytest_": {"doc_hash": "28086877f575ce9d38f71770bb9fc17040c7d08c2cc624d706b9fd639f609f9b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.py__": {"doc_hash": "de07d403ebb8128cb09d77fa0fd75bbd4156414443f1475c7012e7b2bbc9ce5e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py___login.return.github_repository_owner_": {"doc_hash": "99f89689b4f68e93673a54fe4d56bc8c5be8c4d89a4982da406653dbe04cc542"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py_prepare_release_pr_prepare_release_pr.print_f_Pull_request_For": {"doc_hash": "dbb8b6c7c1ac861a2c2e4c0be61caa029e16a47402f6c7a324cd3a97a5ad7a23"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py_find_next_version_": {"doc_hash": "2e4ba4ab781cfece586f198d3edb15aa410610c4c10d61ff72935e658d7b9a61"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py___get_comment_data.if_comment_in_payload_.else_.return.payload_issue_": {"doc_hash": "4ed2c0c0d01357fdd06d6df4185ecbc6c6379d6d8d7573ed664ee864b853eddf"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_validate_and_get_issue_comment_payload_print_and_exit.raise_SystemExit_1_": {"doc_hash": "9f1d2e3e983e2beefddc2f85754b05f2623bd9a89684acefeb397dab866cf241"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_trigger_release_trigger_release.if_error_contents_.else_.print_f_Fore_GREEN_Succe": {"doc_hash": "2f16435e8ec4ac5bb81a4350ab54e9b9e229f18e52fd5f7e7e42278dfaf000f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_ERROR_COMMENT_": {"doc_hash": "17ee0459c164efafa6ed2db1ba4dc2020e1fee6c94701ff757f4f4ba3b6295fb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py__Invoke_development_tas_announce.check_call_git_add_": {"doc_hash": "0e8c662c25fd08c281a185547246ed49ef4fcf89677ce569b5cd3a1ac77ae1ee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/update-plugin-list.py_datetime_DEVELOPMENT_STATUS_CLASSIFIERS._": {"doc_hash": "94f08926f468ecb323d9ddd67e59ed73492402ba8b36d78ca7fd9b8db6f60454"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/update-plugin-list.py_iter_plugins_": {"doc_hash": "a09063df85d89b5363f18ad18ef9c70016fe80de8716456f0af2dd3a791aed1b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_argcomplete.py__Allow_bash_completion__from_typing_import_Option": {"doc_hash": "fc1df388b50bb455962dfaa31232e21d39e124c17acb17ca5f1beddd7761a51e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code_Code.name.return.self_raw_co_name": {"doc_hash": "e294488493ceeb5fa82edb361445700afd097ad28230316fb9dd74691c0808c4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.ishidden_TracebackEntry.ishidden.return.tbh": {"doc_hash": "6e533a9440a31a180dbe81c9bd2185685d630aa02372c0d1f1fbf56151935804"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.__str___TracebackEntry.name.return.self_frame_code_raw_co_na": {"doc_hash": "0fec71c5080569e359ba0b8a772cbacc0505720e3da0dbd9e9b2d1dd99ec152f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.__getitem___Traceback.filter.return.Traceback_filter_fn_self": {"doc_hash": "0b81ac37d08af9027533dfa7553728d30c781b9a5ad3ae329385c81d314dea25"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.getcrashentry__E.TypeVar__E_bound_BaseE": {"doc_hash": "ff4a65f45ea07b031b1ad43428c510bb086d4f67526ec01d53e892cb916156d7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py__Relative_paths_that_we___PY_DIR.Path_py___file___parent": {"doc_hash": "3f43ccb8a7727df3a962e39a6d06d4dc015b2101b890dafe7188c94224fee7a8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py__Support_for_presenting_if_TYPE_CHECKING_.from__pytest_main_import_": {"doc_hash": "42f6e8c18efdd7ef1e28746fedcd6a57e87114007a3966b95476e10d842e56da"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__should_repr_global_name__call_reprcompare.return.expl": {"doc_hash": "efd2928a1fc17d58c95dbb1134c38f55704b1af0ad8d26cdf8c6afd445fff8eb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__Utilities_for_truncati__running_on_ci.return.any_var_in_os_environ_for": {"doc_hash": "601deb6177ba30f2a56b8f43ef132a326da32320f71f5dfd2c86b431463c1699"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_format_explanation_format_explanation.return._n_join_result_": {"doc_hash": "3af425e3e046d1dbd5660a3ca012a4d779b1a6c95e46da05831a1328cbc869f2"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py__Implementation_of_the__CACHEDIR_TAG_CONTENT.b_": {"doc_hash": "094c5f1d963d2e0ffd94d22990fad464393d638242bdbf3ea82f0bfa4b991f53"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache_Cache.__init__.self._config.config": {"doc_hash": "efbec19bd6f1587daf5336c5e32638e2ba4ad4240e862bcd74b796732557c97f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.for_config_Cache.for_config.return.cls_cachedir_config__is": {"doc_hash": "60281e8c4dc31480696782cf38422607ea150af7b5c71a4e0ed0868bb85fa9c8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.clear_cache_Cache.warn.warnings_warn_": {"doc_hash": "6749b4a412e3750943095a4f09ef80af7faa5e430688d8e6c286eeda9b4252e0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__Per_test_stdout_stderr_if_TYPE_CHECKING_._CaptureMethod.Literal_fd_sys_no_": {"doc_hash": "1a618cca98d1d443b15fb76a28d95324dbbb390e3dca4b50a27965af76ad6694"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_SysCaptureBinary_SysCaptureBinary.writeorg.self__old_buffer_flush_": {"doc_hash": "c306246c1eb2bfcd7aeca4a5bab27e7eb2de4eb0dccbe3bc713767a699365ece"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_SysCapture_FDCaptureBinary.snap.return.res": {"doc_hash": "a6203b6af2aaea6444dcf3cfe5fed983a29746ac1f7e4701614226fa0896ec59"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCapture_FDCapture.writeorg._XXX_use_encoding_of_ori": {"doc_hash": "33e6d12d092e5baf9a21cd11fc791066270ac60badb345763719f75b1b613251"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__MultiCapture_CaptureResult.__repr__.return.f_CaptureResult_out_self": {"doc_hash": "d0d229636853e43a7eb1fda52ea506e6b6b43dc74ea2e1c64b6d0e83a759396a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__get_multicapture__get_multicapture.raise_ValueError_f_unknow": {"doc_hash": "c12d69afe7c1083a33e3dcc1907fa44b35495134f5582c55553b4489a69af372"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.activate_fixture_CaptureManager._Helper_context_managers": {"doc_hash": "f4926fc401d14ca661c2286cc4119109bc1ca4a9010ca4a49509fa49a66805c4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.global_and_fixture_disabled_CaptureManager.global_and_fixture_disabled.try_.finally_.if_do_fixture_.self_resume_fixture_": {"doc_hash": "dc098165503e5eab6b0e2f0fc6a55a8c05f1f33c1135e45541739e74f58a7b9f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.item_capture_CaptureManager.pytest_internalerror.self_stop_global_capturin": {"doc_hash": "b332412352be7c9c743ba2c7d221c7af37593e7fc617c08926529569ea49be8f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py__Python_version_compati_is_generator.return.genfunc_and_not_iscorouti": {"doc_hash": "e3c13836e107d6c40c1e01e4daf3a9a4b6d0cbb573e5638cfb98434091e2b087"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_if_sys_version_info_3___bytes_to_ascii.return.val_decode_ascii_back": {"doc_hash": "a9e054b5c924850d71c059ffc582b3d3fd76903a5ed32791fc958529f0ec8063"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_ascii_escaped__PytestWrapper.obj.attr_ib_": {"doc_hash": "c6dd9dc3ca0c1163dff49ce9f4e2b7ae88a4f0413fab873ffecb8b2faf423c4b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_get_real_func_get_real_func.return.obj": {"doc_hash": 
"56fb4f0809ebeeb1b5b2e1626e16a2aa03061365b1fa64c6964dfd50a541a0a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_get_real_method_getimfunc.try_.except_AttributeError_.return.func": {"doc_hash": "5162d4720436ce0d95acff7018ab213ab913fdcabf74ed56b5359acd529bfdab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_safe_getattr_safe_getattr.try_.except_TEST_OUTCOME_.return.default": {"doc_hash": "342231f59394ead66da5822a1720fa88e0e2b98687b2f7d4ee14ba43d4afc7b4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__Command_line_options__hookspec.HookspecMarker_pytest_": {"doc_hash": "157a6a31a1134626d342f51d9137dc35e4a4994f3466c5684fa7cce17aae749d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_ExitCode_ExitCode.NO_TESTS_COLLECTED.5": {"doc_hash": "0bc62895c9691984757e7cfe13801e07ed97771e8c9d8c21daded89fc35097e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_cmdline_None_1": {"doc_hash": "a3147110706f8041beb7ca32041d53c3801ff89c9daa064ca0e4ce7ac098d66a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__iter_rewritable_modules__args_converter.return.tuple_args_": {"doc_hash": "b0cadad0186eb309d4657aac71304e805079180ba1d1fc215e0ce602da09ebad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config_Config.InvocationParams._The_directory_from_whi": {"doc_hash": "011c2e33c7a56673e897c5aa8e43451332a0017e6beac4444263e8b46f0604ab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.__init___Config.add_cleanup.self__cleanup_append_func": {"doc_hash": "16d33f10f5610248f35a97cacc6e5c6ec2e0a6292266098e607cd0abe662ffa1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._do_configure_Config.get_terminal_writer.return.terminalreporter__tw": {"doc_hash": "e99d289f849b67ef2cfca15ee393df6279c8b4cd00c88f8d3f034dd963fb35a1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._preparse_Config._preparse.try_.except_ConftestImportFail.if_self_known_args_namesp.else_.raise": {"doc_hash": "0fa5e875a97822ed550062e7584635d983e199ed75c653695574a1f1345b6f1e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.pytest_collection_Config._checkversion.if_minver_.if_Version_minver_Vers.raise_pytest_UsageError_": {"doc_hash": "1f41b791a4c8e2e473a8c685fbbb7c49786c6e3f3531c280dcd96f7ead2c41ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._validate_config_options_Config._validate_plugins.if_missing_plugins_.raise_UsageError_": {"doc_hash": "b96ec5f31cca7bed5c792ba445148fc14cc78a537a101ebfb51799e506c60a8b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._warn_or_fail_if_strict_Config.parse.try_.except_PrintHelp_.pass": {"doc_hash": "313a769e0b8cea520403e51fc43af27422b2f22e51f29177dce8b028011a63e5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.issue_config_time_warning_Config.issue_config_time_warning.if_records_.None_1": {"doc_hash": "999dc434bf003dd4795c128ff68fd4205799263b4578f400a4ad4f230f0d3999"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.getoption_Config.getvalueorskip.return.self_getoption_name_skip": {"doc_hash": "2b0165a79420d0ee8c4e0015ddbfda9232909458568d306b31ff323b477f9605"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._warn_about_missing_assertion_Config._warn_about_skipped_plugins.for_module_name_msg_in_s.self_issue_config_time_wa": {"doc_hash": "e97206e8fc5f1378fe8d6b02c29870e44da12070de89ff08f2de4cf24fdcd2fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__strtobool__strtobool.if_val_in_y_yes_t.else_.raise_ValueError_f_invali": {"doc_hash": "5f0d741586e6648be50dcde17af5cfa7771694749301f0b7d5780d5135d722c2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_parse_warning_filter_": {"doc_hash": "ac2a28343d062ade0b9ca66eebb40ff33eafd99beccd9a1c7d2535164bb6c799"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_argparse_FILE_OR_DIR._file_or_dir_": {"doc_hash": "6b3580478d1bd491327489522438131d4026de4c8ded7e74ba2401b9ce49bbac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser_Parser.processoption.if_self__processopt_.if_option_dest_.self__processopt_option_": {"doc_hash": "62cb07bfc2042719c204c4561aaded37f232d3b94dbb9713bd501b429a85ff3d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__Interactive_debugging___validate_usepdb_cls.return._modname_classname_": {"doc_hash": "5716c08d2f52c8baee3a60e1bf7c9ee38fbb4af7accec2c115223641f7efa103"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB_pytestPDB._is_capturing.return.False": {"doc_hash": "2c3ccc88f385335e24d84d76a8440eaa66a004860c67eb26fb653d49aaa68378"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._import_pdb_cls_pytestPDB._import_pdb_cls.return.wrapped_cls": {"doc_hash": "8c741fc401b59c66309ba67688aca3306cbbfc625b05cee6d6631f7496d0f042"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/deprecated.py__Deprecation_messages_a_ARGUMENT_TYPE_STR_CHOICE.UnformattedWarning_": {"doc_hash": "39e12db433d24c2082f464e38b98f8e7267607445ec24f82b3ab993f4d3000d8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/deprecated.py_ARGUMENT_TYPE_STR_": {"doc_hash": "2a2d96fa704503da867168163ef1ae6310959284404857b42e8044ffb924f391"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__Discover_and_run_docte_CHECKER_CLASS.None": {"doc_hash": "f2dadbeb6f21b8cb76036edc5122a4cbf9f59ef6ee1290f30517f75fb5b4ca5e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_pytest_addoption_pytest_addoption.None_6": {"doc_hash": "2025fbfd91bc716f5f798d7dd8ed5ae5ba3694de75f768a230a5fe9c4c72dea9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem_DoctestItem.setup.if_self_dtest_is_not_None.self_dtest_globs_update_g": {"doc_hash": "a2d4784c9e4b44940f2d93f6a0199fe8dff32fb5cf47702711e219cba8ed1282"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem.runtest_DoctestItem.runtest.if_failures_.raise_MultipleDoctestFail": {"doc_hash": "6c137fdb9d8543f65f1ae1fa368e871b93337bddb0636e91294e191e3a6f5115"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_checker__get_number_flag.return.doctest_register_optionfl": {"doc_hash": "dc8ba533a2daf7915ce650d70b7b2246f6597e5335c0801bc2d47721bfc3de05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_io_pytest_configure.faulthandler_enable_file_": {"doc_hash": "3777839e7b2a7df5add829b338b259f5306e93fda4ad8f2e80effd3f32e1c2d2"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_pytest_unconfigure_pytest_unconfigure.if_config__store_get_faul.faulthandler_enable_file_": {"doc_hash": "303b9a79810065453edebf05fd9f2af6ee841a9d14f2091499e34e2006b22d21"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_get_stderr_fileno_get_timeout_config_value.return.float_config_getini_faul": {"doc_hash": "05e28bf35bacfb9f56b58e6af29c5122995dc19886bd42c1b62fdde00dbd7e9c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_pytest_runtest_protocol_": {"doc_hash": "94a68f09147e9e32a1e0824f49d91de69c8dc79415ca4209c14614cd7ba9aaed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_functools_pytest_sessionstart.session._fixturemanager.FixtureManager_session_": {"doc_hash": "bb82de69f00117e562333ee20911dda5a434da9f9a40aec524569484ddfd46ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_get_scope_package_name2pseudofixturedef_key.StoreKey_Dict_str_Fixtu": {"doc_hash": "e69faec4edee39abdacd589dbc5c64bf79bc0cc54af71c159e9869a7d7d6bb7b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_add_funcarg_pseudo_fixture_def_add_funcarg_pseudo_fixture_def.for_argname_valuelist_in.if_name2pseudofixturedef_.else_.if_name2pseudofixturedef_.name2pseudofixturedef_arg": {"doc_hash": "7b6c6b3b5c5fc2929a57f4d8326e2ebf8bc00a324def1b46f7a4aeaca7c2bd2e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__Algorithm_for_sorting_o_reorder_items.return.list_reorder_items_atscop": {"doc_hash": "0d62dd8f8d0238e8952fd2fb63e8a40a514332f62bb699943170a76a6f7b7b2e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__fillfuncargs_fillfixtures._fill_fixtures_impl_funct": {"doc_hash": "17ad29cd329c9ae3297cd8e37980b756ee3a43fff909b753ad9a0c3ccc3ac9fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__fill_fixtures_impl_get_direct_param_fixture_func.return.request_param": {"doc_hash": "f622cfcd15ffe86da24e077d34f092ab10521a1ee644603454a3b80744a73969"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._get_active_fixturedef_FixtureRequest._get_fixturestack.return.values": {"doc_hash": "71c36873876da1f7b9f4683e31cc91b407c95e6c7625180277bde375a7102818"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._schedule_finalizers_FixtureRequest._check_scope.if_scopemismatch_invoking.fail_": {"doc_hash": "7a0d4267447f38ea10504ee09fc433a59da048f14da18b98937b97ba1dbb859a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._factorytraceback_FixtureRequest._factorytraceback.return.lines": {"doc_hash": "44b8b2a1382d36c9f68c842d0d1b47513a3de16cb05f3a4917ff81e273a43c5a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_SubRequest._schedule_finalizers_SubRequest._schedule_finalizers.super__schedule_finaliz": {"doc_hash": "12b850b0561ad8d1975ef02b577324035782495786427c8aa761c8299fdfd15d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_scopes_scope2index.try_.except_ValueError_.fail_": {"doc_hash": "818f30fd6275684f11d1590701e86ebdcefb06e2f553c12ad649a510078dbe66"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureLookupErrorRepr_FixtureLookupErrorRepr.toterminal.tw_line_s_d_os_fsp": {"doc_hash": "4927481c86dcbdd7879b7efa76610c6d90bf9fbb8fbbfaeb4def66e5655cc568"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_wrap_function_to_error_out_if_called_directly_wrap_function_to_error_out_if_called_directly.return.cast__FixtureFunction_re": {"doc_hash": "334a0a4bfd105d922a1024c44a887ab99297e41d0040c5f39e03859282f9ebde"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_fixture._": {"doc_hash": "37a6f28207788c9b4fa94225ead90213695cc7c67e62f1cb0c69a2dce8593004"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_25_fixture_25._": {"doc_hash": "85cf219070603056163a8becad980ad6a3a5245c76bf90186a9e8131012676c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_26_fixture_26.return.fixture_marker": {"doc_hash": "5af2f131ad784beaeccd23e96e4d0db0c0393e26e83b456aa119eb6eebfc29f2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.pytest_plugin_registered_FixtureManager._getautousenames.for_parentnodeid_in_nodes.if_basenames_.yield_from_basenames": {"doc_hash": "22c3f6ad06f19934fa6b05a1f3dc52d1568ab595e548e56543a5cb224438eba4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.parsefactories_FixtureManager.parsefactories.if_autousenames_.self__nodeid_autousenames": {"doc_hash": "cb2b0382bae7bd7ddfa5462c31fb14fc7ecb808b1a87f6730aca371415a25665"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/freeze_support.py__Provides_a_function_to_": {"doc_hash": "860847843115736ba54b74e44c4a8338dc92dfbdd7dcb9ac0358248d69a606d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py__Version_info_help_mes_HelpAction.__call__.if_getattr_parser__parser.raise_PrintHelp": {"doc_hash": "0e364a1638e424de759f36d0c3728db0b943a1ef529cbec73523a6356ca879a3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py__Hook_specifications_fo_pytest_plugin_registered._A_new_pytest_plugin_go": {"doc_hash": "fef42c94759d1c34298b4bd4154f128facd3f6367171f7713b40f6427678a43f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_addoption_pytest_addoption._Register_argparse_styl": {"doc_hash": "d7e4b0ec72c22b92f98c33ed711a1af699ca84bfb3687b4952bf6fa6404697db"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_configure_pytest_configure._Allow_plugins_and_conf": {"doc_hash": "00062098e137db4ede86c121d7eec40d5f2c336aba304aee346e1fac9d734827"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_4_pytest_cmdline_parse._Return_an_initialized_": {"doc_hash": "8a4e498894c4b6659681ddf24282efaa3d2b40c6b79178669e0eed298fd0b88d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_cmdline_main_pytest_cmdline_main._Called_for_performing_": {"doc_hash": "15199109073dacca4816c22302e7460b0d7294326ea9b34ce773c990aec863ba"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_load_initial_conftests_pytest_load_initial_conftests._Called_to_implement_th": {"doc_hash": "2c2fef9fd852870eb5be9607969f5249b739a8d5ba3c430fe72ee3ab341e0c83"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_collect_file_pytest_collect_file._Create_a_Collector_for": {"doc_hash": "1946a3caed659a3aebfd2e483ecba694c25816be1338b81f989d04aea744676a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py__logging_hooks_for_colle_pytest_make_collect_report._Perform_collector_co": {"doc_hash": "79170595e4451477ab9ff44bbea9d302b6c27234d3d42e003632121ec42b633e"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_pycollect_makeitem_pytest_generate_tests._Generate_multiple_pa": {"doc_hash": "59a626f89d11d2d0432d82ef29b0597c6bb71b8d8c47789321a1eab28b0b4c07"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_15_pytest_runtestloop._Perform_the_main_runte": {"doc_hash": "506d418d8698bf52886f486db4a45508866f8002e58096b564e0b98195fa33f7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_protocol_pytest_runtest_protocol._Perform_the_runtest_pr": {"doc_hash": "8821c7dfb6e192048b27976601923b7533dbdb941e9cf22161ec95f1f6854c8c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_logreport_pytest_report_from_serializable._Restore_a_report_objec": {"doc_hash": "d4fec94d766866296f61822eba6885df47a6c5beba06acd677da73574ac59e41"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_18_pytest_fixture_setup._Perform_fixture_setup_": {"doc_hash": "4b72ac6d1a61b6d6a472c16d160ce4068621f279802fc5d0382751f47e8a7870"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_assertion_pass_pytest_assertion_pass._Experimental_Cal": {"doc_hash": "4e200a4fdb3caa2a94b63faa5b7f0d8e08e22e346b5301aeb7645296190a35f4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_27_pytest_report_header._Return_a_string_or_lis": {"doc_hash": "4b1fb760921c74506b2b9161eff555b39d6ea3fb5d1c4054f3ac8bac84172515"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_report_collectionfinish_pytest_report_collectionfinish._Return_a_string_or_lis": {"doc_hash": "ebe9f56cbf9b8023f4c8b52fd6dd8abec80590c61ea1092c57f4859f84c2005f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_warning_recorded_pytest_warning_recorded._Process_a_warning_capt": {"doc_hash": "ccd857f01e308c2b0dbbe49be4de54512e515fadfe3bdb919dc8bee861d9efc6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_30_pytest_markeval_namespace._Called_when_constructi": {"doc_hash": "fdc95810b58737a13f16e56cf3e69662b887f9e1e8f3c7028a65ef5fd273cda8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_33_pytest_keyboard_interrupt._Called_for_keyboard_in": {"doc_hash": "3525a1ea821d421c40b0a9c99603c9e78bcd961dad24fe5d2f62bb75fcaa46b8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__Report_test_results_in_xml_key.StoreKey_LogXML_": {"doc_hash": "6bb4fc2ac91fcb83247a712ebdcfd50e3b8db9193767aa3273b5beb387c79a54"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_bin_xml_escape_bin_xml_escape.return.re_sub_illegal_xml_re_re": {"doc_hash": "a3928c53072a904b8fee3f93acf95775139ddd840dfbba061d0aebf439b38807"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_merge_family_families_xunit2_fami": {"doc_hash": "4cc915b771d940fb8b5f77a81ec76d5781aa53924363d43194db826a9d9d61f2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter__NodeReporter.make_properties_node.return.None": {"doc_hash": "6ac47744f2e82678cbbe38f01626cfb95e8d03fc96480521b304442b0c377355"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.to_xml__NodeReporter._add_simple.self_append_node_": {"doc_hash": "dd15237abf43a76de3cef05490243672962ac539c12916b96bc52a1774ba378c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.write_captured_output__NodeReporter.write_captured_output.if_content_all_.self__write_content_repor": 
{"doc_hash": "a4b6d04608a2b8d15d4a98cb0d59739d2620dbcd1d282f181144ec2699f1cfc8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter._prepare_content__NodeReporter.append_failure.if_hasattr_report_wasxf.else_.self__add_simple_failure": {"doc_hash": "592940becda6781fa32a4c038127b3c2c8bea278defa43b7aa3970edee26d004"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.append_collect_error__NodeReporter.append_error.self__add_simple_error_": {"doc_hash": "e6a26b154365b1e7ad92710bb5f1e15e60dedd5c7463e7641b55cbf0eebbfeb7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.append_skipped__NodeReporter.finalize._type_ignore_assignment": {"doc_hash": "a0e46f61ad2d6dc5fc2f6302c5faa6a87a9f5148b57bf626184fefa049ba7321"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__Access_and_control_log__remove_ansi_escape_sequences.return._ANSI_ESCAPE_SEQ_sub_": {"doc_hash": "1bd3d8b2fa99fd69c68f745f54968220bb6d92ffdc2688ccec429623dbf3558c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py__Core_implementation_of_if_TYPE_CHECKING_.from_typing_extensions_im": {"doc_hash": "13d558dcc71629b40be96246762e3fc5934f004b76cf8b26db09e21f6726940d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_addoption_pytest_addoption.None_15": {"doc_hash": "40a9be8fee356c5f5dddf045726219c8208fbe180e37c69f7586e90ef0b7433b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_addoption.None_16_pytest_addoption.None_22": {"doc_hash": "832c454c8123ebf1a7208b73464e1f0e55618a22a3c8bf0d2db0be1a3da78baf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py__in_venv__in_venv.return.any_fname_name_in_activat": {"doc_hash": "dcf0ce4f8e453109d4ab584cc62446bef403c6d2425a1f86cfb81ced937e7366"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_FSHookProxy__bestrelpath_cache.__missing__.return.r": {"doc_hash": "d24b3339f13b7fe7bb70babd02a377221d3aca0f2e0824a6838f671f830a252b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session_Session.isinitpath.return.Path_path_in_self__initi": {"doc_hash": "fba9c0547769e32a619133ffa3042a1295433de93316340f0937b9bf11f383e0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.gethookproxy_Session.gethookproxy.return.proxy": {"doc_hash": "c23bb6645d02b54c4f8e17b4ffb9ae789a7e354ef17fdab4f277ab2f66909195"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session._recurse_Session._recurse.return.True": {"doc_hash": "48663ccf1c945df250e3463361e2ebc35e04e6bfab4c0b7652971893db3c61b9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session._collectfile_Session.perform_collect_10._": {"doc_hash": "040811652d3ad6958988e5a5b4facfbaebadb23a2273c0e546cde428a318df71"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.perform_collect_11_Session.perform_collect_11.return.items": {"doc_hash": "5dc7870cc20997dde3012ffd7358a5cf5fa388665f72cc1193e1def3e12fb920"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.collect_Session.collect.pkg_roots._": {"doc_hash": "06def90889b86fbe8561fb1bc8469929678023bb44cbda5e15af43a197934664"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.collect.for_argpath_names_in_sel_Session.collect.for_argpath_names_in_sel.None_1": {"doc_hash": "272c1f7f09ce6543d41c171537e557a5b343fd766242265b6865e168ace41aea"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.genitems_Session.genitems.if_isinstance_node_nodes.else_.node_ihook_pytest_collect": {"doc_hash": "d379676a1cfadc5afa21ebb37621cb1465557465144df8b668da246a77ebdaa3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_search_pypath_search_pypath.if_spec_is_None_or_spec_o.else_.return.spec_origin": {"doc_hash": "b8b830bb945eafee669d7be1e703b76728565fd98a527732630a93263d6bd1e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_resolve_collection_argument_": {"doc_hash": "4aa5ee2344500ddac72bcca896eb5a9837336b605c2f307ddb06a4c7d5230379"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py__Generic_mechanism_for__old_mark_config_key.StoreKey_Optional_Config_": {"doc_hash": "450b87ca3824852edeebc9ca4b936131461b15a40daba647f613b17f55d4d43d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_r_Evaluate_match_expres_ParseError.__str__.return.f_at_column_self_column_": {"doc_hash": "d545b9cd3d9ae9f17e5520543924a1f3ce8253fcc5980f37ac9790fdd0d8018e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_Mark.combined_with_Markable.TypeVar_Markable_bound": {"doc_hash": "a3be6e01920dae0de7855731f5610678a34de442a3e1db7c85492a13cc99e657"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_store_mark_None_1._FilterwarningsMarkDecorator.__call__._": {"doc_hash": "5802f098e520820af0d3391135ce4a4d5a082cbbdd2cc63d3990dfe7a2514a77"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkGenerator_MarkGenerator.__init__.self._markers.set_": {"doc_hash": "f5ca15597a17e1ef1361e2cde18cec61864b4f2d7cecd442b36c73d0d2c03bf9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py__Monkeypatching_and_moc_V.TypeVar_V_": {"doc_hash": "0a4f4a9f53007adc37f6b1f10e34d35b609e763eb036e5670a34502a4b3c4e20"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch_MonkeyPatch.__init__.self._savesyspath.None": {"doc_hash": "0d57d2c6332e12db9577c3746686276556ec3665ed014ba6974cd7ca8df7a940"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.context_MonkeyPatch.setattr_2._": {"doc_hash": "9b1f8e960a6da3714b006e958ef96ae130d83903adacf82bd9c7cf7ad4e5f014"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setenv_MonkeyPatch.setenv.self_setitem_os_environ_": {"doc_hash": "95010cef06ad88aa6d5549db745e8fce633ef8f4d2b5eca04bc10354efe47ef6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.delenv_MonkeyPatch.chdir.os_chdir_path_": {"doc_hash": "b68c54ab00805df6d1a9c0df5997b21f74a1d059f27a39e3054219aee21fcbff"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_os_tracebackcutdir.Path__pytest___file___pa": {"doc_hash": "bcd0a79369a47e057926e40b8c192a0fc97265a2c971fb575466fc72d5967fec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_iterparentnodeids_iterparentnodeids.while_True_.if_at_1_and_sep_SE.else_.pos.at_len_sep_": {"doc_hash": "2d443d97080feef4f6100bce82d14d7167e4de2e9a914778249e460aa0186c21"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py__NodeType_NodeMeta._create.return.super___call___k_kw": {"doc_hash": "83da1a5349e518c42b9675eed1ca4ec64fb196f3366b8b84b62781f2861c00d4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node._Methods_for_ordering_no_Node.listchain.return.chain": {"doc_hash": 
"8812785c6ab80e13f27ea9c0b559d0bb7bbb87c85df12e87c6fdc8c704ed3ec1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py__check_initialpaths_for_relpath_FSCollector.isinitpath.return.self_session_isinitpath_p": {"doc_hash": "c318ce9094c117e38f233793753a3262e02ed1caea1d31050b8ac7951a818f85"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nose.py__Run_testsuites_written_": {"doc_hash": "e451e183361db94aa60d99948bd99db6eef2de6c5ca4c3bb516b0019e636f61d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py__Exception_classes_and__if_TYPE_CHECKING_.else_.Protocol.Generic": {"doc_hash": "b06cf522a65bf687b5d6e51ca266493284730b890e26aff02f59671dd9198b3d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_TEST_OUTCOME_Skipped.__init__.self._use_item_location._use_item_location": {"doc_hash": "322314c5ef5a0afc700e07e84abe6885b505909f19cdec78e0b74b84d2666169"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_fail_XFailed._Raised_from_an_explici": {"doc_hash": "79ca0a88e4984b0a58abc84dc911988d20ce19eb15fa6054fafe1e51fdb1db5c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py__Submit_failure_or_test_pytest_addoption.group__addoption_": {"doc_hash": "fc5d53a4e9f7e49781bbbb0b9462a5bc9acb0c9d2be481d975afa680c1f13761"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_ensure_deletable_ensure_deletable.None_1.else_.return.False": {"doc_hash": "7ef850ae770f1b69fd2e9bde5726939fe7efea2f68b034efb5f320962ef17279"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_import_path_import_path.return.mod": {"doc_hash": "55de1cb25fccdcec2f953ef4b2867855877133bdb5005211327273149f6f6598"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py__Implement_a_special__is_resolve_package_path.return.result": {"doc_hash": "76951c460be0ba17911ae888b5ae1b094142b1098b3b25292745b3415fc916a3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_visit_visit.for_entry_in_entries_.if_entry_is_dir_and_rec.yield_from_visit_entry_pa": {"doc_hash": "2efee6641c04e4d8b4809f67ccff271db29eea7b6241c5aaedbfa7c3f3327154"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_absolutepath_commonpath.try_.except_ValueError_.return.None": {"doc_hash": "a366076955b6f9339477967afde9d81ebe4e73efaceb1a6f1cd2ef3b807aa63c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_bestrelpath_": {"doc_hash": "038d64dcdc3e897b2b0cea34e720428136f7c58910436e1902c42fc43af6e77f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py__Disabled_by_default__IGNORE_PAM._filenames_added_when": {"doc_hash": "2fc2a41fec983ff3cdba251e87081edc460dcbba22fc555c704513863f6d5029"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LsofFdLeakChecker.pytest_runtest_protocol_LsofFdLeakChecker.pytest_runtest_protocol.if_leaked_files_.item_warn_PytestWarning_": {"doc_hash": "7768727f847b86f53201473a26087edb672e5d104588a718716871ee7803e101"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py__used_at_least_by_pytest_get_public_names.return._x_for_x_in_values_if_x_0": {"doc_hash": "5aad64c822ab10c7106d6e9941f6a5c73c6d298001b1e833967046bc47adb696"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_ParsedCall_ParsedCall.if_TYPE_CHECKING_.__getattr__._": {"doc_hash": "acdcadd5a17c34731ef32d5cf851fbd24299255f16981af683391b7e0e341e21"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.assert_contains_HookRecorder.assert_contains.while_entries_.for_ind_call_in_enumerat.else_.fail_f_could_not_find_na": {"doc_hash": "09c34544e137865495b98ed3f97d7a31c922c59d36702d75e314c3b4938b425d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.popcall_HookRecorder.getreports_8.return._x_report_for_x_in_self_g": {"doc_hash": "c6956eff8e188382967ebb1971d41afd7becc9496442e56b575179bb562e4df3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.getfailures_HookRecorder.getfailedcollections.return.self_getfailures_pytest_": {"doc_hash": "febdb6ce6db80ad2860b86d236efcdc3084e588e2d6938d5a4cc1f2c683651e2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.listoutcomes_HookRecorder.listoutcomes.return.passed_skipped_failed": {"doc_hash": "8c30e6f39f6b0d390165073d98719683dcec2150a6b83aec399894746cf150f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_linecomp_LineMatcher_fixture.return.LineMatcher": {"doc_hash": "e1e2b2ffa72810984f63a49eb8da83b244749f8a1d141aae355dd40ed1221609"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_pytester_pytester.return.Pytester_request_tmp_pat": {"doc_hash": "3d0c5090cb60c78226a5dd21138287a50e9297448db13359bc96dfa16643b905"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_testdir_rex_outcome.re_compile_r_d_w_": {"doc_hash": "4df0c5a026fddbb273e7491e7eb38d9264decf0c6bfba0e18907cbbaa55b4a97"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.assert_outcomes_RunResult.assert_outcomes.assert_outcomes_": {"doc_hash": "35b095d4f46b3e520e33c1532ad2598ee4b77f269101026a92d0ff323c4e33d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester_Pytester._finalize.self__monkeypatch_undo_": {"doc_hash": "d344ddd3ef4525424a7a913adbf272ad117a097453d6da44595b0623b50e3360"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.__take_sys_modules_snapshot_Pytester.chdir.os_chdir_self_path_": {"doc_hash": "f4f93c596a8a788595aafd416397273c9bc484b3f7a280cc462b9ceeb2e91d17"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester._makefile_Pytester._makefile.return.ret": {"doc_hash": "093a44d79fb9905ecdc9e309b77fb2999f3523c3d9c854c82dea6c04512c9f46"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makefile_Pytester.makefile.return.self__makefile_ext_args_": {"doc_hash": "2252554749b9ca8b205a8bbdb40f392cb50a9e5936aee5fb584e319d9c7985c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makeconftest_Pytester.makepyprojecttoml.return.self_makefile_toml_py": {"doc_hash": "1b4899a55708d6e16a1cc277418261612b078dd4ce5ba2e4dd216ff48e2e8a93"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makepyfile_Pytester.makepyfile.return.self__makefile_py_arg": {"doc_hash": "654d3316baf361043cb41e6fb28bff569d8c4f416abe88a4a81ba206b5589825"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.maketxtfile_Pytester.maketxtfile.return.self__makefile_txt_ar": {"doc_hash": "6f93a493b91ed6f987ad7708b4f0cf009ca708a04c2c670a202628a782b5c8cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.syspathinsert_Pytester.mkpydir.return.p": {"doc_hash": "619e3dd3a88930aac724e5421775ab36f85fb0252d54178debcd1418131a861f"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.copy_example_Pytester.copy_example.if_example_path_is_dir_.else_.raise_LookupError_": {"doc_hash": "77ab9c69cdf54ff17be1a6e63bd52217c964dd5075ab25c7c5f15c91a8d344bc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.Session_Pytester.getnode.return.res": {"doc_hash": "8083d0ff2be6801dfe92b74cdaa07adf2d45ed11242bf8db2e16f24ada033f1e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.getpathnode_Pytester.getpathnode.return.res": {"doc_hash": "527550482435c22daaebf3bd7dcb7ce1314d3644c0863cdf4b536a6471c14207"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.genitems_Pytester.runitem.return.runner_item_": {"doc_hash": "7142196ea69548169d488fb403f4f2507871d02615b427fb8b33c7ae81f335b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_runsource_Pytester.inline_runsource.return.self_inline_run_values_": {"doc_hash": "43992d72830516f800201263d92c4f5f56db26b977d3623b5290f4bea611e333"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_genitems_Pytester.inline_genitems.return.items_rec": {"doc_hash": "9687383f72dbb31726f772fbf25bdf6b8f156a630b0db6b7502f633fa8e8d599"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_run_Pytester.inline_run.try_.finally_.for_finalizer_in_finalize.finalizer_": {"doc_hash": "1d5e9b10e275cfbfbb69247afcf8e28e42a2f1aecfe882406b8e564770cda999"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_inprocess_Pytester.runpytest_inprocess.return.res": {"doc_hash": "7cd2a800cbf0f9698fdd98fb23693ca4d8d86bf11caa576d142428c24ac248f5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_Pytester._ensure_basetemp.return.new_args": {"doc_hash": "dc1676544ec262aea8b3c9d33fedbee185e8565a4d321e3787733ff847b7b1f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.parseconfig_Pytester.parseconfig.return.config": {"doc_hash": "de25b04a1cade89355563480fcb7f8c63d373746e774f5f7347af657096972d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.parseconfigure_Pytester.getitems.return.self_genitems_modcol_": {"doc_hash": "01f3b66c9ba6e42bb5bd532feac9a23e93e2979736afc3a3925f2aeea1e47e66"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.getmodulecol_Pytester.getmodulecol.return.self_getnode_config_path": {"doc_hash": "56497b39ea26bf5ea742131ef2ad1351808b95a4b99225963560280247edcbfd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.collect_by_name_Pytester.collect_by_name.return.None": {"doc_hash": "c73afa644b50cb0aa508c4d181e3c24d390024f43ee0c6e49aae682207deff0c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.popen_Pytester.popen.return.popen": {"doc_hash": "0151fa68bb0f956459194813da73a90bcc4d39ee3b886ab021cb8950a6c5b9c8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.run_Pytester.run.return.RunResult_ret_out_err_": {"doc_hash": "e8b5d850eb5b7e7ac0190535eb56312cce6ed615f8223b4bde0065c621ab7efb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester._dump_lines_Pytester.runpython_c.return.self_run_sys_executable_": {"doc_hash": "f41104371d1755f3d7ac74774e21d1792c4cabea8508d08c40029cc3a41e71b6"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_subprocess_Pytester.runpytest_subprocess.return.self_run_args_timeout_t": {"doc_hash": "b339f116feb1317ab7ab0aa699fb7d8f14cc559780ae0a419f3baff6567634ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.spawn_pytest_Pytester.spawn_pytest.return.self_spawn_cmd_expect_ti": {"doc_hash": "01696fac8206f09f328b07dad8fdd13ab6a192add4731b1720e38afc6b17c136"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.spawn_Pytester.spawn.return.child": {"doc_hash": "1269ae05d2df2fe280ba10031eccede2e440f79471743b0c4f22b27308d994fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir_Testdir.finalize.return.self__pytester__finalize_": {"doc_hash": "e3e2bef4942e3ac051c174d48cf40f9e3c4e88e5c77a440c6e97ec1680f64f10"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.makefile_Testdir.makefile.return.py_path_local_str_self__p": {"doc_hash": "2a976c2b8b4f120603d0e246e4bd12cdf26dfa3b906ffb1bbeb76ef9e2881031"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.makeconftest_Testdir.runpytest_inprocess.return.self__pytester_runpytest_": {"doc_hash": "b58d87322d910f760a1656ecfc50e0f40c652da717d2ec918f5210b0a25bd896"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.runpytest_Testdir.__str__.return.str_self_tmpdir_": {"doc_hash": "69ccd4e6401dac0f355753275baaea947f1422faa19837f24c54845769c1cf28"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester_assertions.py__Helper_plugin_for_pyte_assertoutcome.assert_obtained_expect": {"doc_hash": "9786c4ce002cebb9846497962ab020fc5a29c11a2208640b484482debf8d69a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester_assertions.py_assert_outcomes_": {"doc_hash": "6fc457e8b94905b70fceb8f9337392baa676974396363b83dc63eb37614a3fd5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__Python_test_discovery__if_TYPE_CHECKING_.from__pytest_fixtures_imp": {"doc_hash": "d0a367d1ce18c20098761c83a1f872eddde563a26dd324dce5853a70e4578eef"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_cmdline_main_pytest_configure.None_1": {"doc_hash": "9db80ae355819561e026a4d04df457631f1fdd58594d1e2db9e87f6b1038c9d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__As_an_optimization_the_del__EmptyClass": {"doc_hash": "2ae94b39cf582ae48d41f8e1b8fdf17a0775ba5d15c79166f7b0fef83c923002"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__fmt_on_PyCollector.istestclass.return.self_classnamefilter_name": {"doc_hash": "caf4d3dd773265543ce2ae1593172157227a7da0c444f482fc37d608a9b59166"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._inject_setup_function_fixture_Module._inject_setup_function_fixture.if_setup_function_is_None.return": {"doc_hash": "d83cffb40b493859dc8997f5562942f2c8aa0fb4b8269dd57369db98f43842a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._inject_setup_function_fixture.xunit_setup_function_fixture_Module._inject_setup_function_fixture.self.obj.__pytest_setup_function.xunit_setup_function_fixt": {"doc_hash": "bc9a6c099bac76fa388918d7e13914e01ab6e2fc8307bf06ba9d96e7df5243dc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.setup_Package.setup.if_teardown_module_is_not.self_addfinalizer_func_": {"doc_hash": "dfdaa0e7a81768fc75506921eb013af38fe3f1355b807851baa6e18aea275305"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.gethookproxy_Package._recurse.return.True": {"doc_hash": "6d1d124138c9517e4e21fb52c1fe4315d9fc4f3c538be6be898d5c8a39bf1870"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package._collectfile_Package._collectfile._type_ignore_no_any_ret": {"doc_hash": "42c42e94c0d7a23654b66c3da2a53c5f08897a0785a5e8a9aa1ff82ca6052e26"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.collect_Package.collect.for_direntry_in_visit_str.None_2.elif_path_joinpath___ini.pkg_prefixes_add_path_": {"doc_hash": "c8fac932a2d7528c218af9b013bdab1e78c30a78d3b88c978ab577334ce96f1f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_class_fixture_Class._inject_setup_class_fixture.if_setup_class_is_None_an.return": {"doc_hash": "3caa13d474b3a19cb472eb681eb3f880ac460ef447356f80e8a50a8fe5d5b640"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_class_fixture.xunit_setup_class_fixture_Class._inject_setup_class_fixture.self.obj.__pytest_setup_class.xunit_setup_class_fixture": {"doc_hash": "74e31f220cb8d93b41af55b516f143db0dd65222c2b46a36680c29c8d7b2a414"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_method_fixture_Class._inject_setup_method_fixture.if_setup_method_is_None_a.return": {"doc_hash": "66e39fa8647381e57cea17197f3efb164bce38a8d37bad2eacfd10f73988161b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_method_fixture.xunit_setup_method_fixture_Class._inject_setup_method_fixture.self.obj.__pytest_setup_method.xunit_setup_method_fixtur": {"doc_hash": "a229e9c3b3a92a96db50aba455d120f5a14e305df17c5f3d3a5693f5062c7c6f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc_Metafunc.__init__.self._arg2fixturedefs.fixtureinfo_name2fixtured": {"doc_hash": "0119ef30acaaac5ae94769c9ae39eec3825d4625c1632431f3319f20b94b890e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function_Function.from_parent.return.super_from_parent_paren": {"doc_hash": "93ff23c694b16f13726bc68bb9b49359de910a5051ceed6d567e8dbae27622a3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function._initrequest_Function.setup.self__request__fillfixtur": {"doc_hash": "99ac40b5bc992c979b9de60630e491b5f748444eb3139ef318665fc3d5d62ece"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_math__non_numeric_type_error.return.TypeError_": {"doc_hash": "50e2fc4381e0274a04dbcbeaec1660e262d5a1a954ea936ab876c415ac9324f1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__builtin_pytest_approx_h_ApproxBase._check_type.pass": {"doc_hash": "f6f267b5352ed52e6783aa7b5737d729702d79962e63e632b946889a3ef271b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__recursive_list_map_ApproxNumpy.__eq__.return.ApproxBase___eq___self_a": {"doc_hash": "63e2746661c6a1472ecf78e93a53124af5ca6cd54bd271f2f60ce7b315370104"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxMapping_ApproxMapping._check_type.for_key_value_in_self_ex.if_isinstance_value_type.raise_TypeError_msg_forma": {"doc_hash": "1e5b5e56e382f6612a1ace83a135a8ea7c80bb428ff2e9da8b4859de4d7d3ba6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxSequencelike_ApproxSequencelike._check_type.for_index_x_in_enumerate.if_isinstance_x_type_sel.raise_TypeError_msg_forma": {"doc_hash": 
"e7588573c2aef148092557692a18412ab6c640e45e8bebdea17d66f7181c9fa4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar_ApproxScalar.__repr__.return.f_self_expected_vett": {"doc_hash": "d0e4942b953679fd2b894d05e97fa3aa2dec30119e74c2c6727d7b050182300f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar.__eq___ApproxScalar.__eq__.return.result": {"doc_hash": "22e63aee94c3ed235ee5b7ad996b4b79598b7fa3ed52dc6fed70a4b6d5890b22"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar._Ignore_type_because_of__approx": {"doc_hash": "05b995f105f6d50ce69e059e5e47891cfcb89fe255015b4166a5a7d9c6729e20"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_approx._Assert_that_two_number_approx._Assert_that_two_number": {"doc_hash": "ff21e24993f913be5fc6150f43b53492cfe3a5bf353dd0e20822be2e21dc8cb5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__is_numpy_array__as_numpy_array.return.None": {"doc_hash": "44f452d5909ccddcc634138ec72b92dd5deb09ee2315b1bd099ff4740b847189"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__builtin_pytest_raises_h_raises_6._": {"doc_hash": "d86d4b2fa5d62c90e052a6dacc03b754d2f24ad7924aab4b12bb38bcc526bc46"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_raises_7_raises_7.r_Assert_that_a_code_bl": {"doc_hash": "1e6191ec70a6d28b76286deb3f5d51928b2bff89ac46e3e2a50c717c23040dc7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_raises_7.__tracebackhide___raises_7.fail_message_": {"doc_hash": "b6dcecd7cbfa65bfa43cb77bdedd64bc3aa58f804efb61a4b8c3e84ff50d4321"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py__Record_warnings_during_deprecated_call_2._": {"doc_hash": "7bbad85ab17088225b176b142b2a6c34af62b1f428dcea29bbde5448cd9816b4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_warns_warns_5._": {"doc_hash": "e01e60b8476183198d7ab4b4d89f672d845fcf31d121e739134b14ad67e2c43e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport_BaseReport.if_TYPE_CHECKING_.__getattr__._": {"doc_hash": "9b2bd7b4ade51c45bf58b9deb5a78c04d7e00c3634841ef38abfe1d22d9fd8b9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.toterminal_BaseReport.toterminal.if_hasattr_longrepr_tot.else_.out_line_s_": {"doc_hash": "1b35ba99308a58aed602336f134a912d47ebe871a9b7f1c6b77e9af224e6ad7b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.get_sections_BaseReport.count_towards_summary.return.True": {"doc_hash": "456b32632e7f8c66ed3b1b15235ca134c9d76aedff458c912b0f77833b8f1b18"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json.serialize_exception_longrepr__report_to_json.serialize_exception_longrepr.return.result": {"doc_hash": "1c4e5517ada9a93ce4a6d94df744bf41665575a0034949a95e1a4ab0b70820cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py__Basic_collect_and_runt_if_TYPE_CHECKING_.from__pytest_terminal_imp": {"doc_hash": "5061f44cfaed07596a4adb4aaf085f428c5b12682ff5fff6badb5e9817788c5b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py___pytest_addoption.None_1": {"doc_hash": "7ab27c3e3a4bbcf463c834e93e6faf20696030b0d3578d0707042abb0ad35e6a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_terminal_summary_pytest_terminal_summary.for_i_rep_in_enumerate_d.tr_write_line_f_rep_dura": {"doc_hash": 
"77d461aff6a046a7e1f5b77f20d5f0c904eb97c1424c019bea247ce820787b5b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_check_interactive_exception_call_runtest_hook.return.CallInfo_from_call_": {"doc_hash": "ecf4e3d5ae95ce69a530298aac815b2ec57465eea60b39c71cd40133537870c8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_TResult_CallInfo.__init__.self.when.when": {"doc_hash": "38c74bbe9460a20360e75daa4bece4848119e36239e849708d98979fb34f798c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_CallInfo.result_CallInfo.result.return.cast_TResult_self__resul": {"doc_hash": "87071cce67cfb8ddac0d46d9d93a3793e8374ba8ccab618f7f9904e06a0c44b7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_CallInfo.from_call_CallInfo.__repr__.return.f_CallInfo_when_self_wh": {"doc_hash": "af080a878e924587484a190663e970b084377e91dc3dbede3446e56d8d65ca70"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_runtest_makereport_pytest_make_collect_report.return.rep": {"doc_hash": "df5f285a2c697183f01f3cc9e16db19f8d15d1a42b48d28eaf2fc9014cbb6ef5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState_SetupState.__init__.self.stack._": {"doc_hash": "bcb82a5966357f97975b093c2e4a34fd8d57c853d03b702c21d1343951733c97"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState.setup_SetupState.addfinalizer.self_stack_node_0_appen": {"doc_hash": "47f12815d23441ffc81c9794b643d78a8eaefed5791df0629bd50fbfba49c7c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState.teardown_exact_": {"doc_hash": "d29522f70376dc49cc1533c5b1f2e9334664c35ec8bdfb4a624756a6f2b3bba1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py__Support_for_skip_xfail_from__pytest_store_import": {"doc_hash": "6adb8a025fd5c6d8e631a957894844bfde023184a1bbd066d059f6253776cd66"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py__Saves_the_xfail_mark_ev_pytest_runtest_setup.if_xfailed_and_not_item_c.xfail_NOTRUN_xfail": {"doc_hash": "67e654172b0fd355f1fc940af5d5c607da177f86680893b7717cd24a94106847"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_runtest_call_pytest_runtest_call.None_2.item__store_xfailed_key_": {"doc_hash": "b9aa5383e4f96deaee66da47512b880c6fb8cb71d9d9ddaae59836d9a2297fdd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_from_typing_import_List_pytest_addoption.None_1": {"doc_hash": "abbf40be24c8d672d65d5b241120f6a9fb0c28f1461ccd39b0c08217d8df2439"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_pytest_configure_pytest_sessionfinish.if_not_session_config_get.session_config_cache_set_": {"doc_hash": "205398295200bc8b305eedab918c1b02206200cf60c368922476cbd078486974"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_StepwisePlugin_StepwisePlugin.pytest_collection_modifyitems.if_failed_index_is_None_.else_.config_hook_pytest_desele": {"doc_hash": "ebbd25ede53e5829dd498f7623277dae628bd0017a811d1c1585c3f0934df0c4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_StepwisePlugin.pytest_runtest_logreport_": {"doc_hash": "b4312c7ffdcb3a8c73ecaabd375882b6107d608b6b9475f4cc6a13a91c01845a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__Terminal_reporting_of___REPORTCHARS_DEFAULT._fE_": {"doc_hash": "9dbfd2cb5655fe4171cd617b0dba84b3b3563847aec40a97e46276c5f7adcac9"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_report_teststatus_WarningReport.get_location.return.None": {"doc_hash": "aac1226637f24f680e8c5201ed20643af34b41df6c47d3829cd4b051970058c2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter_TerminalReporter.__init__.self._keyboardinterrupt_memo.None": {"doc_hash": "bb16c0c60c8cf8864b53c1c9678932bc669e9e7f1679da845f84c92dec2c3af9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary_TerminalReporter.short_test_summary.show_xpassed.for_rep_in_xpassed_.lines_append_f_verbose_w": {"doc_hash": "22c42811b018a2d8a8209a2ff02a63f1d49c242505be3871cdd2e49fac561189"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.build_summary_stats_line_TerminalReporter.build_summary_stats_line.if_self_config_getoption_.else_.return.self__build_normal_summar": {"doc_hash": "7cd24ea5ce8302f38b7f1056d3ce5beb9155597121ca35772d595d3aa52760c0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_reports_to_display_TerminalReporter._build_normal_summary_stats_line.return.parts_main_color": {"doc_hash": "c6b87e6211f941ea5165f60801138116845cddfef745011cfe04967e5a4f806a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._build_collect_only_summary_stats_line_TerminalReporter._build_collect_only_summary_stats_line.return.parts_main_color": {"doc_hash": "4218c4515205a7c52149ba4dc471f3974dab05509e9c0032af3fb8a25cda37f4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_pos__format_trimmed.return.format_format_msg_": {"doc_hash": "e3d0c522e071a276c125be87f1a6671b1ac64fad5a50300171cc88f6d58ea326"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__color_for_type_pluralize.return.count_noun_s_if_coun": {"doc_hash": "7767d45b341e4f8a5c747c5d6e66b45c3eb0d6113630cfe54538ab98ce7e483b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__plugin_nameversions_format_session_duration.if_seconds_60_.else_.return.f_seconds_2f_s_dt_": {"doc_hash": "9c466982a96d997bb532c5f37515056e51417a3ef281af585b28dc461306a754"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_raw_skip_reason_": {"doc_hash": "e05532cce917dbf32c2ca984f0984730453156f91cbadab9095b16e8e6c6d6ad"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_threading_catch_threading_exception.__exit__.del_self_args": {"doc_hash": "747b3788578feedded7f828225ece27343de2386f7bb6f9540493386d6ac5289"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_thread_exception_runtest_hook_thread_exception_runtest_hook.with_catch_threading_exce.if_cm_args_.warnings_warn_pytest_Pyte": {"doc_hash": "c8ef02bb9e66754fe0106735a06ea76409775ccebb4ac652d83562ec222d30d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_pytest_runtest_setup_": {"doc_hash": "e0f6e27a1a1d68d1d3b7d4f4661b1f187d4be4d7c2e741cc7fe15f2fd916a264"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py__Support_for_providing__from__pytest_monkeypatch_": {"doc_hash": "ca409c65bd68b89b4d6244ef5bafe2cc663c502a46d097a1ccb04c37d154e2ee"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory_TempPathFactory._ensure_relative_to_basetemp.return.basename": {"doc_hash": "c15c983fa458655b450b91f508e6851192fbccb2df6bd964b3694193ea73f3b0"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmpdir_factory__mk_tmp.return.factory_mktemp_name_numb": {"doc_hash": "00e6d9a65f09e505fd6cc9fffbb9011bf6ebed2a48cb62c448025afaae97edc6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmpdir_tmpdir.return.py_path_local_tmp_path_": {"doc_hash": "bfc5871a4f874e2dc5cdf2d87d7bdf5579a2eb7a5feb0aa5043b67b4a29e9953"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmp_path_": {"doc_hash": "89d2988c4d121c496503bbeeffd1bb7172e646c7ec940f6dc2dc4f8aa3520181"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__Discover_and_run_std_l_if_TYPE_CHECKING_._SysExcInfoType.Union_": {"doc_hash": "8ea4a6b759cb58cdcd3588058c1b177e5030cc169d88599c6a1aff4f9fbb87e2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction.addError_TestCaseFunction.stopTest.pass": {"doc_hash": "a06568ed004881afa3c6a0b5f8a648d51b099978a73b7b0f9759f85c1409476e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__Twisted_trial_support__": {"doc_hash": "70c9ce20a559eb0e062d67bf6676db439ad8e3f26533f5d105be7ae32f166fca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_sys_catch_unraisable_exception.__exit__.del_self_unraisable": {"doc_hash": "77499930819d15074667dffea666f18133db507debd77d53ca93f3e9eeb1384d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_unraisable_exception_runtest_hook_unraisable_exception_runtest_hook.with_catch_unraisable_exc.if_cm_unraisable_.warnings_warn_pytest_Pyte": {"doc_hash": "1a76b12fb9e39c03f36dfaa0c5d763664bd27dc5463621fbded18dddbd92d03d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_pytest_runtest_setup_": {"doc_hash": "3afb5f885e5d0222ca3f08adf06c53eb6c859e05074301e50221461188b297f4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_sys_pytest_configure.config_addinivalue_line_": {"doc_hash": "edd68b41c55963b7c0c05ca6fb8cb651d5b243d3cedaad83227a105aca6810c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_warning_record_to_str_": {"doc_hash": "e5382d3f399d86f66db217f17ee9e933d21b6a4658d515ba00cf4ba1ef2ce433"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__init__.py__PYTHON_ARGCOMPLETE_OK_set_trace.__pytestPDB_set_trace": {"doc_hash": "a06370ca67a2681e36afbdfbd75a5aaac5ff43907d7044b4b118f3f75f273200"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__init__.py___all___": {"doc_hash": "6fbf739f8449f5f6ca664ede6ae164b3830f7ea97a046f0d31283b745e234556"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_hook_configure_error_issue38_TestGeneralUsage.test_file_not_found.result_stderr_fnmatch_lin": {"doc_hash": "3ab368ef0946b04e9b200ed15ea97dc9f052c49031006cc9e6b78ab287420d73"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_file_not_found_unconfigure_issue143_TestGeneralUsage.test_file_not_found_unconfigure_issue143.result_stdout_fnmatch_lin": {"doc_hash": "a7f3b25c315dd9c03476f7d4099d8758a70984de008ce5e7ae98156247e79ffc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_direct_addressing_selects_TestGeneralUsage.test_direct_addressing_selects.res_stdout_fnmatch_lines_": {"doc_hash": "5b985d63821e58472eb66b99606185b76133605e13d91036baec568b6fc5d58c"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_direct_addressing_notfound_TestGeneralUsage.test_initialization_error_issue49.assert_sessionstarttime_": {"doc_hash": "c43786e18948c421b1e7f2e11750f8a208c065295c00ed6dfc82b49e26d46e50"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants_TestInvocationVariants.test_invoke_with_path.out_err_capsys_readout": {"doc_hash": "0c0b257779defe4bc14c4ba158a174821000c2fd9c12b509b8b39459a2758c50"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_invoke_plugin_api_TestInvocationVariants.test_pyargs_importerror.result_stdout_fnmatch_lin": {"doc_hash": "414f71b96016aa832a63fa563188581c9f825d517294b89e8e2df2b728e4f9e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_pyargs_only_imported_once_TestInvocationVariants.test_pyargs_filename_looks_like_module.assert_result_ret_Exit": {"doc_hash": "fefef11d0a6308aacb7d7b330ad5ae6030a151d024071186691b8dcfc9d58736"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations_TestDurations.test_calls.None_2": {"doc_hash": "e95de43aea5ef5112df09d51654b6506843fa1618415f263ac31837db090fc3a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_calls_show_2_TestDurations.test_calls_showall.for_x_in_tested_.for_y_in_call_s.for_line_in_result_stdout.else_.raise_AssertionError_f_no": {"doc_hash": "71171c4728ae65e8711a0fd69d6499cd109230f01a94179a7c4aa6a89a3e50d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_calls_showall_verbose_TestDurations.test_with_deselected.result_stdout_fnmatch_lin": {"doc_hash": "d12f2f6d14ad0e259a2c94149b4400d2a78fcf0198f7e4cc5770b161fda3c6ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_with_failing_collection_TestDurations.test_with_not.assert_result_ret_0": {"doc_hash": "c2188f685c089616f17188846692291ecabd829997f056ebda5fc71dd3d9ec69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_re_test_unicode_handling.str_excinfo_": {"doc_hash": "22725754322dffe12d341d3664b295a6585c7a5dbb5abe63fd14d35134622dd4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_TestExceptionInfo_TestTracebackEntry.test_tb_entry_str.assert_re_match_pattern_": {"doc_hash": "587ff5aec9650f37446d245270eb8c20260bdb39a59c86cf3e5a306bb07a9714"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_importlib_test_excinfo_from_exc_info_simple.assert_info_type_Value": {"doc_hash": "9f4227ba1609b182e452a78a7ccc598e82c1d64a00d65909b40a8c0b1d5a1a05"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_cut_TestTraceback_f_g_h.test_traceback_cut_excludepath.assert_newtraceback_1_f": {"doc_hash": "02c7c4db463cd197d6e34781bcebcc099c76e435942872d37050191a69667adb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_filter_TestTraceback_f_g_h.test_traceback_filter_selective.if_matching_.else_.assert_len_ntraceback_": {"doc_hash": "c7c0a8dd3c26108284abefbc82aacd75d98c8bb1550632892e97985be16922f9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_filenames_TestFormattedExcinfo.test_toterminal_long_filenames.assert_line_3_Value": {"doc_hash": 
"2d4da2a115078f8ae0a846eb1c0b20db928cdaa6da107096081f26f16c769ae4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_cycle_TestFormattedExcinfo.test_exec_type_error_filter.excinfo_traceback_filter_": {"doc_hash": "5d52cfbaee66c91c2d9926f26f5f6336b6aa36648fc690ddfb8027c4f08de25a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_getfslineno_test_getfslineno.assert_getfslineno_B_1_": {"doc_hash": "e211ede784959440f008b274cc005090e7556280c872562f0460b83e978f30c3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_dummy_yaml_custom_test_pytester.return.pytester": {"doc_hash": "b416e7b9ec310fdb142b13221386503d8d73fda7f6c8a4245703e149be6e3c2c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_color_mapping_color_mapping.return.ColorMapping": {"doc_hash": "33f0274d5646ffbb23f9ff4e5582f6c2a42a95ff6588a740eaba38f85892906e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_re_test_fillfixtures_is_deprecated.with_pytest_warns_._pytest_fixtures_fillfixt": {"doc_hash": "3a0a6e693e4946d63a9b72184daead4a17214701b83c594dced896553720e56e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_minus_k_dash_is_deprecated_test_minus_k_dash_is_deprecated.result_stdout_fnmatch_lin": {"doc_hash": "75a340594969185fbab86ff1e3f7f023111320da5e3c16442c0bf255ef244f6e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_minus_k_colon_is_deprecated_test_minus_k_colon_is_deprecated.result_stdout_fnmatch_lin": {"doc_hash": "92cd07d9f1eab38338ddc3f924f49eb88b845d24e30c3712349b5d1aaad366ed"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_fscollector_gethookproxy_isinitpath_test_fscollector_gethookproxy_isinitpath.assert_len_rec_0": {"doc_hash": "abdd4652087aa67bb3e012e43b3705e83a427a41c4d02405e82988e752a8f463"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_strict_option_is_deprecated_test_strict_option_is_deprecated.result_stdout_fnmatch_lin": {"doc_hash": "9e472c209c94e51ad396407217402a4ffddfafab5ca310eecb140d8d94580837"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_yield_fixture_is_deprecated_": {"doc_hash": "8619d8354a97542029e7743934ba7fedd252daaa7d7ceb7b23a7e6a8f562e11b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_plain_async.py__": {"doc_hash": "17abdba0cf965ba417a244234d3285a20de92ba735f214b33de1ff1c0fb3061d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_change_level_undos_handler_level_test_change_level_undos_handler_level.result_assert_outcomes_pa": {"doc_hash": "dedcc9e76117e35139ce539d4150136199f0170d43253087c56f1bfb30841d67"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/bdd_wallet.py_from_pytest_bdd_import_gi_": {"doc_hash": "9e34fa1341147464801e1f169c3a8469bf748da0024118237255169ef4e1deb5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/django_settings.py__": {"doc_hash": "eba90abd1870b02a314f56689c9751fe969ef169e9254196d5eec8c0d7f00337"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_anyio_integration.py__": {"doc_hash": "b8087949f5461705f13ec9365c16482b950ebe29fa95827846a75d234cc196df"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_asyncio_integration.py__": {"doc_hash": "91b5c042d501bbb39815a4b52859e1e5a031ccf23c68eb6b0ea9f9e4feb18903"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_mock_integration.py__": {"doc_hash": "c07d638b0b9d50fe736ca88b105f6544a554124c419be84f775061ecbbd0a834"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_trio_integration.py__": {"doc_hash": "198628d68cb4f2ef1b30d2ecd33691900a1f86c4e261df106e1a866d59f02ce6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_twisted_integration.py__": {"doc_hash": "0c29fb2ae5f5ff2d855f8a94c16078a1ba060f5b3445f3cbe6c7fcb2d3ea0176"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/simple_integration.py__": {"doc_hash": "82c3f8c69642c8bf18ecbfd55d5303f6fa9abcef3405f47ba06b38c7ee15761b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_list_wrong_len_TestApprox.test_tuple_vs_other.assert_1_approx_1_": {"doc_hash": "9d8caa3d7087919136c2055dfc3f54dc25af6fdc7c21a0d4233446c48b57b04d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_dict_TestApprox.test_dict.None_3": {"doc_hash": "b79290ef45df38b0db6c9da4b01292c034b6d97d4cb903b76f650b6f5e4273b8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_dict_wrong_len_TestApprox.test_dict_vs_other.assert_1_approx_a_": {"doc_hash": "1e0ec4c8b54cabea3de2837fb18408c366450d6874aa6a820e29c809020a1750"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_expecting_inf_TestApprox.test_numpy_array_wrong_shape.assert_a21_approx_a12_": {"doc_hash": "e11c9eca333614e4644177f5b3793cc3211b79da656a0d2b660df5311fe76e68"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_protocol_TestApprox.test_numpy_array_protocol.None_4": {"doc_hash": "6e5f7b61f227388c34a13b181eff4f649c4e623ffe85db0335718e3949baefd6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_doctests_TestApprox.test_unicode_plus_minus.result_stdout_fnmatch_lin": {"doc_hash": "d4cdc47e76a038fa1d98079e1fcd204fdc8f93e81c427b170e89413c906977ac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expected_value_type_error_TestApprox.test_nonnumeric_okay_if_equal.assert_x_approx_x_": {"doc_hash": "2e1a11a28b4ea93fbf647f348b15a08cdf6e67b2b56f83bdbddccc983cea166b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_false_if_unequal_TestApprox.test_nonnumeric_false_if_unequal.None_3": {"doc_hash": "2455c3287d1c1524d802b2f00e7a7d08a70b5404dfa83f591bba977ed0e7c790"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_dict_repr_TestApprox.test_nonnumeric_dict_repr.assert_": {"doc_hash": "0b3a5979adfc593ef2d5517b121924d8059e05a5cd0044afcc539c22bd1d9021"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_list_repr_TestApprox.test_comparison_operator_type_error.with_pytest_raises_TypeEr.op_1_approx_1_rel_1e_6_": {"doc_hash": "192fd6b01c298499d7f38509ce022020eb9c101f4e533a804ea6041e1ddffbec"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_os_TestModule.test_import_duplicate.result_stdout_fnmatch_lin": {"doc_hash": "1e837c9eae60b6bd96e08d69987af93ec5bb00f5733ce121b81ec2b3dd4f8dda"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_import_prepend_append_TestModule.test_import_prepend_append.with_monkeypatch_context_.None_2": {"doc_hash": 
"93cdf8a386fbb92d0fcc5351f58e1eeaceebf3c9e0d4dbb43eaef708080f0525"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.make_function_TestFunction.test_single_tuple_unwraps_values.reprec_assertoutcome_pass": {"doc_hash": "ea8a9fe0f0c061efe3c8d4f7c9c7c18a47189d4615a748c9e5b3212900e55bb4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_issue213_parametrize_value_no_equal_TestFunction.test_issue213_parametrize_value_no_equal.reprec_assertoutcome_pass": {"doc_hash": "6af5059606e7b00f3c01f48dbb02e9d4fdcef2977a487506109597bb25e14792"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_empty_string_arguments_TestFunction.test_parametrize_with_empty_string_arguments.assert_names_test_": {"doc_hash": "bcc1c5a8bfcb8e236c13d466f47a99ab96c0fed3a9d107285cd62ce7528ad0c6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_equality_with_callspec_TestFunction.test_pyfunc_call.config_hook_pytest_pyfunc": {"doc_hash": "84c156cf9d461f2f327186ea795be6a621fb77e7cd719c9f1e65897a344f8491"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_skipif_TestFunction.test_parametrize_skipif.result_stdout_fnmatch_lin": {"doc_hash": "b9b7b393ade691f2e1101a958e1d882c19f95b8de0250b895a9e1445fadf96af"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_skip_TestFunction.test_parametrize_skipif_no_skip.result_stdout_fnmatch_lin": {"doc_hash": "3ea354e14e81b9d68ee9624f431c33b70d0a700ba850a2b97335600a22ceabd8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_xfail_TestFunction.test_parametrize_xfail.result_stdout_fnmatch_lin": {"doc_hash": "6f20d7c11d91b45991e20cedb353a95b1fbc851df5aac3fbe354f88f440f8fb1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_passed_TestFunction.test_parametrize_xfail_passed.result_stdout_fnmatch_lin": {"doc_hash": "4ed2a9b90e386b6356839f36de02fa8c0b20eb3aaddc00931fad73c6fec3ac71"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_pytest_pycollect_makeitem_TestConftestCustomization.test_pytest_pycollect_makeitem.result_stdout_fnmatch_lin": {"doc_hash": "2dfca8b728fabbf2165ee5cdc391b4eafac376a81dbfc6a2cd630ac086ff4bd7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_early_ignored_attributes_TestConftestCustomization.test_early_ignored_attributes.assert_len_items_1": {"doc_hash": "0c81dfa770ae40a9acb75be43ce1da45b6d8c007a874023112317568521e1676"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_os_test_getfuncargnames_functions.assert_getfuncargnames_j_": {"doc_hash": "cd0943efedfc5ed68421e1abe04773f232c526aaa8d31f44cfa624722cf9626c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures_TestFillFixtures.test_detect_recursive_dependency_error.result_stdout_fnmatch_lin": {"doc_hash": "c760782c3f42a481f1cda43e4ab60d68467f3bf2d37574bcbf5b07e1b50b70fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_basic_TestFillFixtures.test_funcarg_basic.assert_item_funcargs_oth": {"doc_hash": "5aec535b492c6d4e8d23e6755501863e1e78f5012bd73f935d9d3d3320190e4e"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_lookup_modulelevel_TestFillFixtures.test_extend_fixture_conftest_conftest.None_1": {"doc_hash": "f82cd365041546fdda3667cb6e36c9c412e253d73e058f3b40dfcf5a80d8e3e1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_fixture_reusing_super_fixture_parametrization_TestFillFixtures.test_override_fixture_reusing_super_fixture_parametrization.result_stdout_fnmatch_lin": {"doc_hash": "494032291cfda89f101858179912194ca185a216a54ee8b4c35b2168eca7aebb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrize_fixture_and_indirect_TestFillFixtures.test_override_parametrize_fixture_and_indirect.result_stdout_fnmatch_lin": {"doc_hash": "6b7abbd7ad682e75ebb70af1e2d105b05d7fa935ee132702a89398a003b66d5c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_top_level_fixture_reusing_super_fixture_parametrization_TestFillFixtures.test_override_top_level_fixture_reusing_super_fixture_parametrization.result_stdout_fnmatch_lin": {"doc_hash": "ec6019b3e847f882008eb0bb0dfe3f38ce7023939aa85221a2f0c714b640cd15"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_with_new_parametrized_fixture_TestFillFixtures.test_override_parametrized_fixture_with_new_parametrized_fixture.result_stdout_fnmatch_lin": {"doc_hash": "3fa2ac411b6e420b3c8ced442b53ad53a8bd00747ae7517f29c6654ab3502d04"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_fixturenames_dynamic_fixture_TestRequestBasic.test_setupdecorator_and_xunit.reprec_assertoutcome_pass": {"doc_hash": "a4a06304fbc730f60b1d1356d9dfd57fd23b64f156497399dc34f4adbd98f554"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestMarking_TestRequestMarking.test_applymarker.with_pytest_raises_ValueE._type_ignore_arg_type_": {"doc_hash": "8b5176e537866eb6de7778ce5603b22e824214a014d6c29e272803de4bab2c74"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestMarking.test_accesskeywords_TestRequestMarking.test_accessmarker_dynamic.reprec_assertoutcome_pass": {"doc_hash": "c641896ab16a7eace875067b31b9dfed1fda1e14a7dc64501b15470c313969b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_relative_node_ids_TestFixtureManagerParseFactories.test_parsefactories_relative_node_ids.with_monkeypatch_context_.reprec_assertoutcome_pass": {"doc_hash": "3de52cf26beef8a9fdd9746ccc636e39113e0f397a63613eeec023b033e9a4b4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery_TestAutouseDiscovery.pytester.return.pytester": {"doc_hash": "dc3475118b0b28baf35896158e02ba65440e022da2f80f1ebf6b56531894d7f1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker_TestFixtureMarker.test_parametrize.reprec_assertoutcome_pass": {"doc_hash": "77a9c1e0aeb1673fe2c54fe178eeef3a53bb163c07948543d97d2acc5236721d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_multiple_parametrization_issue_736_TestFixtureMarker.test_multiple_parametrization_issue_736.reprec_assertoutcome_pass": {"doc_hash": "cadc7e7f4355659b64e4299b83b78c3e20b1c0804a8253696b11151dc7e29740"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_scope_bad_return_TestFixtureMarker.test_dynamic_scope_bad_return.result_stdout_fnmatch_lin": {"doc_hash": "41df20bb6cf35feb48acc6a2442a0557ebfd81052b00a33424a45ce8bbffa6fd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_register_only_with_mark_TestFixtureMarker.test_register_only_with_mark.reprec_assertoutcome_pass": {"doc_hash": "35bf8e94b122aca5159ee7b062782c03b61e4ad2df00b759deebcd81eee1bb4b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_fixture_marked_function_not_collected_as_test_TestFixtureMarker.test_params_and_ids.res_stdout_fnmatch_lines_": {"doc_hash": "394831253ab3cfa47f4f87c4a2b7bdd698a71393a33644ec9a9420ea78d10060"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_params_and_ids_yieldfixture_TestFixtureMarker.test_params_and_ids_yieldfixture.res_stdout_fnmatch_lines_": {"doc_hash": "4f48196e9a798ddcacbb3b81cfc3cf425894efe59e4b47e305384a9fb24ba31b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_deterministic_fixture_collection_TestFixtureMarker.test_deterministic_fixture_collection.assert_output1_output2": {"doc_hash": "998a4de86ae25f69aa573bee68bcf4dbe1b22cf3bda2a32b60ec1523c26aa384"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs_TestContextManagerFixtureFuncs.test_simple.result_stdout_fnmatch_lin": {"doc_hash": "50e9af1f442273fb54453ee82c64f0f0e14176a11ffc851d031f79dc8d28b2e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_param_shadowing_test_fixture_named_request.result_stdout_fnmatch_lin": {"doc_hash": "9c04c15893064bcaf2c75530627e20dc5e692b67d2b93046093a4b2604c1d4d1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_parametrization_nparray_test_fixture_parametrization_nparray.result_assert_outcomes_pa": {"doc_hash": "b519937fbb48da43db812e8b1fbbf27c066e53cf59c8806e44d7f3ee0c3bff85"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_arg_ordering_test_fixture_arg_ordering.assert_result_ret_0": {"doc_hash": "632b1b7fb8d32e19e2f92eea10ff068ec15a1dd84d6042c913aab20633657b22"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_itertools_None_25": {"doc_hash": "150798c5a032ddd1ee4a8255ed5f0f8ffb08e41c5a968682adb88ecf27fd1fd6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc_TestMetafunc.test_function_basic.assert_metafunc_cls_is_No": {"doc_hash": "d39bdc86826f284ebae67ccbc84e4ce149f79a31d892b15f845c6bc6886cfd10"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_class_or_function_idval_TestMetafunc.test_notset_idval.assert__idval_NOTSET_a_": {"doc_hash": "6cdbf57bcc6aff6959eeb7985f5f6c2d0d0aff1322ad8ab0200a7c110720213b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_autoname_TestMetafunc.test_idmaker_with_bytes_regex.assert_result_foo_": {"doc_hash": "44477834b83de639b22fa7cd502e64ca36084fda3a6d1a4f59a500de695394b6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_native_strings_TestMetafunc.test_idmaker_native_strings.assert_result_": {"doc_hash": "05dc89da7617fa8f9b9d6656be75ab6f8d47b755ea63a3c1aacc26faeaaaeeb2"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_match_failure_string_quoting_TestRaises.test_match_failure_exact_string_message.assert_msg_": {"doc_hash": "d76bb4076d179ec665b3f0cc7cb109287163b16d0ea35d7f48763739cbf3e57a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_match_wrong_type_TestRaises.test_raises_exception_looks_iterable.with_pytest_raises_.pytest_raises_ClassLooksI": {"doc_hash": "66d1d2ff10bcd9e5d9fc50a745dad73504ec4aba23207011092f84f2f0adc53d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_with_raising_dunder_class_TestRaises.test_raises_context_manager_with_kwargs.assert_Unexpected_keywor": {"doc_hash": "d5b03c9f23f178807a538e42cf1282e540e858a30e798c3efd5287805681f81f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_expected_exception_is_not_a_baseexception_": {"doc_hash": "a4d02d30fc400ddec39eff5351af7733c0d34f80ec434369c686c8d66f84bf35"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_from__pytest_pytester_imp_test_fixtures_in_module.result_stdout_no_fnmatch_": {"doc_hash": "38f5910e46c8522ba243162e31a08face89b69f818653b8e4c79e58f063ba346"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_collections_mock_config.return.Config_": {"doc_hash": "c91d0feeb5178a567fca608efbfecf3eefe31fc9fe149d6dfb936500db57e90e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_namedtuple_TestAssert_reprcompare_namedtuple.test_namedtuple.assert_lines_": {"doc_hash": "646cd274a170706c50b7cea36fa63b575fb37a9046b92875f0cbc1bfc5d04f2d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_namedtuple.test_comparing_two_different_namedtuple_TestAssert_reprcompare_namedtuple.test_comparing_two_different_namedtuple.assert_lines_": {"doc_hash": "8f94c042243e5586c625a1a4aa20afe1900180c74b509636eec03ffc0dd2b9b8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_honors_pep_235_TestAssertionRewrite.test_honors_pep_235.pytester_runpytest_asse": {"doc_hash": "3912b1e625e21ba6aad4f93d2756b7974b5ae62bb8773556af4d2838aac245f8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_binary_op_TestAssertionRewrite.test_boolop_percent.assert_getmsg_f2_ass": {"doc_hash": "9dd209172d799b5d7f17112d31c81b9a5f3d33ae1e109264be451d3ae8992fb5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_at_operator_issue1290_TestAssertionRewrite.test_starred_with_side_effect.pytester_runpytest_asse": {"doc_hash": "58ad80994607f57786b33daa4ba8bb3013008e39b16c0d875a4c2344d9dab267"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_zipfile_TestRewriteOnImport.test_zipfile.assert_pytester_runpytest": {"doc_hash": "e3e3542cc236aeb42cf91f36746927eeb50cafddc3796f794329c433a3ecdf51"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_readonly_TestRewriteOnImport.test_dont_write_bytecode.assert_pytester_runpytest": {"doc_hash": "3b85f5d59c70af8542298775dd0113dd788daacf27b48616d09c38ba079e9b87"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_orphaned_pyc_file_TestRewriteOnImport.test_orphaned_pyc_file.assert_pytester_runpytest": {"doc_hash": 
"d9919879fa095048917f53dfdde173f9b13d842df66a55751052e58282c2b1b0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_package_TestRewriteOnImport.test_rewrite_module_imported_from_conftest.assert_pytester_runpytest": {"doc_hash": "3792d773161c388b01ef41daa7c15e3e4f794d83d0f1a78943152aa35dffeff7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_read_pyc_more_invalid_TestAssertionRewriteHookDetails.test_read_pyc_more_invalid.None_5": {"doc_hash": "92d1e9eac4ae10444cdcee36267ecf637d353f896572e740e0f9a2a07ba5f045"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass_TestAssertionPass.hook_on.pytester_makeconftest_": {"doc_hash": "ae761e3482b35fdacf46d8fd0998b89c288d311a1a7c4610770765e4e52094db"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cache_writefail_permissions_TestNewAPI.test_cache_writefail_permissions.try_.finally_.cache_dir_chmod_mode_": {"doc_hash": "9db57a3ba76265586746c97293fecf017518ae0eed667ad3f9d2def34ba7b35c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cache_failure_warns_TestNewAPI.test_cache_failure_warns.try_.finally_.cache_dir_chmod_mode_": {"doc_hash": "67b558f97ba6fc987b736b23b2b7320ceb8d26065d97b12bae5a2b042b5fe41b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_rel_cache_dir_TestNewAPI.test_custom_rel_cache_dir.assert_pytester_path_join": {"doc_hash": "3fefc02ec6ba69b9de8da01e4f3bc4e42cdfd41bb84cb7b2089dbd58997edc51"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_abs_cache_dir_TestNewAPI.test_custom_abs_cache_dir.assert_abs_cache_dir_is_d": {"doc_hash": "4d3891e7f07c98e499f04f3c85a88681e5a69494308487c90ffb3cbd2f5b5170"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_cache_dir_with_env_var_TestNewAPI.test_custom_cache_dir_with_env_var.assert_pytester_path_join": {"doc_hash": "5c2492b1468e5853bd8d5413833e9bfae0213e7d9d78089e8fac7784a10d9a70"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_packages_TestLastFailed.test_packages.None_3": {"doc_hash": "a0f7a20fe58d62869f82ffcfdfe9490d2da3353a434436a07ac7b086a1b67dac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_does_not_create_boilerplate_in_existing_dirs_test_does_not_create_boilerplate_in_existing_dirs.None_2": {"doc_hash": "c31662ef17d4b6ae42d02d7558395900295460d23b57dbdaa98c37bf70bafc6e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cachedir_tag_": {"doc_hash": "41a0c58a4dda7fc6365401920101cca806942debd7838d106d4466a088dc2ea8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capsyscapfdbinary_TestCaptureFixture.test_cafd_preserves_newlines.assert_out_endswith_nl_": {"doc_hash": "78b3dce1c06f12ae66e1dae23856f3a6d4897fc76448ee5e5e485c052cfcf7de"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_disabled_capture_fixture_twice_TestCaptureFixture.test_disabled_capture_fixture_twice.result_stdout_fnmatch_lin": {"doc_hash": "db5e0d68922b29f53bacbcaa4fabdde32e9724abb89e5c3c4ce52850fcf8f5cd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestTeeCaptureIO_test_dontreadfrominput._just_for_completeness": {"doc_hash": 
"40ca2ddaba262315b627737c47982fd8939788ac44a8f282f3adaa983aa522f0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_captureresult_test_captureresult.assert_cr__replace_err_r": {"doc_hash": "c71164cbde7cec1815efc423e0c915057740688c1d8824707ad537af2b352ee9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_tmpfile_lsof_check.assert_len2_len1_3_o": {"doc_hash": "bba4de3316ee11731caa4faa368468dc7a947b09285f5849ada322af9cd118fc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_not_started_but_reset_test_fdcapture_tmpfile_remains_the_same.assert_capfile2_capfil": {"doc_hash": "d016d53364126259d80c663b7d54587acc3bff68db1e6eb699e0d234975b8c5b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_close_and_capture_again_test_close_and_capture_again.result_stdout_fnmatch_lin": {"doc_hash": "81173d6ee6363f3a6f44119b49f890e7694beff097a4dd35c2aa155a0e2686d0"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_error_attribute_issue555_test_dontreadfrominput_has_encoding.reprec_assertoutcome_pass": {"doc_hash": "57bf12cd943eaa4ab902be6b613a752fc9880e3aed01509f141e5cad35d43457"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_crash_on_closing_tmpfile_py27_test_crash_on_closing_tmpfile_py27.result_stdout_no_fnmatch_": {"doc_hash": "3671e4f023e7209d9fbfd323dc2cb41ea9c064439e9f42f3d486f16b0abced7e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_os_ensure_file.return.file_path": {"doc_hash": "b0ddecb53e7306cc55f531b9c5d77463b05270c104618271c5a2454aa3acbbc3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector_TestCollector.test_check_equality.assert_pytester_collect_b": {"doc_hash": "5a5105ea77bfe8dc60ed27826dc5d44a51e9d19be8c0d01ca31cc20b784035b9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_getparent_TestCollector.test_getparent.assert_class_parent_is_cl": {"doc_hash": "03aff2296e4ee4ff67442f9a4455917fc647de80ada6cb81cf23260d7574691c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectPluginHookRelay_TestCollectPluginHookRelay.test_pytest_collect_file.assert_wascalled_0_ext_": {"doc_hash": "4065dced92c9bfbd0e89ae1aafbf81e734e24360e90c882116b036e3d5319b30"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession_TestSession.get_reported_items.return._": {"doc_hash": "04f7c98c9d55f5d3e6f1069c27316e155dbaadd04156f52aa97ec3b4484a856b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_protocol_method_TestSession.test_collect_protocol_method.for_id_in_p_name_p_name.assert_x_name_for_x_in_s": {"doc_hash": "3af3399ba3ef2009e6f7ea64d44458b44b8904112b435514852a22a1e2d7fb7d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_getinitialnodes_Test_getinitialnodes.test_pkgfile.for_parent_in_col_listcha.assert_parent_config_is_c": {"doc_hash": "c80f963d5c8c5148d846e19eab976fdda33546e6f95f23fefe7d0bce482692f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_continue_on_collection_errors_test_continue_on_collection_errors.res_stdout_fnmatch_lines_": {"doc_hash": "1dbdf2a5894b63f2b7be5e9bafc56635f43a4727d6d68e0fb8f4f2770efdc91a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_continue_on_collection_errors_maxfail_test_continue_on_collection_errors_maxfail.res_stdout_fnmatch_lines_": {"doc_hash": 
"da0ca8e601e25fdbef56637d0f3614e7c330b3e96bb0a9a6137b56145f772462"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_invalid_signature_message_test_collect_invalid_signature_message.result_stdout_fnmatch_lin": {"doc_hash": "ab37293399da71137c2a8f7b0ef3d87308b3cbf4c8b01d8b605dc972f5abdc9e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_handles_raising_on_dunder_class_test_collect_handles_raising_on_dunder_class.assert_result_ret_0": {"doc_hash": "5e470a29f04b626ab8205cb805d94d101b1707c178421c29e2fc503a81cf4126"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_dir_test_collectignore_via_conftest.assert_result_ret_Exit": {"doc_hash": "55da13be1f642236bd94fe8423a8a4b9aae837efdc0425c83e3abe3b86b5a2f6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.test_modules_not_importable_as_side_effect_TestImportModeImportlib.test_modules_not_importable_as_side_effect.result_stdout_fnmatch_lin": {"doc_hash": "c1483ba2f09597df9ec7936c8140507692324c8e21b93ef28a9e429efb5b698d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_does_not_crash_on_error_from_decorated_function_": {"doc_hash": "0523a22a53fdfedffc2fe9080b43935362f5f448d60a2d09060cd2ad2bf936db"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_is_generator_async_syntax_": {"doc_hash": "8c0daf650d27ec9ac26d882bbeb2920bfb90d97448c2687d58f40f7e4a31aca7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_os_from__pytest_pytester_imp": {"doc_hash": "e02c12e1afaa8da4711424538de2cc7b3b4294b1b2835f35df50126cad49df94"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_setupcfg_uses_toolpytest_with_pytest_TestParseIni.test_setupcfg_uses_toolpytest_with_pytest.assert_result_ret_0": {"doc_hash": "ca229de15507b82be2d028db6f44eb19fd85e0bcb466ffb1c025f0413fa3007c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_append_parse_args_TestParseIni.test_append_parse_args.assert_config_option_verb": {"doc_hash": "bc857e9c3bb767abe13acb04fea5d3b0fbd9fe678c03864f861c4d2cedb2495d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_tox_ini_wrong_version_TestParseIni.test_ini_names.assert_config_getini_min": {"doc_hash": "31d81eb245bb7eda823200a2919f70a2f4e96da20a2a3a950ce34e2547445d7a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_pyproject_toml_TestParseIni.test_toxini_before_lower_pytestini.assert_config_getini_min": {"doc_hash": "005f13284460b9092c41b20deeb62b7765ca6b0e039cf6b93fd0d168f7a063e2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_invalid_config_options_TestParseIni.test_invalid_config_options.if_exception_text_.else_.assert_result_ret_pyte": {"doc_hash": "55b71f83634b4548dc1ec9ed1583d5646e49c6e5984aba3da09e33fd3582d6d4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_silence_unknown_key_warning_TestParseIni.test_silence_unknown_key_warning.result_stdout_no_fnmatch_": {"doc_hash": "aaeba4aa4f5392d56b48d2acb3f97b476780ef1d7401684f0c01a9e227a08c97"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_disable_warnings_plugin_disables_config_warnings_TestParseIni.test_disable_warnings_plugin_disables_config_warnings.result_stdout_no_fnmatch_": {"doc_hash": 
"930807d9b9e9d3ae474897ef377e7264318601255c889e7ed1c1dd011de6799b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_missing_required_plugins_TestParseIni.test_missing_required_plugins.if_exception_text_.else_.pytester_parseconfig_": {"doc_hash": "57d0c2b5254d7afaeb1f6375fa9a6f91e35f89d8ec806c5d28f233e9ae14e490"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_early_config_cmdline_TestParseIni.test_early_config_cmdline.result_stdout_fnmatch_lin": {"doc_hash": "ccb269b2d8edf000b4433e7142ccd5555c0c9ffb94f9e9551a3f11937923d113"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_TestConfigAPI.test_addini.pytest_raises_ValueError_": {"doc_hash": "7a981c76fbc2c8216dd4b335636d3ba0d0e98f39242579f355d867b97f3412c9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.make_conftest_for_pathlist_TestConfigAPI.test_addini_linelist_ini_files.self_check_config_linelis": {"doc_hash": "3678b0e6638d06ddb62d7fbf5f107dc2f838f56bbd369b1e85bb67de9a9b2702"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_setuptools_importerror_issue1479_test_setuptools_importerror_issue1479.with_pytest_raises_Import.pytester_parseconfig_": {"doc_hash": "f81e7dbdc99a8fdab6b2c98434fd429c1c4116586ef6116abfb43ecc28566300"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_importlib_metadata_broken_distribution_test_importlib_metadata_broken_distribution.pytester_parseconfig_": {"doc_hash": "a954762597f41456131b4c6a895ad7e08fff58fe250cd49be3288ea3aafd52e4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir_TestRootdir.test_simple_noini.None_5": {"doc_hash": "2ae61f512737d670ae50195c82f719273944fd0c68986cc1800f00e0928a6c98"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_pytestini_overrides_empty_other_TestRootdir.test_pytestini_overrides_empty_other.assert_parsed_inipath_": {"doc_hash": "7654155c7aed26394e7a6e87a4c995448a460c0edcc67f5e98c9ba1b65f26f31"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_setuppy_fallback_TestRootdir.test_nothing.assert_inicfg_": {"doc_hash": "368d8b6e534d54519ba18b31e7515e73bc601335d889ad2a0b5d7a0c8351b281"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_arg_outside_cwd_without_inifile_TestRootdir.test_with_existing_file_in_subdir.assert_inipath_is_None": {"doc_hash": "c67a0625c69d809bade6b2d2a0627d9d30d1f400ca014a7de6df0084ee23d2dd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_config_also_in_parent_directory_TestRootdir.test_with_config_also_in_parent_directory.assert_inipath_tmp_pat": {"doc_hash": "7742c87cbf7c926bcf79dcda8b1b31cddf83d9b8129c77c0501103a89c421e7f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_multiple_override_ini_options_TestOverrideIniArgs.test_multiple_override_ini_options.result_stdout_fnmatch_lin": {"doc_hash": "6deeaa247e92b2b2577a98a874fdd0e6cc421a869b1b42f8e2d54acc59adf906"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_via_addopts_test_help_via_addopts.result_stdout_fnmatch_lin": {"doc_hash": "cadd44385245d9fe416cf3f4f3abaf8dd600d9709f6e27273adc0203e80a4cc9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestSetupCfg_TestSetupCfg.test_pytest_custom_cfg_unsupported.with_pytest_raises_pytest.pytester_runpytest_c_": {"doc_hash": 
"de5d62606ac23fae7198a45f8dfb3b94cb6db242824925ea60ab8a04d4fb0935"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_conftest_import_error_repr_test_strtobool.with_pytest_raises_ValueE._strtobool_unknown_": {"doc_hash": "1deae79e20376edf75151152520ace295394584e6ac9ed30c91b5847735924e9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_parse_warning_filter_": {"doc_hash": "07a130e77605fb97ee28eb73d0a7c274adebb711324fd3297c6f52ee56d63f8a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_argparse_ConftestWithSetinitial.return.conftest": {"doc_hash": "1ced1ab4d3e859a2ade456eadb68c9c45eb4febeedc0e8f86813008354d2bf28"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_conftest_setinitial_conftest_setinitial.conftest__set_initial_con": {"doc_hash": "1049cbc6e400a07fb7905ee2c07f48ced3e0f20406a72742d7f2183835866165"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_value_access_with_confmod_TestConftestValueAccessGlobal.test_value_access_with_confmod.assert_path_stem_conf": {"doc_hash": "5fc7e0fc9ebc0f86477146ffedeffaf2fb881ca0e04791c7671b2e2a1773ec70"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_import_order_test_conftest_import_order.assert_mods_expected": {"doc_hash": "34044c56d739b936c4e451496da9423143e19c9ff1643480af949dfc99e7617d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestVisibility._N_B_swc_stands_for__TestConftestVisibility.test_parsefactories_relative_node_ids.reprec_assertoutcome_pass": {"doc_hash": "92468b1da2fd52cc8c40f8e42054b76b8d2f28850ca1ef3f3aed34d2901cc419"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_set_trace_interception_TestPDB.test_pdb_set_trace_interception.self_flush_child_": {"doc_hash": "ed19a8710dd9520bcbb63a99213a8afd649c88cd55dca0e4574194c62a3b3031"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_and_capsys_TestPDB.test_pdb_and_capsys.self_flush_child_": {"doc_hash": "753f28465a4367bd36bf03ae8e205ee2bb5b02a171bda9ee6fd8469e9e6319e6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_TestPDB.test_pdb_custom_cls.assert_custom_pdb_calls_": {"doc_hash": "90377bc40e58b9a503a9f4515a46096ac21484e8ca1c38e3364a379e79244ad1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_invalid_TestPDB.test_pdb_custom_cls_without_pdb.assert_custom_pdb_calls_": {"doc_hash": "439ae8924894ae677664c87381979d7e8b4679b4adda61451bdb0ea2745eb16f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints_TestDebuggingBreakpoints.test_sys_breakpointhook_configure_and_unconfigure.result_stdout_fnmatch_lin": {"doc_hash": "29783b800eb1c8fb1bc56acbe2308cb78f6adae817f80e52f01c509c5d77ca0e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_empty_TestDoctests.test_collect_module_single_modulelevel_doctest.for_p_in_path_pytester_.None_3": {"doc_hash": "b2dd3b47f069750d6d4fe299a7208b7014a039a41b39badb4ab9ad8af4893dc2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_two_doctest_one_modulelevel_TestDoctests.test_collect_module_two_doctest_one_modulelevel.for_p_in_path_pytester_.assert_items_0_parent_is": {"doc_hash": "921cf8a990fca97e4de21ce32937332da0ad2580fed4f6f8cafd559bc9c8e93f"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_two_doctest_no_modulelevel_TestDoctests.test_collect_module_two_doctest_no_modulelevel.for_p_in_path_pytester_.assert_items_0_parent_is": {"doc_hash": "2d55088e5d9a6e025690cd4321d166137f1fe9ef8a344b665c34955fb8420f08"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unex_importerror_with_module_TestDoctests.test_doctestmodule.reprec_assertoutcome_fail": {"doc_hash": "6ea38ccf95853e3e6b9dfd88bd236f816df9d461ea823d96e50f56748ae6c61d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_external_and_issue116_TestDoctests.test_doctestmodule_external_and_issue116.result_stdout_fnmatch_lin": {"doc_hash": "efd4042f75bb6ef9ca12801b8cd46b8d5b93788d62cdb4f18ac1685536e9bfc7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_txtfile_with_usefixtures_in_ini_TestDoctests.test_txtfile_with_usefixtures_in_ini.reprec_assertoutcome_pass": {"doc_hash": "f37f0358ed113802f6c2759d59b6ed81cf82d21f02a9092bda66a8e87f85149b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_with_fixtures_TestDoctests.test_doctestmodule_three_tests.reprec_assertoutcome_pass": {"doc_hash": "7712007c5207a45547272600e50316c8dabad1ce2b61030eecacd6e4f0e22fb5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption_TestDoctestReportingOption._run_doctest_report.return.pytester_runpytest_doc": {"doc_hash": "40e93dd1b2ae90ab3fd3942d429666eb365d64c445395cc9c97a13121a7db35f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_is_setup_py_not_named_setup_py_test_is_setup_py_is_a_setup_py.assert__is_setup_py_setup": {"doc_hash": "ddb2e3a5a81b4028b7ffa0466e085560de21ee6096c1a8cd8d3cfbbd94deb731"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_is_setup_py_different_encoding_": {"doc_hash": "95f258bfd4cf8fee0d696194a01ea9f084991a3f1483fb5d6a09982344357046"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_io_test_disabled.assert_result_ret_0": {"doc_hash": "fc823f669702557fb3a2be5cadd3b429b8eab2a18977d67bf2b96d6f1e1a47d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_already_initialized_crash_test_already_initialized_crash.assert_result_ret_0": {"doc_hash": "35dcde358794532ec943119a847b7efa7f8a979f689f70241fddced4aecb0fa6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_get_stderr_fileno_invalid_fd_": {"doc_hash": "e818a020f8d86cde2413d331112f70856c0dbeeb914196f4dc89c348eca6203a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_from_pathlib_import_Path_TestLoadConfigDictFromFile.test_invalid_toml_file.assert_load_config_dict_f": {"doc_hash": "ad1ad4cb4a5a7ea8061605dbe389c9b47a001e54ab13351c32715c85cc07da90"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_TestCommonAncestor_TestCommonAncestor.test_single_file.assert_get_common_ancesto": {"doc_hash": "e9e2902bd392f91419e7bfb3a125877b08b4a208c3cd2d3faa16844705aa9344"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_test_get_dirs_from_args_": {"doc_hash": "b4b675cfdc9dfc27bea258e1abf190086f69c734f721c226dcaafe3a66908590"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_none_help_param_raises_exception_test_empty_help_param.result_stdout_fnmatch_lin": {"doc_hash": 
"09eccefd075ae755dadc1773ff86ecb9acfe42cc953b0788b7ce57845ab5ceff"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_RunAndParse_RunAndParse.__call__.return.result_DomNode_xmldoc_": {"doc_hash": "d1dc44326ca613d5e0851d8bf271659f2bbbec11a626ac80a9af9e6ae3c92fe8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_unicode_TestPython.test_unicode.assert_hx_in_fnode_toxm": {"doc_hash": "f9f49d1978875bcbbbaaa18a99177c08ee79b924740a3ffec8354411c8c45d0c"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_assertion_binchars_TestPython.test_assertion_binchars.print_dom_toxml_": {"doc_hash": "826cfd82e77c750b0cf1a246fe11a683146393e964db7c000adc3d6c417a0a15"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logxml_changingdir_test_logxml_check_isdir.result_stderr_fnmatch_lin": {"doc_hash": "64771dd1543b2930c1c54075766bb0d7dceb92207aa4fd0c579f15b55fe13323"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_escaped_parametrized_names_xml_test_escaped_parametrized_names_xml.node_assert_attr_name_te": {"doc_hash": "44d84757cbb9db2150065d932d0b246d570d4cad7bfe3702d44b5d3b5a343b99"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_validate_basetemp_ok_test_validate_basetemp_integration.result_stderr_fnmatch_lin": {"doc_hash": "46db1b6381274c557c50adc0df0ddd3d0f601024d56a17f138be52ae8ac2fae1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument_TestResolveCollectionArgument.test_file.None_3": {"doc_hash": "f947a0b7c01177d90a1ae9504136529235ebef904ae56e3d0fb4ca709debfff4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_dir_TestResolveCollectionArgument.test_dir.None_1.resolve_collection_argume": {"doc_hash": "fec7ae6862a1ef6fe7f369e5a54c532451774d92a6ad800c9cce5478787635f3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_pypath_TestResolveCollectionArgument.test_pypath.with_pytest_raises_.resolve_collection_argume": {"doc_hash": "e835b17694058e407a362de512f8536fd3ce322a07f42baff7da55b996d83780"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_does_not_exist_TestResolveCollectionArgument.test_does_not_exist.None_1.resolve_collection_argume": {"doc_hash": "f12336d977c40633d08b3b384ca0e3ba0fca9feef377e129e7e8fc0fcbf97ce2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_absolute_paths_are_resolved_correctly_TestResolveCollectionArgument.test_absolute_paths_are_resolved_correctly.None_2": {"doc_hash": "f32024aac0400f59edb3d10009d193ed50eb85a8d45e8e45b5f0d06727e5eabc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_module_full_path_without_drive_": {"doc_hash": "531dfb1ef80e2c0a7068f8335df010c58f84ad9b78f76b636eb93ee6d98868b5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_option_test_mark_option.assert_passed_str_expe": {"doc_hash": "df8aaa9f21e7e2945a1dba48a2e523f8b02f3e40bf0f469db25c52f39441b9fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_option_custom_test_mark_option_custom.assert_passed_str_expe": {"doc_hash": "6d4678f2c357ea0e839033afa40b93df413ee29127b3d4207baadfeac937143d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_custom_test_keyword_option_custom.assert_passed_str_expe": {"doc_hash": 
"a91d3499b695a3a06681581e0a851e9392fad84bc62ce8b80feecbd091e75663"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_considers_mark_test_keyword_option_parametrize.assert_passed_str_expe": {"doc_hash": "30675490bcfba2645eeaa049ba8b6dae07c49fd0a1cb4073116a5b173e1747e6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrize_with_module_test_parametrize_with_module.assert_passed_0_nodeid_s": {"doc_hash": "6020a55b0fd786a4763e431cb05d07a813ade8df03628c7d9f138d0b9e059f32"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_closest_TestFunctional.test_mark_closest.assert_has_own_get_closes": {"doc_hash": "1c7db2411f095b59aad4a666950d19eb4f3d5e2933fef354c39cf0e291c3c36d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_meta.py__Test_importing_of_all__": {"doc_hash": "603d5f139a28c9c0202a0f81fd5c2a7c883a7931ef1b12d510e7d2e6782f257b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_Sample_test_issue156_undo_staticmethod.assert_Sample_hello_": {"doc_hash": "76652c0af711cb6a7af1256e12ed90aea066b65f4c0033e470bfa3de59f236ca"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_undo_class_descriptors_delattr_test_undo_class_descriptors_delattr.assert_original_world_": {"doc_hash": "fa56be029775b3fb474995808a9b6dd074373360afffb1f6c85828fcbf71b902"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_issue1338_name_resolving_test_context_classmethod.assert_A_x_1": {"doc_hash": "62bc8a5209826942e07181dfa7d3b9a0e5ee2f728632e15eb0f0832b87383e91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_from_pathlib_import_Path_test_iterparentnodeids.assert_result_expected": {"doc_hash": "33d470969b5f96bdf1ced1c17d1e5e2ca26f509fc3ebf68cf7d7015c1dad9015"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_node_from_parent_disallowed_arguments_test_node_warn_is_no_longer_only_pytest_warnings.with_pytest_warns_warn_ty.items_0_warn_warn_type_m": {"doc_hash": "3e89e5d11faafc3faa6fb011172b30c2f23939d7e60c7e92b436f86504496ece"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_node_warning_enforces_warning_types_test__check_initialpaths_for_relpath.None_2": {"doc_hash": "c27fe363eb93c118dba9fc50162897c1f16c39bc29068412438a7d727aaf93cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_io_TestPasteCapture.test_failed.assert_reprec_countoutcom": {"doc_hash": "fe8334d3b1a2b0867d1c6531ae44e99ad3d93fe911e4bb027e549810ff7b7d37"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_os.path_from__pytest_tmpdir_impor": {"doc_hash": "94dc68d7ab6efd68116e754772850169e4a608ab8c7e78ae7b5ac39005561373"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath_TestImportPath.path1.assert_path_joinpath_sam": {"doc_hash": "3fac8f6fb6bb1c503a887d819e74924fc23020cb8c12f9b52cddb755c2e14cb5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.setuptestfs_TestImportPath.setuptestfs.module_d_write_text_": {"doc_hash": "2c2dfe1c506bd16ea464cbd2a348becee0792306e8bb9897cff47ad7b2872204"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_smoke_test_TestImportPath.test_renamed_dir_creates_mismatch.None_1.import_path_tmp_path_join": {"doc_hash": "d615a47080bd2a038c565bf24442677ff026e8f0a461561a0a504e050ddedb44"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_issue131_on__init___TestImportPath.test_issue131_on__init__.assert_m1_m2": {"doc_hash": "6904d844f7ba676d6a2302ccfdbbf37faa2f75306f95698c25e027115f6b003b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_ensuresyspath_append_TestImportPath.test_importmode_twice_is_different_module.assert_module1_is_not_mod": {"doc_hash": "5774a2a94baa2f307ef4e674ad09a9619e0b1280d045ab8ca3f8b8ab6a98c052"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_resolve_package_path_test_package_unimportable.assert_not_resolve_packag": {"doc_hash": "a030f425e19ac27824a275630ecdbe5bb3e899c94d176886da60387ac63dfe26"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_access_denied_during_cleanup_test_access_denied_during_cleanup.assert_not_lock_path_is_f": {"doc_hash": "a4ea037a7a4fbc847f7742df8b43742d78c6569ef5f55148ab55a0f25c16e49e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_suppress_error_removing_lock_test_suppress_error_removing_lock.assert_not_lock_is_file_": {"doc_hash": "3debed86472e87bb47c9e2635fb105b5b955d4e772a6445ac6cbbad942b68309"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_bestrelpath_test_bestrelpath.None_4": {"doc_hash": "2bdc7ed8f12a8447bc0a818a86bae1842b3d193e15d6521799651ed061f9554e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_commonpath_test_visit_ignores_errors.assert_": {"doc_hash": "c1b39b0a71dd0a64169f426ed8ba8425cbd12fb23b277280b223280e69e2b6c7"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_samefile_false_negatives_": {"doc_hash": "a198977cde647ca0a379dce3239886180ed0383e284bb76f6a7e0fe49f58fcb8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_os_None_18": {"doc_hash": "d070f38174d1d2fb574871cfadc00b473681bca7b2e003330d58d3b893b258ff"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_make_hook_recorder_test_make_hook_recorder.pytest_raises_ValueError_": {"doc_hash": "e9cc13ce8e21bff2b7799d4fb7583eea1a594ff80b7469bbcef6115247f22fd2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_parseconfig_test_pytester_runs_with_plugin.result_assert_outcomes_pa": {"doc_hash": "aa5d6eb892ebc1e6169a8d002e1d4bc2c663623dc62219408369bf0331ad3d37"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_with_doctest_test_pytester_with_doctest.assert_result_ret_0": {"doc_hash": "6556e43b602b32f49899157d23f83caf25cbe79720fcf37f2282723e8ee2dffc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysPathsSnapshot.test_preserve_container_test_pytester_subprocess.assert_pytester_runpytest": {"doc_hash": "c57e3c8e9074f1eba99bde788e0526192861e57ebc6c4d41cc05c53fe9e37b57"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_subprocess_via_runpytest_arg_test_pytester_subprocess_via_runpytest_arg.assert_result_ret_0": {"doc_hash": "c75884a49366a7fb43d1afb56e3b7f111ea7ce193df4aa42695a8d8291ba9eb5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_unicode_args_test_pytester_run_timeout_expires.with_pytest_raises_pytest.pytester_runpytest_subpro": {"doc_hash": "77b4aa36ccb0eb8f2f27de5e74e3e36598f86dc79a1d60ba9387afda0632928a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_no_matching_after_match_test_linematcher_string_api.assert_str_lm_foo_nb": 
{"doc_hash": "3e57f708303fc2ea2689c482165e7aa3b4b9a8af7694addf08c429d00ba972d6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_addopts_before_testdir_test_pytester_addopts_before_testdir.None_2": {"doc_hash": "09899ea5d419810612077dac34f3d3051b4f466a9a7a915583fa00062d9b19eb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_outcomes_with_multiple_errors_test_pytester_outcomes_with_multiple_errors.assert_result_parseoutcom": {"doc_hash": "3ad7f45bc915ee496f839d037c19c01226fcbf1202c9d1e9be345ebf1eedfb49"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_parse_summary_line_always_plural_test_parse_summary_line_always_plural.None_1": {"doc_hash": "b31766e6ace891e2527a123094c1b3ba9f15f2332ae553e706ba94864d574a54"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_makefile_joins_absolute_path_": {"doc_hash": "8d1b00cdb78d7d259c17ae1e4201b1823e73e8a2296dba8dcf163f8edd861992"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarningsRecorderChecker.test_warn_stacklevel_TestWarningsRecorderChecker.test_invalid_enter_exit.with_WarningsRecorder__is.None_1.with_rec_.with_rec_._can_t_enter_twice": {"doc_hash": "0acc8502993a7e6e5151b0f142da99b235d8c5b262502106f4b04f8029272270"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_from_typing_import_Sequen_TestReportSerialization.test_xdist_longrepr_to_str_issue_241.assert_test_b_call__to_js": {"doc_hash": "3416b4498f13b3ca90f57fc0cf3a0d934bf78c5126e40ca00582829b87803ff3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_reprentries_serialization_170_TestReportSerialization.test_reprentries_serialization_170.for_i_in_range_len_a_entr.assert_rep_entry_style_": {"doc_hash": "ff73ae15fbae8406d4b0a345a0554c97dae730246a8e58dfc2f4a783dc1e9617"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash._3_reports_setup_call_t_TestReportSerialization.test_chained_exceptions_no_reprcrash.loaded_report_longrepr_to": {"doc_hash": "775185383231886d490bf23d3e8a6575b51544c3493f52c5a4d1932cd4cd2ba3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_report_prevent_ConftestImportFailure_hiding_exception_TestReportSerialization.test_report_prevent_ConftestImportFailure_hiding_exception.result_stdout_no_fnmatch_": {"doc_hash": "451ae7fc8ddec74111c866651dd6689a989666dbe1b29592f907ef05a1517974"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_inspect_TestSetupState.test_setup_fails_and_failure_is_cached.None_1.ss_setup_item_": {"doc_hash": "f032b29f9386100af8249f5cdd476158f1432539aa6248b535d2acbaab7fe466"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_scopes_one_fails_TestSetupState.test_teardown_multiple_scopes_one_fails.assert_module_teardown_": {"doc_hash": "ed852ccf8c28bb66bae6e6d199411a80c78455f520769ec4f401fbfd44d05c97"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_exact_teardown_issue1206_BaseFunctionalTests.test_exact_teardown_issue1206.None_12": {"doc_hash": "48d237b0725dc9fd4c1a40784b372d7063085150a2341e674f56846ebd2c8aa9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_runtest_test_pytest_fail_notrace_runtest.result_stdout_no_fnmatch_": {"doc_hash": 
"46dfe99b4848289ea834523e0c2e086bc22b8478287303d2cec0e8dee0496d8f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_collection_test_pytest_fail_notrace_collection.result_stdout_no_fnmatch_": {"doc_hash": "0df9f5885c7fc46a4b954db08a9d2026df0d541df9bf6d34df6db3c167cb2908"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_test_importorskip.try_.except_pytest_skip_Except.assert_False_f_spurious_": {"doc_hash": "0df79681a83b59e25735f98f857499e7543125060f289cc0fdf3eddf9bca2b21"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_imports_last_module_part_test_importorskip_dev_module.try_.except_pytest_skip_Except.assert_False_f_spurious_": {"doc_hash": "abd4836fe331af32fc86bd459c7f330c539fbb51d29dabd750e5712f873c39d9"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents_TestReportContents.test_longreprtext_skip.assert_Skipped_in_call_": {"doc_hash": "e3f26842f8ab33886886818f2afd49df17ee015e22f2558eecbad6d1c244265e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_longreprtext_collect_skip_TestReportContents.test_longreprtext_failure.assert_assert_1_4_in": {"doc_hash": "ff0b518d98cc6263e8c5fa4056205928bcb6b53ca9afb300a305cf0b41eeb01e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py__Test_correct_setup_tea_test_module_and_function_setup.None_1": {"doc_hash": "a6b4d8c5ad5189ef5a267e88851d87d9598483db7c4910c43298742013964fb4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_module_setup_failure_no_teardown_test_module_setup_failure_no_teardown.assert_calls_0_item_modu": {"doc_hash": "a2b12a1617ff8a35fd3465db57446f987cb31a3cf69c53700fbe1cb8bef1b2fa"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_function_failure_no_teardown_test_setup_function_failure_no_teardown.assert_calls_0_item_modu": {"doc_hash": "30d5724a6c8effb5456aac47de71acea3b719c86e09a25bc8072c5a995b0d09b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_raises_output_SessionTests.test_raises_output.assert_DID_NOT_RAISE_in": {"doc_hash": "0e0754eff31c15ec122ffb3ea6c6755940fed56ad555049c4666b17811e0cdac"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_syntax_error_module_SessionTests.test_maxfail.assert_passed_skipped_": {"doc_hash": "596c4e5862efd8a7c78cfec5570ec297c3f0c892953804c0698192f8e3f0d8fd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_from__pytest_pytester_imp_test_show_fixtures_and_test.result_stdout_fnmatch_lin": {"doc_hash": "93722e13e0600b631a81b307b18d988d77f7470cd570ac68492e94c78d7bfa10"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_one_arg_twice2_TestEvaluation.test_marked_one_arg_twice2.assert_skipped_reason_": {"doc_hash": "d2692d3e8b226d1ab19021af15b33c4afe3ab47e800cd2e99831fb098ef0a7eb"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_skipif_with_boolean_without_reason_TestEvaluation.test_marked_skipif_with_boolean_without_reason.assert_": {"doc_hash": "834456e1d2f251fb19f2b256be3f6a28ca3d2b78945aa48607d3577df22a602f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_TestEvaluation.test_skipif_markeval_namespace.None_2": {"doc_hash": "bce0b1765fbd2073ae02ccbb3d2d065e43701c9da7e06ccd05caa3801365c515"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_multiple_TestEvaluation.test_skipif_markeval_namespace_multiple.reprec_assertoutcome_skip": {"doc_hash": "22679c17c7436e629980023296f5c3b5366253e93282e0dc1e85db7ace6ea851"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_ValueError_TestEvaluation.test_skipif_markeval_namespace_ValueError.res_stdout_fnmatch_lines_": {"doc_hash": "93e850a9f5d3bc642ac86c3d17fbdafc975125e25423788f48471610b930e767"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_xpassed_TestXFail.test_xfail_xpassed.assert_callreport_wasxfai": {"doc_hash": "f697cc04ac2301bcb194472364655b141e26e9ef3c71fdbd66b00a41ea503894"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_using_platform_TestXFail.test_xfail_using_platform.assert_callreport_wasxfai": {"doc_hash": "2f701e99b2abf0e762856fdb3cffdb76890c647aff15609bd736925c013a9e0f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_xpassed_strict_TestXFail.test_xfail_xpassed_strict.assert_not_hasattr_callre": {"doc_hash": "0c26fc789b56a0260dce735a009f1887b62032b7fa393e1ff749fc4468fc5d92"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_run_anyway_TestXFail.test_xfail_run_anyway.result_stdout_fnmatch_lin": {"doc_hash": "27f77b70a0d98b9e9547957f8ea951d9bc5268053197bd372fb8d0f28d87474b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.xtest_dynamic_xfail_set_during_setup_TestXFail.xtest_dynamic_xfail_set_during_setup.result_stdout_fnmatch_lin": {"doc_hash": "9e594c5760899ac53a7a57ac2e10cd14f3237863eda8326e87ffffd848ea2aa2"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_dynamic_xfail_no_run_TestXFail.test_dynamic_xfail_set_during_runtest_passed_strict.result_assert_outcomes_fa": {"doc_hash": "ed754363be2adaee1339331b446b45c09b3d750a5628094f870821a0ac5c85ea"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_TestXFail.test_strict_xfail.assert_pytester_path_join": {"doc_hash": "6032931023385bbb64700bb10a9ddc32d7a53ecbd6873b76073f669d4a09fb59"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_markeval_namespace_TestXFail.test_xfail_markeval_namespace.None_2": {"doc_hash": "202bbf922dbc3182bbce508c14fdfeb03292922f96ec23b498f67558258129ce"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestBooleanCondition_TestBooleanCondition.test_skipif.result_stdout_fnmatch_lin": {"doc_hash": "0a01d909937cd01a44e7a9f7b28d252afb63e7c2602651844a337845b4307d22"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestBooleanCondition.test_skipif_noreason_TestBooleanCondition.test_xfail.result_stdout_fnmatch_lin": {"doc_hash": "9df270a29bd6201c36372e727819e389a4ab7e16d32ba59f9ccfcffbf766ce95"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_pytest_stepwise_pytester.return.pytester": {"doc_hash": "05cacd241b346c6a1c1925451edf7d40938123ca9ca7b63d5f494b125ef50cd6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_error_pytester_test_run_without_stepwise.None_2": {"doc_hash": "8977cc2d8d3ab79e6d047daa89cbfff617bb6ed106fceeb063cd2ba022a47cd6"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_stepwise_output_summary_test_stepwise_output_summary.None_2": {"doc_hash": "7fcd53a0dc8fbed2a86502f6f5413402bf5674d7dbbccdc34d529ec5ec6b9e3a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_run_with_skip_option_test_run_with_skip_option.None_4": {"doc_hash": "c8c8d7a78c782ccccf4e9d22675797d5875daac534bcda0f5ab51dffafb5c6cd"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_fail_on_errors_test_change_testfile.assert_test_success_PASS": {"doc_hash": "7fa297209833ee1659da534ec81c8039c24116880ae2bd97eb4ed8ba7d763d35"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py__Terminal_reporting_of__option.return.request_param": {"doc_hash": "16a0e491a79dddff7cff6f73813b59acbf7cf6579feee1570d7869bfd8c64869"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_verbose_skip_reason_TestTerminal.test_verbose_skip_reason.result_stdout_fnmatch_lin": {"doc_hash": "1a56d81e21bd5e8d85d1b8e59083e637d282d8845438bcb6199df80cea76968f"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collect_only_summary_status_TestCollectonly.test_collect_only_summary_status.None_10": {"doc_hash": "2f7101eabdce18803afccd369dd2c64dac569824c35014c1356a643f2660dca3"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_absolute_testpath_TestTerminalFunctional.test_header_absolute_testpath.result_stdout_fnmatch_lin": {"doc_hash": "e5b8be248ac10cf725664e2a01bab6c94ae3197572be438494ead6bab304eb2d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_color_yes_test_color_yes.None_1": {"doc_hash": "f8308fda995b186f182e956b43fb0f1dfd02192d083c861f6569da1f974c5256"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_tbstyle_native_setup_error_test_tbstyle_native_setup_error.result_stdout_fnmatch_lin": {"doc_hash": "8ad33beaae23d80b52126627d9d363058165e5942490beb5bd4427073205eeab"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_test_terminal_summary.result_stdout_fnmatch_lin": {"doc_hash": "acdf0ba90329a5bf88fba46600bd90d9dc2a29ba6742b29a4ff10e0b15340c36"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle_TestProgressOutputStyle.many_tests_files.pytester_makepyfile_": {"doc_hash": "84bb49371e9db514fa58cd6dc94696951815e62bf5a9deedde9ccc2c30faca7d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown_TestProgressWithTeardown.many_files.pytester_makepyfile_": {"doc_hash": "6010a7ca39fb89c3ebeb6ed958cefaf44ada1a748ca2ce3d8054a54aa062600e"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCodeHighlight.test_code_highlight_continuation_TestCodeHighlight.test_code_highlight_continuation.result_stdout_fnmatch_lin": {"doc_hash": "1d867c422400bbd4a6c37add3f5cd398a432add5ee9601549645e678389fe713"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_raw_skip_reason_skipped_": {"doc_hash": "b2c5efc1e6a5972230150fa6cee810a760a87a7c9bdb0724c7f8ed1bd2b15826"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_sys_test_unhandled_thread_exception.result_stdout_fnmatch_lin": {"doc_hash": "938b29c4670d85c118cc00f363a1a2f44f246e366521a5ffda5d5ff719462e2d"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_in_setup_test_unhandled_thread_exception_in_setup.result_stdout_fnmatch_lin": {"doc_hash": "184759578ad1199872d3e82daac251c647f306fecd20718fb5c8444545f07c7d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_in_teardown_test_unhandled_thread_exception_in_teardown.result_stdout_fnmatch_lin": {"doc_hash": "61eb60bc5b0120d30dd81e247763be27e9b75281a51c3dacb065ed25676d455d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_warning_error_": {"doc_hash": "8afe17a4abd033ff20864e1a70ee3fdaf5a19d91c74caf8caa23180de25ef97b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_skip_issue148_test_unittest_skip_issue148.reprec_assertoutcome_skip": {"doc_hash": "fe8686931edf9aca0f683422653cdde8fa4f94f6be9f157a408f4368b295a8a6"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_method_and_teardown_failing_reporting_test_method_and_teardown_failing_reporting.result_stdout_fnmatch_lin": {"doc_hash": "ed32b20e2f4132af71d9d03f2a4bc12ec29e17f45542ea8885781b3c71070b5b"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_pdb_TestTrialUnittest.test_trial_testcase_todo_property.reprec_assertoutcome_skip": {"doc_hash": "733611eb55767274b2f96bf606015e25e09d1bc9e9360eb9af49b16e2f153b13"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_testfunction_todo_property_TestTrialUnittest.test_trial_testfunction_todo_property.reprec_assertoutcome_skip": {"doc_hash": "7341377637649db154565f86785ae6838f9dd6d0eaf0e81f55a7f62efda8ca86"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_not_shown_in_traceback_test_unorderable_types.assert_result_ret_Exit": {"doc_hash": "d826ccd1b18eaee4cffaaa2736018360d237939f5ba129554021e3a5c44b86da"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_typerror_traceback_test_unittest_expected_failure_for_failing_test_is_xfail.assert_result_ret_0": {"doc_hash": "4cec663477b81198e259140666a7b6944a6635d431f4f463e4a8631b47918580"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_async_support_test_asynctest_support.reprec_assertoutcome_fail": {"doc_hash": "a00ca45d7bba242421b7c97d4daa7aee46b902f1ab9c7a1ade11d9cd4bcc34a5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_plain_unittest_does_not_support_async_test_plain_unittest_does_not_support_async.result_stdout_fnmatch_lin": {"doc_hash": "febe502559c7e622736ae2cd111cc649507612ff4f0ce8fa7ea7af694035588d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_success_test_do_class_cleanups_on_success.assert_passed_3": {"doc_hash": "031be6a292419093abc42213bd39cfbf6fb8c2011a3b8485fd02e7ee0315edbc"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_setupclass_failure_test_do_class_cleanups_on_setupclass_failure.assert_passed_1": {"doc_hash": "c4254a201a28f97db4ec676a2b5a5f4a275bf9794131b7b553d30ff4e8d5259a"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_teardownclass_failure_test_do_class_cleanups_on_teardownclass_failure.assert_passed_3": {"doc_hash": "b7e8c0549ae9b1674e33973b2304979f452007d9a964b606621041c8b96420bb"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_success_test_do_cleanups_on_success.assert_passed_3": {"doc_hash": "d36a56cb9efefc34c87342845afc510627949d6f8e88d6c0206705ca09265f91"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_setup_failure_test_do_cleanups_on_setup_failure.assert_passed_1": {"doc_hash": "24666f9b2c2fd65d4c8176b1cacf36acd99fa35cfab6bbdb61bbbd06837480a4"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_teardown_failure_": {"doc_hash": "cc413c74b3e5ef1e631cd701e6ee117c2cd22bb4fbcb7848296e23e3e08135cf"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_sys_test_unraisable.result_stdout_fnmatch_lin": {"doc_hash": "992e1da732f9c8d571b8949579083a6de7e84a2f3f99f3c73d68374512f5b7d5"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_in_setup_test_unraisable_in_setup.result_stdout_fnmatch_lin": {"doc_hash": "6974ac186d6c47fc2badd8441401f001e55583d9725940ae704387e288565f69"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_in_teardown_test_unraisable_in_teardown.result_stdout_fnmatch_lin": {"doc_hash": "bd7f648baf1591b0fb9434fbfcc5edc16acfc0c03245b6fd6a5a72f7f874ea8d"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_warning_error_": {"doc_hash": "03f97f33b95a0e19875fce9249f59e02d43a517c33174b2d686fea7312d0bdc8"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault_TestDeprecationWarningsByDefault.create_file.pytester_makepyfile_": {"doc_hash": "ab893ce553329fb1f923f24db1068b4044f943f913b587725ea87982c3fb4419"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_conftest_warning_captured_TestStackLevel.test_conftest_warning_captured.result_stdout_fnmatch_lin": {"doc_hash": "c7e207aad4b162c68eaa7e9b93834c702c4ad04899a10d40a1c06bff47da0296"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_import_plugin_TestStackLevel.test_issue4445_import_plugin.assert_func__warn_abo": {"doc_hash": "e5896a1d6f9ccdb7d76d68848b234fdc4eaea03b03e30468633e372c3ba0e177"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/typing_checks.py__File_for_checking_typi_": {"doc_hash": "53a6b30ce6e8d32d8f781f12c85a9e1c4fdce421a63a16ef051b2b5a461cd397"}}, "docstore/data": {"/tmp/repos/swe-bench_pytest-dev__pytest/bench/bench.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/bench/bench.py__", "embedding": null, "metadata": {"file_path": "bench/bench.py", "file_name": "bench.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 14, "span_ids": ["impl:2", "imports:2", "impl", "imports"], "tokens": 99}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\n\nif __name__ == \"__main__\":\n import cProfile\n import pytest # NOQA\n import pstats\n\n script = sys.argv[1:] if len(sys.argv) > 1 else [\"empty.py\"]\n cProfile.run(\"pytest.cmdline.main(%r)\" % script, \"prof\")\n p = pstats.Stats(\"prof\")\n p.strip_dirs()\n p.sort_stats(\"cumulative\")\n 
print(p.print_stats(500))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/bench_argcomplete.py__10000_iterations_just__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/bench/bench_argcomplete.py__10000_iterations_just__", "embedding": null, "metadata": {"file_path": "bench/bench_argcomplete.py", "file_name": "bench_argcomplete.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 20, "span_ids": ["impl", "docstring", "imports"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# 10000 iterations, just for relative comparison\n# 2.7.5 3.3.2\n# FilesCompleter 75.1109 69.2116\n# FastFilesCompleter 0.7383 1.0760\nimport timeit\n\nimports = [\n \"from argcomplete.completers import FilesCompleter as completer\",\n \"from _pytest._argcomplete import FastFilesCompleter as completer\",\n]\n\ncount = 1000 # only a few seconds\nsetup = \"%s\\nfc = completer()\"\nrun = 'fc(\"/d\")'\n\n\nif __name__ == \"__main__\":\n print(timeit.timeit(run, setup=setup % imports[0], number=count))\n print(timeit.timeit(run, setup=setup % imports[1], number=count))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/empty.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/bench/empty.py__", "embedding": null, "metadata": {"file_path": "bench/empty.py", "file_name": "empty.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 3, "span_ids": ["impl"], "tokens": 22}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "for i in range(1000):\n exec(\"def test_func_%d(): pass\" % i)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/manyparam.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/bench/manyparam.py__", "embedding": null, "metadata": {"file_path": "bench/manyparam.py", "file_name": "manyparam.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 15, "span_ids": ["test_it2", "test_it", "imports", "foo"], "tokens": 40}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture(scope=\"module\", params=range(966))\ndef foo(request):\n return 
request.param\n\n\ndef test_it(foo):\n pass\n\n\ndef test_it2(foo):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/skip.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/bench/skip.py__", "embedding": null, "metadata": {"file_path": "bench/skip.py", "file_name": "skip.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 10, "span_ids": ["impl", "imports", "test_foo"], "tokens": 35}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\nSKIP = True\n\n\n@pytest.mark.parametrize(\"x\", range(5000))\ndef test_foo(x):\n if SKIP:\n pytest.skip(\"heh\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conftest.py__", "embedding": null, "metadata": {"file_path": "doc/en/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 2, "span_ids": ["impl"], "tokens": 7}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "collect_ignore = [\"conf.py\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestSpecialisedExplanations_TestSpecialisedExplanations.test_eq_attrs.assert_left_right": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestSpecialisedExplanations_TestSpecialisedExplanations.test_eq_attrs.assert_left_right", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/failure_demo.py", "file_name": "failure_demo.py", "file_type": "text/x-python", "category": "implementation", "start_line": 43, "end_line": 121, "span_ids": ["TestSpecialisedExplanations.test_eq_attrs.Foo", "TestSpecialisedExplanations.test_in_list", "TestSpecialisedExplanations.test_eq_list", "TestSpecialisedExplanations.test_eq_attrs.Foo:2", "TestSpecialisedExplanations.test_eq_set", "TestSpecialisedExplanations.test_eq_dataclass", "TestSpecialisedExplanations.test_eq_dataclass.Foo:2", "TestSpecialisedExplanations.test_eq_similar_text", "TestSpecialisedExplanations.test_eq_long_text_multiline", "TestSpecialisedExplanations.test_not_in_text_single", "TestSpecialisedExplanations.test_eq_dataclass.Foo", "TestSpecialisedExplanations.test_not_in_text_single_long", "TestSpecialisedExplanations.test_eq_list_long", "TestSpecialisedExplanations.test_eq_dict", "TestSpecialisedExplanations", 
"TestSpecialisedExplanations.test_eq_text", "TestSpecialisedExplanations.test_eq_attrs", "TestSpecialisedExplanations.test_not_in_text_single_long_term", "TestSpecialisedExplanations.test_eq_long_text", "TestSpecialisedExplanations.test_not_in_text_multiline", "TestSpecialisedExplanations.test_eq_multiline_text", "TestSpecialisedExplanations.test_eq_longer_list"], "tokens": 680}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSpecialisedExplanations:\n def test_eq_text(self):\n assert \"spam\" == \"eggs\"\n\n def test_eq_similar_text(self):\n assert \"foo 1 bar\" == \"foo 2 bar\"\n\n def test_eq_multiline_text(self):\n assert \"foo\\nspam\\nbar\" == \"foo\\neggs\\nbar\"\n\n def test_eq_long_text(self):\n a = \"1\" * 100 + \"a\" + \"2\" * 100\n b = \"1\" * 100 + \"b\" + \"2\" * 100\n assert a == b\n\n def test_eq_long_text_multiline(self):\n a = \"1\\n\" * 100 + \"a\" + \"2\\n\" * 100\n b = \"1\\n\" * 100 + \"b\" + \"2\\n\" * 100\n assert a == b\n\n def test_eq_list(self):\n assert [0, 1, 2] == [0, 1, 3]\n\n def test_eq_list_long(self):\n a = [0] * 100 + [1] + [3] * 100\n b = [0] * 100 + [2] + [3] * 100\n assert a == b\n\n def test_eq_dict(self):\n assert {\"a\": 0, \"b\": 1, \"c\": 0} == {\"a\": 0, \"b\": 2, \"d\": 0}\n\n def test_eq_set(self):\n assert {0, 10, 11, 12} == {0, 20, 21}\n\n def test_eq_longer_list(self):\n assert [1, 2] == [1, 2, 3]\n\n def test_in_list(self):\n assert 1 in [0, 2, 3, 4, 5]\n\n def test_not_in_text_multiline(self):\n text = \"some multiline\\ntext\\nwhich\\nincludes foo\\nand a\\ntail\"\n assert \"foo\" not in text\n\n def test_not_in_text_single(self):\n text = \"single foo line\"\n assert \"foo\" not in text\n\n def test_not_in_text_single_long(self):\n text = \"head \" * 50 + \"foo \" + \"tail \" * 20\n assert \"foo\" not in text\n\n def test_not_in_text_single_long_term(self):\n text = \"head \" * 50 + \"f\" * 70 + \"tail \" * 20\n assert \"f\" * 70 not in text\n\n def test_eq_dataclass(self):\n from dataclasses import dataclass\n\n @dataclass\n class Foo:\n a: int\n b: str\n\n left = Foo(1, \"b\")\n right = Foo(1, \"c\")\n assert left == right\n\n def test_eq_attrs(self):\n import attr\n\n @attr.s\n class Foo:\n a = attr.ib()\n b = attr.ib()\n\n left = Foo(1, \"b\")\n right = Foo(1, \"c\")\n assert left == right", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_test_attribute_globf.return.x_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_test_attribute_globf.return.x_1", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/failure_demo.py", "file_name": "failure_demo.py", "file_type": "text/x-python", "category": "implementation", "start_line": 124, "end_line": 161, "span_ids": ["test_attribute_multiple.Bar", "test_attribute_instance", "test_attribute.Foo", "test_attribute_instance.Foo", "test_attribute_multiple.Foo", "test_attribute_failure", "test_attribute_failure.Foo._get_b", "test_attribute_instance.Foo:2", "test_attribute_multiple", "test_attribute_multiple.Bar:2", "globf", 
"test_attribute_failure.Foo", "test_attribute.Foo:2", "test_attribute_multiple.Foo:2", "test_attribute"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_attribute():\n class Foo:\n b = 1\n\n i = Foo()\n assert i.b == 2\n\n\ndef test_attribute_instance():\n class Foo:\n b = 1\n\n assert Foo().b == 2\n\n\ndef test_attribute_failure():\n class Foo:\n def _get_b(self):\n raise Exception(\"Failed to get attrib\")\n\n b = property(_get_b)\n\n i = Foo()\n assert i.b == 2\n\n\ndef test_attribute_multiple():\n class Foo:\n b = 1\n\n class Bar:\n b = 2\n\n assert Foo().b == Bar().b\n\n\ndef globf(x):\n return x + 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestMoreErrors_TestMoreErrors.test_try_finally.try_.finally_.x.0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestMoreErrors_TestMoreErrors.test_try_finally.try_.finally_.x.0", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/failure_demo.py", "file_name": "failure_demo.py", "file_type": "text/x-python", "category": "implementation", "start_line": 206, "end_line": 253, "span_ids": ["TestMoreErrors.test_z2_type_error", "TestMoreErrors.test_z1_unpack_error", "TestMoreErrors.test_startswith", "TestMoreErrors.test_instance", "TestMoreErrors.test_compare", "TestMoreErrors.test_startswith_nested", "TestMoreErrors.test_try_finally", "TestMoreErrors.test_complex_error", "TestMoreErrors.test_global_func", "TestMoreErrors"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMoreErrors:\n def test_complex_error(self):\n def f():\n return 44\n\n def g():\n return 43\n\n somefunc(f(), g())\n\n def test_z1_unpack_error(self):\n items = []\n a, b = items\n\n def test_z2_type_error(self):\n items = 3\n a, b = items\n\n def test_startswith(self):\n s = \"123\"\n g = \"456\"\n assert s.startswith(g)\n\n def test_startswith_nested(self):\n def f():\n return \"123\"\n\n def g():\n return \"456\"\n\n assert f().startswith(g())\n\n def test_global_func(self):\n assert isinstance(globf(42), float)\n\n def test_instance(self):\n self.x = 6 * 7\n assert self.x != 42\n\n def test_compare(self):\n assert globf(10) < 5\n\n def test_try_finally(self):\n x = 1\n try:\n assert x == 0\n finally:\n x = 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestCustomAssertMsg_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestCustomAssertMsg_", "embedding": null, "metadata": {"file_path": 
"doc/en/example/assertion/failure_demo.py", "file_name": "failure_demo.py", "file_type": "text/x-python", "category": "implementation", "start_line": 256, "end_line": 283, "span_ids": ["TestCustomAssertMsg.test_single_line.A", "TestCustomAssertMsg.test_multiline.A", "TestCustomAssertMsg.test_single_line.A:2", "TestCustomAssertMsg.test_custom_repr.JSON", "TestCustomAssertMsg", "TestCustomAssertMsg.test_multiline", "TestCustomAssertMsg.test_single_line", "TestCustomAssertMsg.test_custom_repr", "TestCustomAssertMsg.test_custom_repr.JSON:2", "TestCustomAssertMsg.test_multiline.A:2"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCustomAssertMsg:\n def test_single_line(self):\n class A:\n a = 1\n\n b = 2\n assert A.a == b, \"A.a appears not to be b\"\n\n def test_multiline(self):\n class A:\n a = 1\n\n b = 2\n assert (\n A.a == b\n ), \"A.a appears not to be b\\nor does not appear to be b\\none of those\"\n\n def test_custom_repr(self):\n class JSON:\n a = 1\n\n def __repr__(self):\n return \"This is JSON\\n{\\n 'foo': 'bar'\\n}\"\n\n a = JSON()\n b = 2\n assert a.a == b, a", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/global_testmodule_config/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/global_testmodule_config/conftest.py__", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/global_testmodule_config/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 15, "span_ids": ["impl", "imports", "pytest_runtest_setup"], "tokens": 79}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os.path\n\nimport pytest\n\nmydir = os.path.dirname(__file__)\n\n\ndef pytest_runtest_setup(item):\n if isinstance(item, pytest.Function):\n if not item.fspath.relto(mydir):\n return\n mod = item.getparent(pytest.Module).obj\n if hasattr(mod, \"hello\"):\n print(f\"mod.hello {mod.hello!r}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/global_testmodule_config/test_hello_world.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/global_testmodule_config/test_hello_world.py__", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/global_testmodule_config/test_hello_world.py", "file_name": "test_hello_world.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 6, "span_ids": ["test_func", "impl"], "tokens": 11}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "hello = \"world\"\n\n\ndef test_func():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/test_failures.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/test_failures.py__", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/test_failures.py", "file_name": "test_failures.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 14, "span_ids": ["impl", "imports", "test_failure_demo_fails_properly"], "tokens": 101}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os.path\nimport shutil\n\nfailure_demo = os.path.join(os.path.dirname(__file__), \"failure_demo.py\")\npytest_plugins = (\"pytester\",)\n\n\ndef test_failure_demo_fails_properly(testdir):\n target = testdir.tmpdir.join(os.path.basename(failure_demo))\n shutil.copy(failure_demo, target)\n result = testdir.runpytest(target, syspathinsert=True)\n result.stdout.fnmatch_lines([\"*44 failed*\"])\n assert result.ret != 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/test_setup_flow_example.py_setup_module_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/test_setup_flow_example.py_setup_module_", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/test_setup_flow_example.py", "file_name": "test_setup_flow_example.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 45, "span_ids": ["TestStateFullThing.test_42", "TestStateFullThing.setup_class", "TestStateFullThing.test_23", "teardown_module", "setup_module", "TestStateFullThing.teardown_class", "TestStateFullThing.setup_method", "TestStateFullThing"], "tokens": 279}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def setup_module(module):\n module.TestStateFullThing.classcount = 0\n\n\nclass TestStateFullThing:\n def setup_class(cls):\n cls.classcount += 1\n\n def teardown_class(cls):\n cls.classcount -= 1\n\n def setup_method(self, method):\n self.id = eval(method.__name__[5:])\n\n def test_42(self):\n assert self.classcount == 1\n assert self.id == 42\n\n def test_23(self):\n assert self.classcount == 1\n assert self.id == 23\n\n\ndef teardown_module(module):\n assert module.TestStateFullThing.classcount == 0\n\n\n\"\"\" For this example the control 
flow happens as follows::\n import test_setup_flow_example\n setup_module(test_setup_flow_example)\n setup_class(TestStateFullThing)\n instance = TestStateFullThing()\n setup_method(instance, instance.test_42)\n instance.test_42()\n setup_method(instance, instance.test_23)\n instance.test_23()\n teardown_class(TestStateFullThing)\n teardown_module(test_setup_flow_example)\n\nNote that ``setup_class(TestStateFullThing)`` is called and not\n``TestStateFullThing.setup_class()`` which would require you\nto insert ``setup_class = classmethod(setup_class)`` to make\nyour setup function callable.\n\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/conftest.py__", "embedding": null, "metadata": {"file_path": "doc/en/example/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 2, "span_ids": ["impl"], "tokens": 7}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "collect_ignore = [\"nonpython\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py___python2.return.Python_request_param_pyt": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py___python2.return.Python_request_param_pyt", "embedding": null, "metadata": {"file_path": "doc/en/example/multipython.py", "file_name": "multipython.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 22, "span_ids": ["impl", "python2", "docstring", "imports", "python1"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"\nmodule containing a parametrized tests testing cross-python\nserialization via the pickle module.\n\"\"\"\nimport shutil\nimport subprocess\nimport textwrap\n\nimport pytest\n\npythonlist = [\"python3.5\", \"python3.6\", \"python3.7\"]\n\n\n@pytest.fixture(params=pythonlist)\ndef python1(request, tmpdir):\n picklefile = tmpdir.join(\"data.pickle\")\n return Python(request.param, picklefile)\n\n\n@pytest.fixture(params=pythonlist)\ndef python2(request, python1):\n return Python(request.param, python1.picklefile)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py_Python_Python.dumps.subprocess_check_call_se": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py_Python_Python.dumps.subprocess_check_call_se", "embedding": null, "metadata": {"file_path": "doc/en/example/multipython.py", "file_name": "multipython.py", "file_type": "text/x-python", "category": "implementation", "start_line": 25, "end_line": 46, "span_ids": ["Python", "Python.dumps"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Python:\n def __init__(self, version, picklefile):\n self.pythonpath = shutil.which(version)\n if not self.pythonpath:\n pytest.skip(f\"{version!r} not found\")\n self.picklefile = picklefile\n\n def dumps(self, obj):\n dumpfile = self.picklefile.dirpath(\"dump.py\")\n dumpfile.write(\n textwrap.dedent(\n r\"\"\"\n import pickle\n f = open({!r}, 'wb')\n s = pickle.dump({!r}, f, protocol=2)\n f.close()\n \"\"\".format(\n str(self.picklefile), obj\n )\n )\n )\n subprocess.check_call((self.pythonpath, str(dumpfile)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py_Python.load_and_is_true_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/multipython.py_Python.load_and_is_true_", "embedding": null, "metadata": {"file_path": "doc/en/example/multipython.py", "file_name": "multipython.py", "file_type": "text/x-python", "category": "implementation", "start_line": 48, "end_line": 73, "span_ids": ["test_basic_objects", "Python.load_and_is_true"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Python:\n\n def load_and_is_true(self, expression):\n loadfile = self.picklefile.dirpath(\"load.py\")\n loadfile.write(\n textwrap.dedent(\n r\"\"\"\n import pickle\n f = open({!r}, 'rb')\n obj = pickle.load(f)\n f.close()\n res = eval({!r})\n if not res:\n raise SystemExit(1)\n \"\"\".format(\n str(self.picklefile), expression\n )\n )\n )\n print(loadfile)\n subprocess.check_call((self.pythonpath, str(loadfile)))\n\n\n@pytest.mark.parametrize(\"obj\", [42, {}, {1: 3}])\ndef test_basic_objects(python1, python2, obj):\n python1.dumps(obj)\n python2.load_and_is_true(f\"obj == {obj}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/pythoncollection.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/pythoncollection.py__", "embedding": null, "metadata": {"file_path": "doc/en/example/pythoncollection.py", "file_name": "pythoncollection.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 15, "span_ids": ["test_function", "TestClass.test_anothermethod", "TestClass", 
"docstring", "TestClass.test_method"], "tokens": 46}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# run this with $ pytest --collect-only test_collectonly.py\n#\n\n\ndef test_function():\n pass\n\n\nclass TestClass:\n def test_method(self):\n pass\n\n def test_anothermethod(self):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/xfail_demo.py_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/xfail_demo.py_pytest_", "embedding": null, "metadata": {"file_path": "doc/en/example/xfail_demo.py", "file_name": "xfail_demo.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 39, "span_ids": ["test_hello6", "impl", "test_hello5", "test_hello", "test_hello2", "imports", "test_hello7", "test_hello3", "test_hello4"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\nxfail = pytest.mark.xfail\n\n\n@xfail\ndef test_hello():\n assert 0\n\n\n@xfail(run=False)\ndef test_hello2():\n assert 0\n\n\n@xfail(\"hasattr(os, 'sep')\")\ndef test_hello3():\n assert 0\n\n\n@xfail(reason=\"bug 110\")\ndef test_hello4():\n assert 0\n\n\n@xfail('pytest.__version__[0] != \"17\"')\ndef test_hello5():\n assert 0\n\n\ndef test_hello6():\n pytest.xfail(\"reason\")\n\n\n@xfail(raises=IndexError)\ndef test_hello7():\n x = []\n x[1] = 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_json_get_issues.while_1_.if_not_another_page_.return.issues": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_json_get_issues.while_1_.if_not_another_page_.return.issues", "embedding": null, "metadata": {"file_path": "extra/get_issues.py", "file_name": "get_issues.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 30, "span_ids": ["impl", "imports", "get_issues"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import json\nfrom pathlib import Path\n\nimport requests\n\nissues_url = \"https://api.github.com/repos/pytest-dev/pytest/issues\"\n\n\ndef get_issues():\n issues = []\n url = issues_url\n while 1:\n get_data = {\"state\": \"all\"}\n r = requests.get(url, params=get_data)\n data = r.json()\n if r.status_code == 403:\n # API request limit exceeded\n print(data[\"message\"])\n exit(1)\n issues.extend(data)\n\n # Look for 
next page\n links = requests.utils.parse_header_links(r.headers[\"Link\"])\n another_page = False\n for link in links:\n if link[\"rel\"] == \"next\":\n url = link[\"url\"]\n another_page = True\n if not another_page:\n return issues", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_main__get_kind.return._issue_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_main__get_kind.return._issue_", "embedding": null, "metadata": {"file_path": "extra/get_issues.py", "file_name": "get_issues.py", "file_type": "text/x-python", "category": "implementation", "start_line": 33, "end_line": 52, "span_ids": ["main", "_get_kind"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def main(args):\n cachefile = Path(args.cache)\n if not cachefile.exists() or args.refresh:\n issues = get_issues()\n cachefile.write_text(json.dumps(issues), \"utf-8\")\n else:\n issues = json.loads(cachefile.read_text(\"utf-8\"))\n\n open_issues = [x for x in issues if x[\"state\"] == \"open\"]\n\n open_issues.sort(key=lambda x: x[\"number\"])\n report(open_issues)\n\n\ndef _get_kind(issue):\n labels = [label[\"name\"] for label in issue[\"labels\"]]\n for key in (\"bug\", \"enhancement\", \"proposal\"):\n if key in labels:\n return key\n return \"issue\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_report_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/extra/get_issues.py_report_", "embedding": null, "metadata": {"file_path": "extra/get_issues.py", "file_name": "get_issues.py", "file_type": "text/x-python", "category": "implementation", "start_line": 55, "end_line": 86, "span_ids": ["report", "impl:3"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def report(issues):\n for issue in issues:\n title = issue[\"title\"]\n # body = issue[\"body\"]\n kind = _get_kind(issue)\n status = issue[\"state\"]\n number = issue[\"number\"]\n link = \"https://github.com/pytest-dev/pytest/issues/%s/\" % number\n print(\"----\")\n print(status, kind, link)\n print(title)\n # print()\n # lines = body.split(\"\\n\")\n # print(\"\\n\".join(lines[:3]))\n # if len(lines) > 3 or len(body) > 240:\n # print(\"...\")\n print(\"\\n\\nFound %s open issues\" % len(issues))\n\n\nif __name__ == \"__main__\":\n import argparse\n\n parser = argparse.ArgumentParser(\"process bitbucket issues\")\n parser.add_argument(\n \"--refresh\", action=\"store_true\", help=\"invalidate cache, refresh issues\"\n )\n parser.add_argument(\n \"--cache\", action=\"store\", default=\"issues.json\", 
help=\"cache file\"\n )\n args = parser.parse_args()\n main(args)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/extra/setup-py.test/setup.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/extra/setup-py.test/setup.py__", "embedding": null, "metadata": {"file_path": "extra/setup-py.test/setup.py", "file_name": "setup.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 12, "span_ids": ["impl", "imports"], "tokens": 78}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nfrom distutils.core import setup\n\nif __name__ == \"__main__\":\n if \"sdist\" not in sys.argv[1:]:\n raise ValueError(\"please use 'pytest' pypi package instead of 'py.test'\")\n setup(\n name=\"py.test\",\n version=\"0.0\",\n description=\"please use 'pytest' for installation\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/__init__.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/__init__.py__", "embedding": null, "metadata": {"file_path": "src/_pytest/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 9, "span_ids": ["impl"], "tokens": 56}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "__all__ = [\"__version__\"]\n\ntry:\n from ._version import version as __version__\nexcept ImportError:\n # broken installation, we don't even try\n # unknown only works because we do poor mans version compare\n __version__ = \"unknown\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_argcomplete.py_FastFilesCompleter_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_argcomplete.py_FastFilesCompleter_", "embedding": null, "metadata": {"file_path": "src/_pytest/_argcomplete.py", "file_name": "_argcomplete.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 118, "span_ids": ["FastFilesCompleter.__call__", "impl", "FastFilesCompleter"], "tokens": 317}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FastFilesCompleter:\n \"\"\"Fast file completer class.\"\"\"\n\n def __init__(self, directories: bool = 
True) -> None:\n self.directories = directories\n\n def __call__(self, prefix: str, **kwargs: Any) -> List[str]:\n # Only called on non option completions.\n if os.path.sep in prefix[1:]:\n prefix_dir = len(os.path.dirname(prefix) + os.path.sep)\n else:\n prefix_dir = 0\n completion = []\n globbed = []\n if \"*\" not in prefix and \"?\" not in prefix:\n # We are on unix, otherwise no bash.\n if not prefix or prefix[-1] == os.path.sep:\n globbed.extend(glob(prefix + \".*\"))\n prefix += \"*\"\n globbed.extend(glob(prefix))\n for x in sorted(globbed):\n if os.path.isdir(x):\n x += \"/\"\n # Append stripping the prefix (like bash, not like compgen).\n completion.append(x[prefix_dir:])\n return completion\n\n\nif os.environ.get(\"_ARGCOMPLETE\"):\n try:\n import argcomplete.completers\n except ImportError:\n sys.exit(-1)\n filescompleter: Optional[FastFilesCompleter] = FastFilesCompleter()\n\n def try_argcomplete(parser: argparse.ArgumentParser) -> None:\n argcomplete.autocomplete(parser, always_complete_options=False)\n\n\nelse:\n\n def try_argcomplete(parser: argparse.ArgumentParser) -> None:\n pass\n\n filescompleter = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Frame_Frame.getargs.return.retval": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Frame_Frame.getargs.return.retval", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 125, "end_line": 184, "span_ids": ["Frame.f_locals", "Frame.f_globals", "Frame.repr", "Frame", "Frame.statement", "Frame.lineno", "Frame.eval", "Frame.code", "Frame.getargs"], "tokens": 395}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Frame:\n \"\"\"Wrapper around a Python frame holding f_locals and f_globals\n in which expressions can be evaluated.\"\"\"\n\n __slots__ = (\"raw\",)\n\n def __init__(self, frame: FrameType) -> None:\n self.raw = frame\n\n @property\n def lineno(self) -> int:\n return self.raw.f_lineno - 1\n\n @property\n def f_globals(self) -> Dict[str, Any]:\n return self.raw.f_globals\n\n @property\n def f_locals(self) -> Dict[str, Any]:\n return self.raw.f_locals\n\n @property\n def code(self) -> Code:\n return Code(self.raw.f_code)\n\n @property\n def statement(self) -> \"Source\":\n \"\"\"Statement this frame is at.\"\"\"\n if self.code.fullsource is None:\n return Source(\"\")\n return self.code.fullsource.getstatement(self.lineno)\n\n def eval(self, code, **vars):\n \"\"\"Evaluate 'code' in the frame.\n\n 'vars' are optional additional local variables.\n\n Returns the result of the evaluation.\n \"\"\"\n f_locals = self.f_locals.copy()\n f_locals.update(vars)\n return eval(code, self.f_globals, f_locals)\n\n def repr(self, object: object) -> str:\n \"\"\"Return a 'safe' (non-recursive, one-line) string repr for 'object'.\"\"\"\n return saferepr(object)\n\n def getargs(self, var: bool = False):\n \"\"\"Return a list of tuples (name, value) for all arguments.\n\n If 
'var' is set True, also include the variable and keyword arguments\n when present.\n \"\"\"\n retval = []\n for arg in self.code.getargs(var):\n try:\n retval.append((arg, self.f_locals[arg]))\n except KeyError:\n pass # this can occur when using Psyco\n return retval", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.cut_Traceback.cut.return.self": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.cut_Traceback.cut.return.self", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 339, "end_line": 371, "span_ids": ["Traceback.cut"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Traceback(List[TracebackEntry]):\n\n def cut(\n self,\n path: Optional[Union[Path, str]] = None,\n lineno: Optional[int] = None,\n firstlineno: Optional[int] = None,\n excludepath: Optional[Path] = None,\n ) -> \"Traceback\":\n \"\"\"Return a Traceback instance wrapping part of this Traceback.\n\n By providing any combination of path, lineno and firstlineno, the\n first frame to start the to-be-returned traceback is determined.\n\n This allows cutting the first part of a Traceback instance e.g.\n for formatting reasons (removing some uninteresting bits that deal\n with handling of the exception/traceback).\n \"\"\"\n for x in self:\n code = x.frame.code\n codepath = code.path\n if path is not None and codepath != path:\n continue\n if (\n excludepath is not None\n and isinstance(codepath, Path)\n and excludepath in codepath.parents\n ):\n continue\n if lineno is not None and x.lineno != lineno:\n continue\n if firstlineno is not None and x.frame.code.firstlineno != firstlineno:\n continue\n return Traceback(x._rawentry, self._excinfo)\n return self", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.getrepr_ExceptionInfo.getrepr.return.fmt_repr_excinfo_self_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.getrepr_ExceptionInfo.getrepr.return.fmt_repr_excinfo_self_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 594, "end_line": 652, "span_ids": ["ExceptionInfo.getrepr"], "tokens": 418}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False)\nclass ExceptionInfo(Generic[_E]):\n\n def getrepr(\n self,\n 
showlocals: bool = False,\n style: \"_TracebackStyle\" = \"long\",\n abspath: bool = False,\n tbfilter: bool = True,\n funcargs: bool = False,\n truncate_locals: bool = True,\n chain: bool = True,\n ) -> Union[\"ReprExceptionInfo\", \"ExceptionChainRepr\"]:\n \"\"\"Return str()able representation of this exception info.\n\n :param bool showlocals:\n Show locals per traceback entry.\n Ignored if ``style==\"native\"``.\n\n :param str style:\n long|short|no|native|value traceback style.\n\n :param bool abspath:\n If paths should be changed to absolute or left unchanged.\n\n :param bool tbfilter:\n Hide entries that contain a local variable ``__tracebackhide__==True``.\n Ignored if ``style==\"native\"``.\n\n :param bool funcargs:\n Show fixtures (\"funcargs\" for legacy purposes) per traceback entry.\n\n :param bool truncate_locals:\n With ``showlocals==True``, make sure locals can be safely represented as strings.\n\n :param bool chain:\n If chained exceptions in Python 3 should be shown.\n\n .. versionchanged:: 3.9\n\n Added the ``chain`` parameter.\n \"\"\"\n if style == \"native\":\n return ReprExceptionInfo(\n ReprTracebackNative(\n traceback.format_exception(\n self.type, self.value, self.traceback[0]._rawentry\n )\n ),\n self._getreprcrash(),\n )\n\n fmt = FormattedExcinfo(\n showlocals=showlocals,\n style=style,\n abspath=abspath,\n tbfilter=tbfilter,\n funcargs=funcargs,\n truncate_locals=truncate_locals,\n chain=chain,\n )\n return fmt.repr_excinfo(self)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo._makepath_FormattedExcinfo.repr_traceback.return.ReprTraceback_entries_ex": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo._makepath_FormattedExcinfo.repr_traceback.return.ReprTraceback_entries_ex", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 822, "end_line": 853, "span_ids": ["FormattedExcinfo.repr_traceback", "FormattedExcinfo._makepath"], "tokens": 281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n\n def _makepath(self, path: Union[Path, str]) -> str:\n if not self.abspath and isinstance(path, Path):\n try:\n np = bestrelpath(Path.cwd(), path)\n except OSError:\n return str(path)\n if len(np) < len(str(path)):\n return np\n return str(path)\n\n def repr_traceback(self, excinfo: ExceptionInfo[BaseException]) -> \"ReprTraceback\":\n traceback = excinfo.traceback\n if self.tbfilter:\n traceback = traceback.filter()\n\n if isinstance(excinfo.value, RecursionError):\n traceback, extraline = self._truncate_recursive_traceback(traceback)\n else:\n extraline = None\n\n last = traceback[-1]\n entries = []\n if self.style == \"value\":\n reprentry = self.repr_traceback_entry(last, excinfo)\n entries.append(reprentry)\n return ReprTraceback(entries, None, style=self.style)\n\n for index, entry in enumerate(traceback):\n einfo = (last == entry) and excinfo 
or None\n reprentry = self.repr_traceback_entry(entry, einfo)\n entries.append(reprentry)\n return ReprTraceback(entries, extraline, style=self.style)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo._truncate_recursive_traceback_FormattedExcinfo._truncate_recursive_traceback.return.traceback_extraline": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo._truncate_recursive_traceback_FormattedExcinfo._truncate_recursive_traceback.return.traceback_extraline", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 855, "end_line": 895, "span_ids": ["FormattedExcinfo._truncate_recursive_traceback"], "tokens": 392}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n\n def _truncate_recursive_traceback(\n self, traceback: Traceback\n ) -> Tuple[Traceback, Optional[str]]:\n \"\"\"Truncate the given recursive traceback trying to find the starting\n point of the recursion.\n\n The detection is done by going through each traceback entry and\n finding the point in which the locals of the frame are equal to the\n locals of a previous frame (see ``recursionindex()``).\n\n Handle the situation where the recursion process might raise an\n exception (for example comparing numpy arrays using equality raises a\n TypeError), in which case we do our best to warn the user of the\n error and show a limited traceback.\n \"\"\"\n try:\n recursionindex = traceback.recursionindex()\n except Exception as e:\n max_frames = 10\n extraline: Optional[str] = (\n \"!!! Recursion error detected, but an error occurred locating the origin of recursion.\\n\"\n \" The following exception happened when comparing locals in the stack frame:\\n\"\n \" {exc_type}: {exc_msg}\\n\"\n \" Displaying first and last {max_frames} stack frames out of {total}.\"\n ).format(\n exc_type=type(e).__name__,\n exc_msg=str(e),\n max_frames=max_frames,\n total=len(traceback),\n )\n # Type ignored because adding two instaces of a List subtype\n # currently incorrectly has type List instead of the subtype.\n traceback = traceback[:max_frames] + traceback[-max_frames:] # type: ignore\n else:\n if recursionindex is not None:\n extraline = \"!!! 
Recursion detected (same locals & position)\"\n traceback = traceback[: recursionindex + 1]\n else:\n extraline = None\n\n return traceback, extraline", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprFuncArgs_ReprFuncArgs.toterminal.if_self_args_.tw_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprFuncArgs_ReprFuncArgs.toterminal.if_self_args_.tw_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1166, "end_line": 1186, "span_ids": ["ReprFuncArgs.toterminal", "ReprFuncArgs"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(eq=False)\nclass ReprFuncArgs(TerminalRepr):\n args = attr.ib(type=Sequence[Tuple[str, object]])\n\n def toterminal(self, tw: TerminalWriter) -> None:\n if self.args:\n linesofar = \"\"\n for name, value in self.args:\n ns = f\"{name} = {value}\"\n if len(ns) + len(linesofar) + 2 > tw.fullwidth:\n if linesofar:\n tw.line(linesofar)\n linesofar = ns\n else:\n if linesofar:\n linesofar += \", \" + ns\n else:\n linesofar = ns\n if linesofar:\n tw.line(linesofar)\n tw.line(\"\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_filter_traceback_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_filter_traceback_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1236, "end_line": 1265, "span_ids": ["filter_traceback"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def filter_traceback(entry: TracebackEntry) -> bool:\n \"\"\"Return True if a TracebackEntry instance should be included in tracebacks.\n\n We hide traceback entries of:\n\n * dynamically generated code (no code to show up for it);\n * internal traceback from pytest or its internal libraries, py and pluggy.\n \"\"\"\n # entry.path might sometimes return a str object when the entry\n # points to dynamically generated code.\n # See https://bitbucket.org/pytest-dev/py/issues/71.\n raw_filename = entry.frame.code.raw.co_filename\n is_generated = \"<\" in raw_filename and \">\" in raw_filename\n if is_generated:\n return False\n\n # entry.path might point to a non-existing file, in which case it will\n # also return a str object. 
See #1133.\n p = Path(entry.path)\n\n parents = p.parents\n if _PLUGGY_DIR in parents:\n return False\n if _PYTEST_DIR in parents:\n return False\n if _PY_DIR in parents:\n return False\n\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_get_statement_startend2_get_statement_startend2.return.start_end": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_get_statement_startend2_get_statement_startend2.return.start_end", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/source.py", "file_name": "source.py", "file_type": "text/x-python", "category": "implementation", "start_line": 146, "end_line": 165, "span_ids": ["get_statement_startend2"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_statement_startend2(lineno: int, node: ast.AST) -> Tuple[int, Optional[int]]:\n # Flatten all statements and except handlers into one lineno-list.\n # AST's line numbers start indexing at 1.\n values: List[int] = []\n for x in ast.walk(node):\n if isinstance(x, (ast.stmt, ast.ExceptHandler)):\n values.append(x.lineno - 1)\n for name in (\"finalbody\", \"orelse\"):\n val: Optional[List[ast.stmt]] = getattr(x, name, None)\n if val:\n # Treat the finally/orelse part as its own statement.\n values.append(val[0].lineno - 1 - 1)\n values.sort()\n insert_index = bisect_right(values, lineno)\n start = values[insert_index - 1]\n if insert_index >= len(values):\n end = None\n else:\n end = values[insert_index]\n return start, end", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_getstatementrange_ast_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_getstatementrange_ast_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/source.py", "file_name": "source.py", "file_type": "text/x-python", "category": "implementation", "start_line": 168, "end_line": 213, "span_ids": ["getstatementrange_ast"], "tokens": 396}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def getstatementrange_ast(\n lineno: int,\n source: Source,\n assertion: bool = False,\n astnode: Optional[ast.AST] = None,\n) -> Tuple[ast.AST, int, int]:\n if astnode is None:\n content = str(source)\n # See #4260:\n # Don't produce duplicate warnings when compiling source to find AST.\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n astnode = ast.parse(content, \"source\", \"exec\")\n\n start, end = get_statement_startend2(lineno, astnode)\n # We need to correct the end:\n # - ast-parsing 
strips comments\n # - there might be empty lines\n # - we might have lesser indented code blocks at the end\n if end is None:\n end = len(source.lines)\n\n if end > start + 1:\n # Make sure we don't span differently indented code blocks\n # by using the BlockFinder helper used which inspect.getsource() uses itself.\n block_finder = inspect.BlockFinder()\n # If we start with an indented line, put blockfinder to \"started\" mode.\n block_finder.started = source.lines[start][0].isspace()\n it = ((x + \"\\n\") for x in source.lines[start:end])\n try:\n for tok in tokenize.generate_tokens(lambda: next(it)):\n block_finder.tokeneater(*tok)\n except (inspect.EndOfBlock, IndentationError):\n end = block_finder.last + start\n except Exception:\n pass\n\n # The end might still point to a comment or empty line, correct it.\n while end:\n line = source.lines[end - 1].lstrip()\n if line.startswith(\"#\") or not line:\n end -= 1\n else:\n break\n return astnode, start, end", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_register_assert_rewrite_register_assert_rewrite.importhook_mark_rewrite_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_register_assert_rewrite_register_assert_rewrite.importhook_mark_rewrite_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 47, "end_line": 70, "span_ids": ["register_assert_rewrite"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def register_assert_rewrite(*names: str) -> None:\n \"\"\"Register one or more module names to be rewritten on import.\n\n This function will make sure that this module or all modules inside\n the package will get their assert statements rewritten.\n Thus you should make sure to call this before the module is\n actually imported, usually in your __init__.py if you are a plugin\n using a package.\n\n :raises TypeError: If the given module names are not strings.\n \"\"\"\n for name in names:\n if not isinstance(name, str):\n msg = \"expected module names as *args, got {0} instead\" # type: ignore[unreachable]\n raise TypeError(msg.format(repr(names)))\n for hook in sys.meta_path:\n if isinstance(hook, rewrite.AssertionRewritingHook):\n importhook = hook\n break\n else:\n # TODO(typing): Add a protocol for mark_rewrite() and use it\n # for importhook and for PytestPluginManager.rewrite_hook.\n importhook = DummyRewriteHook() # type: ignore\n importhook.mark_rewrite(*names)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_DummyRewriteHook_pytest_collection.if_assertstate_.if_assertstate_hook_is_no.assertstate_hook_set_sess": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_DummyRewriteHook_pytest_collection.if_assertstate_.if_assertstate_hook_is_no.assertstate_hook_set_sess", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 112, "span_ids": ["DummyRewriteHook", "AssertionState", "DummyRewriteHook.mark_rewrite", "pytest_collection", "install_importhook"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DummyRewriteHook:\n \"\"\"A no-op import hook for when rewriting is disabled.\"\"\"\n\n def mark_rewrite(self, *names: str) -> None:\n pass\n\n\nclass AssertionState:\n \"\"\"State for the assertion plugin.\"\"\"\n\n def __init__(self, config: Config, mode) -> None:\n self.mode = mode\n self.trace = config.trace.root.get(\"assertion\")\n self.hook: Optional[rewrite.AssertionRewritingHook] = None\n\n\ndef install_importhook(config: Config) -> rewrite.AssertionRewritingHook:\n \"\"\"Try to install the rewrite hook, raise SystemError if it fails.\"\"\"\n config._store[assertstate_key] = AssertionState(config, \"rewrite\")\n config._store[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config)\n sys.meta_path.insert(0, hook)\n config._store[assertstate_key].trace(\"installed rewrite import hook\")\n\n def undo() -> None:\n hook = config._store[assertstate_key].hook\n if hook is not None and hook in sys.meta_path:\n sys.meta_path.remove(hook)\n\n config.add_cleanup(undo)\n return hook\n\n\ndef pytest_collection(session: \"Session\") -> None:\n # This hook is only called when test modules are collected\n # so for example not in the master process of pytest-xdist\n # (which does not collect test modules).\n assertstate = session.config._store.get(assertstate_key, None)\n if assertstate:\n if assertstate.hook is not None:\n assertstate.hook.set_session(session)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._early_rewrite_bailout_AssertionRewritingHook._early_rewrite_bailout.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._early_rewrite_bailout_AssertionRewritingHook._early_rewrite_bailout.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 171, "end_line": 208, "span_ids": ["AssertionRewritingHook._early_rewrite_bailout"], "tokens": 400}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n\n def 
_early_rewrite_bailout(self, name: str, state: \"AssertionState\") -> bool:\n \"\"\"A fast way to get out of rewriting modules.\n\n Profiling has shown that the call to PathFinder.find_spec (inside of\n the find_spec from this class) is a major slowdown, so, this method\n tries to filter what we're sure won't be rewritten before getting to\n it.\n \"\"\"\n if self.session is not None and not self._session_paths_checked:\n self._session_paths_checked = True\n for initial_path in self.session._initialpaths:\n # Make something as c:/projects/my_project/path.py ->\n # ['c:', 'projects', 'my_project', 'path.py']\n parts = str(initial_path).split(os.path.sep)\n # add 'path' to basenames to be checked.\n self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0])\n\n # Note: conftest already by default in _basenames_to_check_rewrite.\n parts = name.split(\".\")\n if parts[-1] in self._basenames_to_check_rewrite:\n return False\n\n # For matching the name it must be as if it was a filename.\n path = PurePath(os.path.sep.join(parts) + \".py\")\n\n for pat in self.fnpats:\n # if the pattern contains subdirectories (\"tests/**.py\" for example) we can't bail out based\n # on the name alone because we need to match against the full path\n if os.path.dirname(pat):\n return False\n if fnmatch_ex(pat, path):\n return False\n\n if self._is_marked_for_rewrite(name, state):\n return False\n\n state.trace(f\"early skip of rewriting module: {name}\")\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._should_rewrite_AssertionRewritingHook._should_rewrite.return.self__is_marked_for_rewri": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._should_rewrite_AssertionRewritingHook._should_rewrite.return.self__is_marked_for_rewri", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 210, "end_line": 229, "span_ids": ["AssertionRewritingHook._should_rewrite"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n\n def _should_rewrite(self, name: str, fn: str, state: \"AssertionState\") -> bool:\n # always rewrite conftest files\n if os.path.basename(fn) == \"conftest.py\":\n state.trace(f\"rewriting conftest file: {fn!r}\")\n return True\n\n if self.session is not None:\n if self.session.isinitpath(absolutepath(fn)):\n state.trace(f\"matched test file (was specified on cmdline): {fn!r}\")\n return True\n\n # modules not passed explicitly on the command line are only\n # rewritten if they match the naming convention for test files\n fn_path = PurePath(fn)\n for pat in self.fnpats:\n if fnmatch_ex(pat, fn_path):\n state.trace(f\"matched test file {fn!r}\")\n return True\n\n return self._is_marked_for_rewrite(name, state)", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__read_pyc__read_pyc.with_fp_.return.co": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__read_pyc__read_pyc.with_fp_.return.co", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 359, "end_line": 407, "span_ids": ["_read_pyc"], "tokens": 527}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _read_pyc(\n source: Path, pyc: Path, trace: Callable[[str], None] = lambda x: None\n) -> Optional[types.CodeType]:\n \"\"\"Possibly read a pytest pyc containing rewritten code.\n\n Return rewritten code if successful or None if not.\n \"\"\"\n try:\n fp = open(os.fspath(pyc), \"rb\")\n except OSError:\n return None\n with fp:\n # https://www.python.org/dev/peps/pep-0552/\n has_flags = sys.version_info >= (3, 7)\n try:\n stat_result = os.stat(os.fspath(source))\n mtime = int(stat_result.st_mtime)\n size = stat_result.st_size\n data = fp.read(16 if has_flags else 12)\n except OSError as e:\n trace(f\"_read_pyc({source}): OSError {e}\")\n return None\n # Check for invalid or out of date pyc file.\n if len(data) != (16 if has_flags else 12):\n trace(\"_read_pyc(%s): invalid pyc (too short)\" % source)\n return None\n if data[:4] != importlib.util.MAGIC_NUMBER:\n trace(\"_read_pyc(%s): invalid pyc (bad magic number)\" % source)\n return None\n if has_flags and data[4:8] != b\"\\x00\\x00\\x00\\x00\":\n trace(\"_read_pyc(%s): invalid pyc (unsupported flags)\" % source)\n return None\n mtime_data = data[8 if has_flags else 4 : 12 if has_flags else 8]\n if int.from_bytes(mtime_data, \"little\") != mtime & 0xFFFFFFFF:\n trace(\"_read_pyc(%s): out of date\" % source)\n return None\n size_data = data[12 if has_flags else 8 : 16 if has_flags else 12]\n if int.from_bytes(size_data, \"little\") != size & 0xFFFFFFFF:\n trace(\"_read_pyc(%s): invalid pyc (incorrect size)\" % source)\n return None\n try:\n co = marshal.load(fp)\n except Exception as e:\n trace(f\"_read_pyc({source}): marshal.load error {e}\")\n return None\n if not isinstance(co, types.CodeType):\n trace(\"_read_pyc(%s): not a code object\" % source)\n return None\n return co", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__format_assertmsg__format_assertmsg.return.obj": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__format_assertmsg__format_assertmsg.return.obj", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 433, "end_line": 452, "span_ids": ["_format_assertmsg"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _format_assertmsg(obj: object) -> str:\n r\"\"\"Format the custom assertion message given.\n\n For strings this simply replaces newlines with '\\n~' so that\n util.format_explanation() will preserve them instead of escaping\n newlines. For other objects saferepr() is used first.\n \"\"\"\n # reprlib appears to have a bug which means that if a string\n # contains a newline it gets escaped, however if an object has a\n # .__repr__() which contains newlines it does not get escaped.\n # However in either case we want to preserve the newline.\n replaces = [(\"\\n\", \"\\n~\"), (\"%\", \"%%\")]\n if not isinstance(obj, str):\n obj = saferepr(obj)\n replaces.append((\"\\\\n\", \"\\n~\"))\n\n for r1, r2 in replaces:\n obj = obj.replace(r1, r2)\n\n return obj", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.run_AssertionRewriter.run.while_nodes_.for_name_field_in_ast_it.if_isinstance_field_list.elif_.nodes_append_field_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.run_AssertionRewriter.run.while_nodes_.for_name_field_in_ast_it.if_isinstance_field_list.elif_.nodes_append_field_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 670, "end_line": 738, "span_ids": ["AssertionRewriter.run"], "tokens": 528}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def run(self, mod: ast.Module) -> None:\n \"\"\"Find all assert statements in *mod* and rewrite them.\"\"\"\n if not mod.body:\n # Nothing to do.\n return\n # Insert some special imports at the top of the module but after any\n # docstrings and __future__ imports.\n aliases = [\n ast.alias(\"builtins\", \"@py_builtins\"),\n ast.alias(\"_pytest.assertion.rewrite\", \"@pytest_ar\"),\n ]\n doc = getattr(mod, \"docstring\", None)\n expect_docstring = doc is None\n if doc is not None and self.is_rewrite_disabled(doc):\n return\n pos = 0\n lineno = 1\n for item in mod.body:\n if (\n expect_docstring\n and isinstance(item, ast.Expr)\n and isinstance(item.value, ast.Str)\n ):\n doc = item.value.s\n if self.is_rewrite_disabled(doc):\n return\n expect_docstring = False\n elif (\n isinstance(item, ast.ImportFrom)\n and item.level == 0\n and item.module == \"__future__\"\n ):\n pass\n else:\n break\n pos += 1\n # Special case: for a decorated function, set the lineno to that of the\n # first decorator, not the `def`. 
Issue #4984.\n if isinstance(item, ast.FunctionDef) and item.decorator_list:\n lineno = item.decorator_list[0].lineno\n else:\n lineno = item.lineno\n imports = [\n ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases\n ]\n mod.body[pos:pos] = imports\n # Collect asserts.\n nodes: List[ast.AST] = [mod]\n while nodes:\n node = nodes.pop()\n for name, field in ast.iter_fields(node):\n if isinstance(field, list):\n new: List[ast.AST] = []\n for i, child in enumerate(field):\n if isinstance(child, ast.Assert):\n # Transform assert.\n new.extend(self.visit(child))\n else:\n new.append(child)\n if isinstance(child, ast.AST):\n nodes.append(child)\n setattr(node, name, new)\n elif (\n isinstance(field, ast.AST)\n # Don't recurse into expressions as they can't contain\n # asserts.\n and not isinstance(field, ast.expr)\n ):\n nodes.append(field)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.is_rewrite_disabled_AssertionRewriter.builtin.return.ast_Attribute_builtin_nam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.is_rewrite_disabled_AssertionRewriter.builtin.return.ast_Attribute_builtin_nam", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 734, "end_line": 764, "span_ids": ["AssertionRewriter.builtin", "AssertionRewriter.assign", "AssertionRewriter.is_rewrite_disabled", "AssertionRewriter.helper", "AssertionRewriter.variable", "AssertionRewriter.display"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n @staticmethod\n def is_rewrite_disabled(docstring: str) -> bool:\n return \"PYTEST_DONT_REWRITE\" in docstring\n\n def variable(self) -> str:\n \"\"\"Get a new variable.\"\"\"\n # Use a character invalid in python identifiers to avoid clashing.\n name = \"@py_assert\" + str(next(self.variable_counter))\n self.variables.append(name)\n return name\n\n def assign(self, expr: ast.expr) -> ast.Name:\n \"\"\"Give *expr* a name.\"\"\"\n name = self.variable()\n self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))\n return ast.Name(name, ast.Load())\n\n def display(self, expr: ast.expr) -> ast.expr:\n \"\"\"Call saferepr on the expression.\"\"\"\n return self.helper(\"_saferepr\", expr)\n\n def helper(self, name: str, *args: ast.expr) -> ast.expr:\n \"\"\"Call a helper in this module.\"\"\"\n py_name = ast.Name(\"@pytest_ar\", ast.Load())\n attr = ast.Attribute(py_name, name, ast.Load())\n return ast.Call(attr, list(args), [])\n\n def builtin(self, name: str) -> ast.Attribute:\n \"\"\"Return the builtin called *name*.\"\"\"\n builtin_name = ast.Name(\"@py_builtins\", ast.Load())\n return ast.Attribute(builtin_name, name, ast.Load())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.explanation_param_AssertionRewriter.explanation_param.return._specifier_s_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.explanation_param_AssertionRewriter.explanation_param.return._specifier_s_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 772, "end_line": 782, "span_ids": ["AssertionRewriter.explanation_param"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def explanation_param(self, expr: ast.expr) -> str:\n \"\"\"Return a new named %-formatting placeholder for expr.\n\n This creates a %-formatting placeholder for expr in the\n current formatting context, e.g. ``%(py0)s``. The placeholder\n and expr are placed in the current format context so that it\n can be used on the next call to .pop_format_context().\n \"\"\"\n specifier = \"py\" + str(next(self.variable_counter))\n self.explanation_specifiers[specifier] = expr\n return \"%(\" + specifier + \")s\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.push_format_context_AssertionRewriter.push_format_context.self_stack_append_self_ex": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.push_format_context_AssertionRewriter.push_format_context.self_stack_append_self_ex", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 784, "end_line": 795, "span_ids": ["AssertionRewriter.push_format_context"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def push_format_context(self) -> None:\n \"\"\"Create a new formatting context.\n\n The format context is used for when an explanation wants to\n have a variable value formatted in the assertion message. In\n this case the value required can be added using\n .explanation_param(). 
Finally .pop_format_context() is used\n to format a string of %-formatted values as added by\n .explanation_param().\n \"\"\"\n self.explanation_specifiers: Dict[str, ast.expr] = {}\n self.stack.append(self.explanation_specifiers)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.pop_format_context_AssertionRewriter.generic_visit.return.res_self_explanation_par": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.pop_format_context_AssertionRewriter.generic_visit.return.res_self_explanation_par", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 797, "end_line": 821, "span_ids": ["AssertionRewriter.pop_format_context", "AssertionRewriter.generic_visit"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def pop_format_context(self, expl_expr: ast.expr) -> ast.Name:\n \"\"\"Format the %-formatted string with current format context.\n\n The expl_expr should be an str ast.expr instance constructed from\n the %-placeholders created by .explanation_param(). This will\n add the required code to format said string to .expl_stmts and\n return the ast.Name instance of the formatted string.\n \"\"\"\n current = self.stack.pop()\n if self.stack:\n self.explanation_specifiers = self.stack[-1]\n keys = [ast.Str(key) for key in current.keys()]\n format_dict = ast.Dict(keys, list(current.values()))\n form = ast.BinOp(expl_expr, ast.Mod(), format_dict)\n name = \"@py_format\" + str(next(self.variable_counter))\n if self.enable_assertion_pass_hook:\n self.format_variables.append(name)\n self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form))\n return ast.Name(name, ast.Load())\n\n def generic_visit(self, node: ast.AST) -> Tuple[ast.Name, str]:\n \"\"\"Handle expressions we don't have custom code for.\"\"\"\n assert isinstance(node, ast.expr)\n res = self.assign(node)\n return res, self.explanation_param(self.display(res))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Name_AssertionRewriter.visit_Name.return.name_self_explanation_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Name_AssertionRewriter.visit_Name.return.name_self_explanation_pa", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 937, "end_line": 945, "span_ids": ["AssertionRewriter.visit_Name"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def visit_Name(self, name: ast.Name) -> Tuple[ast.Name, str]:\n # Display the repr of the name if it's a local variable or\n # _should_repr_global_name() thinks it's acceptable.\n locs = ast.Call(self.builtin(\"locals\"), [], [])\n inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])\n dorepr = self.helper(\"_should_repr_global_name\", name)\n test = ast.BoolOp(ast.Or(), [inlocs, dorepr])\n expr = ast.IfExp(test, self.display(name), ast.Str(name.id))\n return name, self.explanation_param(expr)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_BoolOp_AssertionRewriter.visit_BoolOp.return.ast_Name_res_var_ast_Loa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_BoolOp_AssertionRewriter.visit_BoolOp.return.ast_Name_res_var_ast_Loa", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 949, "end_line": 982, "span_ids": ["AssertionRewriter.visit_BoolOp"], "tokens": 392}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def visit_BoolOp(self, boolop: ast.BoolOp) -> Tuple[ast.Name, str]:\n res_var = self.variable()\n expl_list = self.assign(ast.List([], ast.Load()))\n app = ast.Attribute(expl_list, \"append\", ast.Load())\n is_or = int(isinstance(boolop.op, ast.Or))\n body = save = self.statements\n fail_save = self.expl_stmts\n levels = len(boolop.values) - 1\n self.push_format_context()\n # Process each operand, short-circuiting if needed.\n for i, v in enumerate(boolop.values):\n if i:\n fail_inner: List[ast.stmt] = []\n # cond is set in a prior loop iteration below\n self.expl_stmts.append(ast.If(cond, fail_inner, [])) # noqa\n self.expl_stmts = fail_inner\n self.push_format_context()\n res, expl = self.visit(v)\n body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))\n expl_format = self.pop_format_context(ast.Str(expl))\n call = ast.Call(app, [expl_format], [])\n self.expl_stmts.append(ast.Expr(call))\n if i < levels:\n cond: ast.expr = res\n if is_or:\n cond = ast.UnaryOp(ast.Not(), cond)\n inner: List[ast.stmt] = []\n self.statements.append(ast.If(cond, inner, []))\n self.statements = body = inner\n self.statements = save\n self.expl_stmts = fail_save\n expl_template = self.helper(\"_format_boolop\", expl_list, ast.Num(is_or))\n expl = self.pop_format_context(expl_template)\n return ast.Name(res_var, ast.Load()), self.explanation_param(expl)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, 
"__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_UnaryOp_AssertionRewriter.visit_BinOp.return.res_explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_UnaryOp_AssertionRewriter.visit_BinOp.return.res_explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 984, "end_line": 996, "span_ids": ["AssertionRewriter.visit_UnaryOp", "AssertionRewriter.visit_BinOp"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def visit_UnaryOp(self, unary: ast.UnaryOp) -> Tuple[ast.Name, str]:\n pattern = UNARY_MAP[unary.op.__class__]\n operand_res, operand_expl = self.visit(unary.operand)\n res = self.assign(ast.UnaryOp(unary.op, operand_res))\n return res, pattern % (operand_expl,)\n\n def visit_BinOp(self, binop: ast.BinOp) -> Tuple[ast.Name, str]:\n symbol = BINOP_MAP[binop.op.__class__]\n left_expr, left_expl = self.visit(binop.left)\n right_expr, right_expl = self.visit(binop.right)\n explanation = f\"({left_expl} {symbol} {right_expl})\"\n res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))\n return res, explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__truncate_explanation__truncate_explanation.return.truncated_explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__truncate_explanation__truncate_explanation.return.truncated_explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/truncate.py", "file_name": "truncate.py", "file_type": "text/x-python", "category": "implementation", "start_line": 39, "end_line": 78, "span_ids": ["_truncate_explanation"], "tokens": 354}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _truncate_explanation(\n input_lines: List[str],\n max_lines: Optional[int] = None,\n max_chars: Optional[int] = None,\n) -> List[str]:\n \"\"\"Truncate given list of strings that makes up the assertion explanation.\n\n Truncates to either 8 lines, or 640 characters - whichever the input reaches\n first. 
The remaining lines will be replaced by a usage message.\n \"\"\"\n\n if max_lines is None:\n max_lines = DEFAULT_MAX_LINES\n if max_chars is None:\n max_chars = DEFAULT_MAX_CHARS\n\n # Check if truncation required\n input_char_count = len(\"\".join(input_lines))\n if len(input_lines) <= max_lines and input_char_count <= max_chars:\n return input_lines\n\n # Truncate first to max_lines, and then truncate to max_chars if max_chars\n # is exceeded.\n truncated_explanation = input_lines[:max_lines]\n truncated_explanation = _truncate_by_char_count(truncated_explanation, max_chars)\n\n # Add ellipsis to final line\n truncated_explanation[-1] = truncated_explanation[-1] + \"...\"\n\n # Append useful message to explanation\n truncated_line_count = len(input_lines) - len(truncated_explanation)\n truncated_line_count += 1 # Account for the part-truncated final line\n msg = \"...Full output truncated\"\n if truncated_line_count == 1:\n msg += f\" ({truncated_line_count} line hidden)\"\n else:\n msg += f\" ({truncated_line_count} lines hidden)\"\n msg += f\", {USAGE_MSG}\"\n truncated_explanation.extend([\"\", str(msg)])\n return truncated_explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__truncate_by_char_count_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__truncate_by_char_count_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/truncate.py", "file_name": "truncate.py", "file_type": "text/x-python", "category": "implementation", "start_line": 87, "end_line": 107, "span_ids": ["_truncate_by_char_count"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _truncate_by_char_count(input_lines: List[str], max_chars: int) -> List[str]:\n # Check if truncation required\n if len(\"\".join(input_lines)) <= max_chars:\n return input_lines\n\n # Find point at which input length exceeds total allowed length\n iterated_char_count = 0\n for iterated_index, input_line in enumerate(input_lines):\n if iterated_char_count + len(input_line) > max_chars:\n break\n iterated_char_count += len(input_line)\n\n # Create truncated explanation with modified final line\n truncated_result = input_lines[:iterated_index]\n final_line = input_lines[iterated_index]\n if final_line:\n final_line_truncate_point = max_chars - iterated_char_count\n final_line = final_line[:final_line_truncate_point]\n truncated_result.append(final_line)\n return truncated_result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__split_explanation__split_explanation.return.lines": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__split_explanation__split_explanation.return.lines", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": 
"text/x-python", "category": "implementation", "start_line": 45, "end_line": 59, "span_ids": ["_split_explanation"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _split_explanation(explanation: str) -> List[str]:\n r\"\"\"Return a list of individual lines in the explanation.\n\n This will return a list of lines split on '\\n{', '\\n}' and '\\n~'.\n Any other newlines will be escaped and appear in the line as the\n literal '\\n' characters.\n \"\"\"\n raw_lines = (explanation or \"\").split(\"\\n\")\n lines = [raw_lines[0]]\n for values in raw_lines[1:]:\n if values and values[0] in [\"{\", \"}\", \"~\", \">\"]:\n lines.append(values)\n else:\n lines[-1] += \"\\\\n\" + values\n return lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__format_lines__format_lines.return.result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__format_lines__format_lines.return.result", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 62, "end_line": 94, "span_ids": ["_format_lines"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _format_lines(lines: Sequence[str]) -> List[str]:\n \"\"\"Format the individual lines.\n\n This will replace the '{', '}' and '~' characters of our mini formatting\n language with the proper 'where ...', 'and ...' and ' + ...' 
text, taking\n care of indentation along the way.\n\n Return a list of formatted lines.\n \"\"\"\n result = list(lines[:1])\n stack = [0]\n stackcnt = [0]\n for line in lines[1:]:\n if line.startswith(\"{\"):\n if stackcnt[-1]:\n s = \"and \"\n else:\n s = \"where \"\n stack.append(len(result))\n stackcnt[-1] += 1\n stackcnt.append(0)\n result.append(\" +\" + \" \" * (len(stack) - 1) + s + line[1:])\n elif line.startswith(\"}\"):\n stack.pop()\n stackcnt.pop()\n result[stack[-1]] += line[1:]\n else:\n assert line[0] in [\"~\", \">\"]\n stack[-1] += 1\n indent = len(stack) if line.startswith(\"~\") else len(stack) - 1\n result.append(\" \" * indent + line[1:])\n assert len(stack) == 1\n return result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_assertrepr_compare_assertrepr_compare.return._summary_explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_assertrepr_compare_assertrepr_compare.return._summary_explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 133, "end_line": 170, "span_ids": ["assertrepr_compare"], "tokens": 314}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def assertrepr_compare(config, op: str, left: Any, right: Any) -> Optional[List[str]]:\n \"\"\"Return specialised explanations for some operators/operands.\"\"\"\n verbose = config.getoption(\"verbose\")\n if verbose > 1:\n left_repr = safeformat(left)\n right_repr = safeformat(right)\n else:\n # XXX: \"15 chars indentation\" is wrong\n # (\"E AssertionError: assert \"); should use term width.\n maxsize = (\n 80 - 15 - len(op) - 2\n ) // 2 # 15 chars indentation, 1 space around op\n left_repr = saferepr(left, maxsize=maxsize)\n right_repr = saferepr(right, maxsize=maxsize)\n\n summary = f\"{left_repr} {op} {right_repr}\"\n\n explanation = None\n try:\n if op == \"==\":\n explanation = _compare_eq_any(left, right, verbose)\n elif op == \"not in\":\n if istext(left) and istext(right):\n explanation = _notin_text(left, right, verbose)\n except outcomes.Exit:\n raise\n except Exception:\n explanation = [\n \"(pytest_assertion plugin: representation of details failed: {}.\".format(\n _pytest._code.ExceptionInfo.from_current()._getreprcrash()\n ),\n \" Probably an object has a faulty __repr__.)\",\n ]\n\n if not explanation:\n return None\n\n return [summary] + explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_cls__compare_eq_cls.return.explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_cls__compare_eq_cls.return.explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": 
"util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 414, "end_line": 458, "span_ids": ["_compare_eq_cls"], "tokens": 360}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _compare_eq_cls(left: Any, right: Any, verbose: int) -> List[str]:\n if isdatacls(left):\n all_fields = left.__dataclass_fields__\n fields_to_check = [field for field, info in all_fields.items() if info.compare]\n elif isattrs(left):\n all_fields = left.__attrs_attrs__\n fields_to_check = [field.name for field in all_fields if getattr(field, \"eq\")]\n elif isnamedtuple(left):\n fields_to_check = left._fields\n else:\n assert False\n\n indent = \" \"\n same = []\n diff = []\n for field in fields_to_check:\n if getattr(left, field) == getattr(right, field):\n same.append(field)\n else:\n diff.append(field)\n\n explanation = []\n if same or diff:\n explanation += [\"\"]\n if same and verbose < 2:\n explanation.append(\"Omitting %s identical items, use -vv to show\" % len(same))\n elif same:\n explanation += [\"Matching attributes:\"]\n explanation += pprint.pformat(same).splitlines()\n if diff:\n explanation += [\"Differing attributes:\"]\n explanation += pprint.pformat(diff).splitlines()\n for field in diff:\n field_left = getattr(left, field)\n field_right = getattr(right, field)\n explanation += [\n \"\",\n \"Drill down into differing attribute %s:\" % field,\n (\"%s%s: %r != %r\") % (indent, field, field_left, field_right),\n ]\n explanation += [\n indent + line\n for line in _compare_eq_any(field_left, field_right, verbose)\n ]\n return explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__notin_text_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__notin_text_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 447, "end_line": 464, "span_ids": ["_notin_text"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _notin_text(term: str, text: str, verbose: int = 0) -> List[str]:\n index = text.find(term)\n head = text[:index]\n tail = text[index + len(term) :]\n correct_text = head + tail\n diff = _diff_text(text, correct_text, verbose)\n newdiff = [\"%s is contained here:\" % saferepr(term, maxsize=42)]\n for line in diff:\n if line.startswith(\"Skipping\"):\n continue\n if line.startswith(\"- \"):\n continue\n if line.startswith(\"+ \"):\n newdiff.append(\" \" + line[2:])\n else:\n newdiff.append(line)\n return newdiff", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.makedir_Cache.makedir.return.py_path_local_res_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.makedir_Cache.makedir.return.py_path_local_res_", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 123, "end_line": 140, "span_ids": ["Cache.makedir"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass Cache:\n\n def makedir(self, name: str) -> py.path.local:\n \"\"\"Return a directory path object with the given name.\n\n If the directory does not yet exist, it will be created. You can use\n it to manage files to e.g. store/retrieve database dumps across test\n sessions.\n\n :param name:\n Must be a string not containing a ``/`` separator.\n Make sure the name contains your plugin or application\n identifiers to prevent clashes with other cache users.\n \"\"\"\n path = Path(name)\n if len(path.parts) > 1:\n raise ValueError(\"name is not allowed to contain path separators\")\n res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path)\n res.mkdir(exist_ok=True, parents=True)\n return py.path.local(res)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache._ensure_supporting_files_Cache._ensure_supporting_files.cachedir_tag_path_write_b": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache._ensure_supporting_files_Cache._ensure_supporting_files.cachedir_tag_path_write_b", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 195, "end_line": 205, "span_ids": ["Cache._ensure_supporting_files"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass Cache:\n\n def _ensure_supporting_files(self) -> None:\n \"\"\"Create supporting files in the cache dir that are not really part of the cache.\"\"\"\n readme_path = self._cachedir / \"README.md\"\n readme_path.write_text(README_CONTENT)\n\n gitignore_path = self._cachedir.joinpath(\".gitignore\")\n msg = \"# Created by pytest automatically.\\n*\\n\"\n gitignore_path.write_text(msg, encoding=\"UTF-8\")\n\n cachedir_tag_path = self._cachedir.joinpath(\"CACHEDIR.TAG\")\n cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPlugin_LFPlugin.pytest_collectreport.if_passed_.else_.self_lastfailed_report_no": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPlugin_LFPlugin.pytest_collectreport.if_passed_.else_.self_lastfailed_report_no", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 278, "end_line": 321, "span_ids": ["LFPlugin.get_last_failed_paths", "LFPlugin", "LFPlugin.pytest_collectreport", "LFPlugin.pytest_runtest_logreport", "LFPlugin.pytest_report_collectionfinish"], "tokens": 455}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LFPlugin:\n \"\"\"Plugin which implements the --lf (run last-failing) option.\"\"\"\n\n def __init__(self, config: Config) -> None:\n self.config = config\n active_keys = \"lf\", \"failedfirst\"\n self.active = any(config.getoption(key) for key in active_keys)\n assert config.cache\n self.lastfailed: Dict[str, bool] = config.cache.get(\"cache/lastfailed\", {})\n self._previously_failed_count: Optional[int] = None\n self._report_status: Optional[str] = None\n self._skipped_files = 0 # count skipped files during collection due to --lf\n\n if config.getoption(\"lf\"):\n self._last_failed_paths = self.get_last_failed_paths()\n config.pluginmanager.register(\n LFPluginCollWrapper(self), \"lfplugin-collwrapper\"\n )\n\n def get_last_failed_paths(self) -> Set[Path]:\n \"\"\"Return a set with all Paths()s of the previously failed nodeids.\"\"\"\n rootpath = self.config.rootpath\n result = {rootpath / nodeid.split(\"::\")[0] for nodeid in self.lastfailed}\n return {x for x in result if x.exists()}\n\n def pytest_report_collectionfinish(self) -> Optional[str]:\n if self.active and self.config.getoption(\"verbose\") >= 0:\n return \"run-last-failure: %s\" % self._report_status\n return None\n\n def pytest_runtest_logreport(self, report: TestReport) -> None:\n if (report.when == \"call\" and report.passed) or report.skipped:\n self.lastfailed.pop(report.nodeid, None)\n elif report.failed:\n self.lastfailed[report.nodeid] = True\n\n def pytest_collectreport(self, report: CollectReport) -> None:\n passed = report.outcome in (\"passed\", \"skipped\")\n if passed:\n if report.nodeid in self.lastfailed:\n self.lastfailed.pop(report.nodeid)\n self.lastfailed.update((item.nodeid, True) for item in report.result)\n else:\n self.lastfailed[report.nodeid] = True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPlugin.pytest_collection_modifyitems_LFPlugin.pytest_sessionfinish.if_saved_lastfailed_se.config_cache_set_cache_l": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPlugin.pytest_collection_modifyitems_LFPlugin.pytest_sessionfinish.if_saved_lastfailed_se.config_cache_set_cache_l", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", 
"file_type": "text/x-python", "category": "implementation", "start_line": 323, "end_line": 383, "span_ids": ["LFPlugin.pytest_collection_modifyitems", "LFPlugin.pytest_sessionfinish"], "tokens": 541}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LFPlugin:\n\n @hookimpl(hookwrapper=True, tryfirst=True)\n def pytest_collection_modifyitems(\n self, config: Config, items: List[nodes.Item]\n ) -> Generator[None, None, None]:\n yield\n\n if not self.active:\n return\n\n if self.lastfailed:\n previously_failed = []\n previously_passed = []\n for item in items:\n if item.nodeid in self.lastfailed:\n previously_failed.append(item)\n else:\n previously_passed.append(item)\n self._previously_failed_count = len(previously_failed)\n\n if not previously_failed:\n # Running a subset of all tests with recorded failures\n # only outside of it.\n self._report_status = \"%d known failures not in selected tests\" % (\n len(self.lastfailed),\n )\n else:\n if self.config.getoption(\"lf\"):\n items[:] = previously_failed\n config.hook.pytest_deselected(items=previously_passed)\n else: # --failedfirst\n items[:] = previously_failed + previously_passed\n\n noun = \"failure\" if self._previously_failed_count == 1 else \"failures\"\n suffix = \" first\" if self.config.getoption(\"failedfirst\") else \"\"\n self._report_status = \"rerun previous {count} {noun}{suffix}\".format(\n count=self._previously_failed_count, suffix=suffix, noun=noun\n )\n\n if self._skipped_files > 0:\n files_noun = \"file\" if self._skipped_files == 1 else \"files\"\n self._report_status += \" (skipped {files} {files_noun})\".format(\n files=self._skipped_files, files_noun=files_noun\n )\n else:\n self._report_status = \"no previously failed tests, \"\n if self.config.getoption(\"last_failed_no_failures\") == \"none\":\n self._report_status += \"deselecting all items.\"\n config.hook.pytest_deselected(items=items[:])\n items[:] = []\n else:\n self._report_status += \"not deselecting items.\"\n\n def pytest_sessionfinish(self, session: Session) -> None:\n config = self.config\n if config.getoption(\"cacheshow\") or hasattr(config, \"workerinput\"):\n return\n\n assert config.cache is not None\n saved_lastfailed = config.cache.get(\"cache/lastfailed\", {})\n if saved_lastfailed != self.lastfailed:\n config.cache.set(\"cache/lastfailed\", self.lastfailed)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_addoption_pytest_addoption.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_addoption_pytest_addoption.None_6", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 399, "end_line": 454, "span_ids": ["pytest_addoption"], "tokens": 416}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"general\")\n group.addoption(\n \"--lf\",\n \"--last-failed\",\n action=\"store_true\",\n dest=\"lf\",\n help=\"rerun only the tests that failed \"\n \"at the last run (or all if none failed)\",\n )\n group.addoption(\n \"--ff\",\n \"--failed-first\",\n action=\"store_true\",\n dest=\"failedfirst\",\n help=\"run all tests, but run the last failures first.\\n\"\n \"This may re-order tests and thus lead to \"\n \"repeated fixture setup/teardown.\",\n )\n group.addoption(\n \"--nf\",\n \"--new-first\",\n action=\"store_true\",\n dest=\"newfirst\",\n help=\"run tests from new files first, then the rest of the tests \"\n \"sorted by file mtime\",\n )\n group.addoption(\n \"--cache-show\",\n action=\"append\",\n nargs=\"?\",\n dest=\"cacheshow\",\n help=(\n \"show cache contents, don't perform collection or tests. \"\n \"Optional argument: glob (default: '*').\"\n ),\n )\n group.addoption(\n \"--cache-clear\",\n action=\"store_true\",\n dest=\"cacheclear\",\n help=\"remove all cache contents at start of test run.\",\n )\n cache_dir_default = \".pytest_cache\"\n if \"TOX_ENV_DIR\" in os.environ:\n cache_dir_default = os.path.join(os.environ[\"TOX_ENV_DIR\"], cache_dir_default)\n parser.addini(\"cache_dir\", default=cache_dir_default, help=\"cache directory path.\")\n group.addoption(\n \"--lfnf\",\n \"--last-failed-no-failures\",\n action=\"store\",\n dest=\"last_failed_no_failures\",\n choices=(\"all\", \"none\"),\n default=\"all\",\n help=\"which tests to run with no previously (known) failures.\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_cacheshow_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_cacheshow_", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 537, "end_line": 577, "span_ids": ["cacheshow"], "tokens": 356}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def cacheshow(config: Config, session: Session) -> int:\n from pprint import pformat\n\n assert config.cache is not None\n\n tw = TerminalWriter()\n tw.line(\"cachedir: \" + str(config.cache._cachedir))\n if not config.cache._cachedir.is_dir():\n tw.line(\"cache is empty\")\n return 0\n\n glob = config.option.cacheshow[0]\n if glob is None:\n glob = \"*\"\n\n dummy = object()\n basedir = config.cache._cachedir\n vdir = basedir / Cache._CACHE_PREFIX_VALUES\n tw.sep(\"-\", \"cache values for %r\" % glob)\n for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()):\n key = str(valpath.relative_to(vdir))\n val = config.cache.get(key, dummy)\n if val is dummy:\n tw.line(\"%s contains unreadable content, will be ignored\" % key)\n else:\n tw.line(\"%s contains:\" % key)\n for line in pformat(val).splitlines():\n tw.line(\" \" + line)\n\n ddir = basedir / 
Cache._CACHE_PREFIX_DIRS\n if ddir.is_dir():\n contents = sorted(ddir.rglob(glob))\n tw.sep(\"-\", \"cache directories for %r\" % glob)\n for p in contents:\n # if p.check(dir=1):\n # print(\"%s/\" % p.relto(basedir))\n if p.is_file():\n key = str(p.relative_to(basedir))\n tw.line(f\"{key} is a file of length {p.stat().st_size:d}\")\n return 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_pytest_load_initial_conftests_pytest_load_initial_conftests.if_outcome_excinfo_is_not.sys_stderr_write_err_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_pytest_load_initial_conftests_pytest_load_initial_conftests.if_outcome_excinfo_is_not.sys_stderr_write_err_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 151, "end_line": 172, "span_ids": ["pytest_load_initial_conftests"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(hookwrapper=True)\ndef pytest_load_initial_conftests(early_config: Config):\n ns = early_config.known_args_namespace\n if ns.capture == \"fd\":\n _py36_windowsconsoleio_workaround(sys.stdout)\n _colorama_workaround()\n _readline_workaround()\n pluginmanager = early_config.pluginmanager\n capman = CaptureManager(ns.capture)\n pluginmanager.register(capman, \"capturemanager\")\n\n # Make sure that capturemanager is properly reset at final shutdown.\n early_config.add_cleanup(capman.stop_global_capturing)\n\n # Finally trigger conftest loading but while capturing (issue #93).\n capman.start_global_capturing()\n outcome = yield\n capman.suspend_global_capture()\n if outcome.excinfo is not None:\n out, err = capman.read_global_capture()\n sys.stdout.write(out)\n sys.stderr.write(err)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture_CaptureFixture.close.if_self__capture_is_not_N.self._capture.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture_CaptureFixture.close.if_self__capture_is_not_N.self._capture.None", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 833, "end_line": 862, "span_ids": ["CaptureFixture.close", "CaptureFixture", "CaptureFixture._start"], "tokens": 258}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CaptureFixture(Generic[AnyStr]):\n \"\"\"Object returned by the :fixture:`capsys`, 
:fixture:`capsysbinary`,\n :fixture:`capfd` and :fixture:`capfdbinary` fixtures.\"\"\"\n\n def __init__(\n self, captureclass, request: SubRequest, *, _ispytest: bool = False\n ) -> None:\n check_ispytest(_ispytest)\n self.captureclass = captureclass\n self.request = request\n self._capture: Optional[MultiCapture[AnyStr]] = None\n self._captured_out = self.captureclass.EMPTY_BUFFER\n self._captured_err = self.captureclass.EMPTY_BUFFER\n\n def _start(self) -> None:\n if self._capture is None:\n self._capture = MultiCapture(\n in_=None,\n out=self.captureclass(1),\n err=self.captureclass(2),\n )\n self._capture.start_capturing()\n\n def close(self) -> None:\n if self._capture is not None:\n out, err = self._capture.pop_outerr_to_orig()\n self._captured_out += out\n self._captured_err += err\n self._capture.stop_capturing()\n self._capture = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_getfuncargnames_getfuncargnames.return.arg_names": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_getfuncargnames_getfuncargnames.return.arg_names", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 112, "end_line": 177, "span_ids": ["getfuncargnames"], "tokens": 545}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def getfuncargnames(\n function: Callable[..., Any],\n *,\n name: str = \"\",\n is_method: bool = False,\n cls: Optional[type] = None,\n) -> Tuple[str, ...]:\n \"\"\"Return the names of a function's mandatory arguments.\n\n Should return the names of all function arguments that:\n * Aren't bound to an instance or type as in instance or class methods.\n * Don't have default values.\n * Aren't bound with functools.partial.\n * Aren't replaced with mocks.\n\n The is_method and cls arguments indicate that the function should\n be treated as a bound method even though it's not unless, only in\n the case of cls, the function is a static method.\n\n The name parameter should be the original name in which the function was collected.\n \"\"\"\n # TODO(RonnyPfannschmidt): This function should be refactored when we\n # revisit fixtures. The fixture mechanism should ask the node for\n # the fixture names, and not try to obtain directly from the\n # function object well after collection has occurred.\n\n # The parameters attribute of a Signature object contains an\n # ordered mapping of parameter names to Parameter instances. 
This\n # creates a tuple of the names of the parameters that don't have\n # defaults.\n try:\n parameters = signature(function).parameters\n except (ValueError, TypeError) as e:\n fail(\n f\"Could not determine arguments of {function!r}: {e}\",\n pytrace=False,\n )\n\n arg_names = tuple(\n p.name\n for p in parameters.values()\n if (\n p.kind is Parameter.POSITIONAL_OR_KEYWORD\n or p.kind is Parameter.KEYWORD_ONLY\n )\n and p.default is Parameter.empty\n )\n if not name:\n name = function.__name__\n\n # If this function should be treated as a bound method even though\n # it's passed as an unbound method or function, remove the first\n # parameter name.\n if is_method or (\n # Not using `getattr` because we don't want to resolve the staticmethod.\n # Not using `cls.__dict__` because we want to check the entire MRO.\n cls\n and not isinstance(\n inspect.getattr_static(cls, name, default=None), staticmethod\n )\n ):\n arg_names = arg_names[1:]\n # Remove any names that will be replaced with mocks.\n if hasattr(function, \"__wrapped__\"):\n arg_names = arg_names[num_mock_patch_args(function) :]\n return arg_names", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__prepareconfig__prepareconfig.try_.except_BaseException_.raise": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__prepareconfig__prepareconfig.try_.except_BaseException_.raise", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 301, "end_line": 330, "span_ids": ["_prepareconfig"], "tokens": 275}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _prepareconfig(\n args: Optional[Union[List[str], \"os.PathLike[str]\"]] = None,\n plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None,\n) -> \"Config\":\n if args is None:\n args = sys.argv[1:]\n # TODO: Remove type-ignore after next mypy release.\n # https://github.com/python/typeshed/commit/076983eec45e739c68551cb6119fd7d85fd4afa9\n elif isinstance(args, os.PathLike): # type: ignore[misc]\n args = [os.fspath(args)]\n elif not isinstance(args, list):\n msg = \"`args` parameter expected to be a list of strings, got: {!r} (type: {})\"\n raise TypeError(msg.format(args, type(args)))\n\n config = get_config(args, plugins)\n pluginmanager = config.pluginmanager\n try:\n if plugins:\n for plugin in plugins:\n if isinstance(plugin, str):\n pluginmanager.consider_pluginarg(plugin)\n else:\n pluginmanager.register(plugin)\n config = pluginmanager.hook.pytest_cmdline_parse(\n pluginmanager=pluginmanager, args=args\n )\n return config\n except BaseException:\n config._ensure_unconfigure()\n raise", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.parse_hookimpl_opts_PytestPluginManager.parse_hookimpl_opts.return.opts": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.parse_hookimpl_opts_PytestPluginManager.parse_hookimpl_opts.return.opts", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 386, "end_line": 413, "span_ids": ["PytestPluginManager.parse_hookimpl_opts"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def parse_hookimpl_opts(self, plugin: _PluggyPlugin, name: str):\n # pytest hooks are always prefixed with \"pytest_\",\n # so we avoid accessing possibly non-readable attributes\n # (see issue #1073).\n if not name.startswith(\"pytest_\"):\n return\n # Ignore names which can not be hooks.\n if name == \"pytest_plugins\":\n return\n\n method = getattr(plugin, name)\n opts = super().parse_hookimpl_opts(plugin, name)\n\n # Consider only actual functions for hooks (#3775).\n if not inspect.isroutine(method):\n return\n\n # Collect unmarked hooks as long as they have the `pytest_' prefix.\n if opts is None and name.startswith(\"pytest_\"):\n opts = {}\n if opts is not None:\n # TODO: DeprecationWarning, people should use hookimpl\n # https://github.com/pytest-dev/pytest/issues/4562\n known_marks = {m.name for m in getattr(method, \"pytestmark\", [])}\n\n for name in (\"tryfirst\", \"trylast\", \"optionalhook\", \"hookwrapper\"):\n opts.setdefault(name, hasattr(method, name) or name in known_marks)\n return opts", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.parse_hookspec_opts_PytestPluginManager.parse_hookspec_opts.return.opts": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.parse_hookspec_opts_PytestPluginManager.parse_hookspec_opts.return.opts", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 415, "end_line": 430, "span_ids": ["PytestPluginManager.parse_hookspec_opts"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def parse_hookspec_opts(self, module_or_class, name: str):\n opts = super().parse_hookspec_opts(module_or_class, name)\n if opts is None:\n method = getattr(module_or_class, name)\n\n if name.startswith(\"pytest_\"):\n # todo: deprecate hookspec hacks\n # 
https://github.com/pytest-dev/pytest/issues/4562\n known_marks = {m.name for m in getattr(method, \"pytestmark\", [])}\n opts = {\n \"firstresult\": hasattr(method, \"firstresult\")\n or \"firstresult\" in known_marks,\n \"historic\": hasattr(method, \"historic\")\n or \"historic\" in known_marks,\n }\n return opts", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._set_initial_conftests_PytestPluginManager._set_initial_conftests.if_not_foundanchor_.self__try_load_conftest_c": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._set_initial_conftests_PytestPluginManager._set_initial_conftests.if_not_foundanchor_.self__try_load_conftest_c", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 483, "end_line": 512, "span_ids": ["PytestPluginManager._set_initial_conftests"], "tokens": 294}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n def _set_initial_conftests(self, namespace: argparse.Namespace) -> None:\n \"\"\"Load initial conftest files given a preparsed \"namespace\".\n\n As conftest files may add their own command line options which have\n arguments ('--my-opt somepath') we might get some false positives.\n All builtin and 3rd party plugins will have been loaded, however, so\n common options will not confuse our logic here.\n \"\"\"\n current = Path.cwd()\n self._confcutdir = (\n absolutepath(current / namespace.confcutdir)\n if namespace.confcutdir\n else None\n )\n self._noconftest = namespace.noconftest\n self._using_pyargs = namespace.pyargs\n testpaths = namespace.file_or_dir\n foundanchor = False\n for testpath in testpaths:\n path = str(testpath)\n # remove node-id syntax\n i = path.find(\"::\")\n if i != -1:\n path = path[:i]\n anchor = absolutepath(current / path)\n if anchor.exists(): # we found some file object\n self._try_load_conftest(anchor, namespace.importmode)\n foundanchor = True\n if not foundanchor:\n self._try_load_conftest(current, namespace.importmode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.import_plugin_PytestPluginManager.import_plugin.try_.else_.self_register_mod_modnam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.import_plugin_PytestPluginManager.import_plugin.try_.else_.self_register_mod_modnam", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 696, "end_line": 731, "span_ids": ["PytestPluginManager.import_plugin"], "tokens": 326}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def import_plugin(self, modname: str, consider_entry_points: bool = False) -> None:\n \"\"\"Import a plugin with ``modname``.\n\n If ``consider_entry_points`` is True, entry point names are also\n considered to find a plugin.\n \"\"\"\n # Most often modname refers to builtin modules, e.g. \"pytester\",\n # \"terminal\" or \"capture\". Those plugins are registered under their\n # basename for historic purposes but must be imported with the\n # _pytest prefix.\n assert isinstance(modname, str), (\n \"module name as text required, got %r\" % modname\n )\n if self.is_blocked(modname) or self.get_plugin(modname) is not None:\n return\n\n importspec = \"_pytest.\" + modname if modname in builtin_plugins else modname\n self.rewrite_hook.mark_rewrite(importspec)\n\n if consider_entry_points:\n loaded = self.load_setuptools_entrypoints(\"pytest11\", name=modname)\n if loaded:\n return\n\n try:\n __import__(importspec)\n except ImportError as e:\n raise ImportError(\n 'Error importing plugin \"{}\": {}'.format(modname, str(e.args[0]))\n ).with_traceback(e.__traceback__) from e\n\n except Skipped as e:\n self.skipped_plugins.append((modname, e.msg or \"\"))\n else:\n mod = sys.modules[importspec]\n self.register(mod, modname)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.pytest_cmdline_parse_Config.pytest_cmdline_parse.return.self": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.pytest_cmdline_parse_Config.pytest_cmdline_parse.return.self", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1017, "end_line": 1041, "span_ids": ["Config.pytest_cmdline_parse"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def pytest_cmdline_parse(\n self, pluginmanager: PytestPluginManager, args: List[str]\n ) -> \"Config\":\n try:\n self.parse(args)\n except UsageError:\n\n # Handle --version and --help here in a minimal fashion.\n # This gets done via helpconfig normally, but its\n # pytest_cmdline_main is not called in case of errors.\n if getattr(self.option, \"version\", False) or \"--version\" in args:\n from _pytest.helpconfig import showversion\n\n showversion(self)\n elif (\n getattr(self.option, \"help\", False) or \"--help\" in args or \"-h\" in args\n ):\n self._parser._getparser().print_help()\n sys.stdout.write(\n \"\\nNOTE: displaying only minimal help due to UsageError.\\n\\n\"\n )\n\n raise\n\n return self", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.notify_exception_Config.notify_exception.if_not_any_res_.for_line_in_str_excrepr_.sys_stderr_flush_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.notify_exception_Config.notify_exception.if_not_any_res_.for_line_in_str_excrepr_.sys_stderr_flush_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1043, "end_line": 1059, "span_ids": ["Config.notify_exception"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def notify_exception(\n self,\n excinfo: ExceptionInfo[BaseException],\n option: Optional[argparse.Namespace] = None,\n ) -> None:\n if option and getattr(option, \"fulltrace\", False):\n style: _TracebackStyle = \"long\"\n else:\n style = \"native\"\n excrepr = excinfo.getrepr(\n funcargs=True, showlocals=getattr(option, \"showlocals\", False), style=style\n )\n res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo)\n if not any(res):\n for line in str(excrepr).split(\"\\n\"):\n sys.stderr.write(\"INTERNALERROR> %s\\n\" % line)\n sys.stderr.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.cwd_relative_nodeid_Config.pytest_load_initial_conftests.self_pluginmanager__set_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.cwd_relative_nodeid_Config.pytest_load_initial_conftests.self_pluginmanager__set_i", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1061, "end_line": 1088, "span_ids": ["Config.cwd_relative_nodeid", "Config._processopt", "Config.pytest_load_initial_conftests", "Config.fromdictargs"], "tokens": 275}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def cwd_relative_nodeid(self, nodeid: str) -> str:\n # nodeid's are relative to the rootpath, compute relative to cwd.\n if self.invocation_params.dir != self.rootpath:\n fullpath = self.rootpath / nodeid\n nodeid = bestrelpath(self.invocation_params.dir, fullpath)\n return nodeid\n\n @classmethod\n def fromdictargs(cls, option_dict, args) -> \"Config\":\n \"\"\"Constructor usable for subprocesses.\"\"\"\n config = get_config(args)\n config.option.__dict__.update(option_dict)\n config.parse(args, addopts=False)\n for x in config.option.plugins:\n 
config.pluginmanager.consider_pluginarg(x)\n return config\n\n def _processopt(self, opt: \"Argument\") -> None:\n for name in opt._short_opts + opt._long_opts:\n self._opt2dest[name] = opt.dest\n\n if hasattr(opt, \"default\"):\n if not hasattr(self.option, opt.dest):\n setattr(self.option, opt.dest, opt.default)\n\n @hookimpl(trylast=True)\n def pytest_load_initial_conftests(self, early_config: \"Config\") -> None:\n self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._initini_Config._initini.self._override_ini.ns_override_ini_or_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._initini_Config._initini.self._override_ini.ns_override_ini_or_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1090, "end_line": 1113, "span_ids": ["Config._initini"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _initini(self, args: Sequence[str]) -> None:\n ns, unknown_args = self._parser.parse_known_and_unknown_args(\n args, namespace=copy.copy(self.option)\n )\n rootpath, inipath, inicfg = determine_setup(\n ns.inifilename,\n ns.file_or_dir + unknown_args,\n rootdir_cmd_arg=ns.rootdir or None,\n config=self,\n )\n self._rootpath = rootpath\n self._inipath = inipath\n self.inicfg = inicfg\n self._parser.extra_info[\"rootdir\"] = str(self.rootpath)\n self._parser.extra_info[\"inifile\"] = str(self.inipath)\n self._parser.addini(\"addopts\", \"extra command line options\", \"args\")\n self._parser.addini(\"minversion\", \"minimally required pytest version\")\n self._parser.addini(\n \"required_plugins\",\n \"plugins that must be present for pytest to run\",\n type=\"args\",\n default=[],\n )\n self._override_ini = ns.override_ini or ()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._mark_plugins_for_rewrite_Config._validate_args.return.args": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._mark_plugins_for_rewrite_Config._validate_args.return.args", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1135, "end_line": 1165, "span_ids": ["Config._mark_plugins_for_rewrite", "Config._validate_args"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _mark_plugins_for_rewrite(self, hook) -> None:\n \"\"\"Given an importhook, mark for rewrite any top-level\n modules or packages in the distribution package for\n all pytest plugins.\"\"\"\n self.pluginmanager.rewrite_hook = hook\n\n if os.environ.get(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\"):\n # We don't autoload from setuptools entry points, no need to continue.\n return\n\n package_files = (\n str(file)\n for dist in importlib_metadata.distributions()\n if any(ep.group == \"pytest11\" for ep in dist.entry_points)\n for file in dist.files or []\n )\n\n for name in _iter_rewritable_modules(package_files):\n hook.mark_rewrite(name)\n\n def _validate_args(self, args: List[str], via: str) -> List[str]:\n \"\"\"Validate known args.\"\"\"\n self._parser._config_source_hint = via # type: ignore\n try:\n self._parser.parse_known_and_unknown_args(\n args, namespace=copy.copy(self.option)\n )\n finally:\n del self._parser._config_source_hint # type: ignore\n\n return args", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.addinivalue_line_Config.getini.try_.except_KeyError_.return.val": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.addinivalue_line_Config.getini.try_.except_KeyError_.return.val", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1365, "end_line": 1384, "span_ids": ["Config.getini", "Config.addinivalue_line"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def addinivalue_line(self, name: str, line: str) -> None:\n \"\"\"Add a line to an ini-file option. 
The option must have been\n declared but might not yet be set in which case the line becomes\n the first line in its value.\"\"\"\n x = self.getini(name)\n assert isinstance(x, list)\n x.append(line) # modifies the cached list inline\n\n def getini(self, name: str):\n \"\"\"Return configuration value from an :ref:`ini file `.\n\n If the specified name hasn't been registered through a prior\n :py:func:`parser.addini <_pytest.config.argparsing.Parser.addini>`\n call (usually from a plugin), a ValueError is raised.\n \"\"\"\n try:\n return self._inicache[name]\n except KeyError:\n self._inicache[name] = val = self._getini(name)\n return val", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._getini_Config._getini.if_type_pathlist_.else_.return.value": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._getini_Config._getini.if_type_pathlist_.else_.return.value", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1386, "end_line": 1435, "span_ids": ["Config._getini"], "tokens": 462}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _getini(self, name: str):\n try:\n description, type, default = self._parser._inidict[name]\n except KeyError as e:\n raise ValueError(f\"unknown configuration value: {name!r}\") from e\n override_value = self._get_override_ini_value(name)\n if override_value is None:\n try:\n value = self.inicfg[name]\n except KeyError:\n if default is not None:\n return default\n if type is None:\n return \"\"\n return []\n else:\n value = override_value\n # Coerce the values based on types.\n #\n # Note: some coercions are only required if we are reading from .ini files, because\n # the file format doesn't contain type information, but when reading from toml we will\n # get either str or list of str values (see _parse_ini_config_from_pyproject_toml).\n # For example:\n #\n # ini:\n # a_line_list = \"tests acceptance\"\n # in this case, we need to split the string to obtain a list of strings.\n #\n # toml:\n # a_line_list = [\"tests\", \"acceptance\"]\n # in this case, we already have a list ready to use.\n #\n if type == \"pathlist\":\n # TODO: This assert is probably not valid in all cases.\n assert self.inipath is not None\n dp = self.inipath.parent\n input_values = shlex.split(value) if isinstance(value, str) else value\n return [py.path.local(str(dp / x)) for x in input_values]\n elif type == \"args\":\n return shlex.split(value) if isinstance(value, str) else value\n elif type == \"linelist\":\n if isinstance(value, str):\n return [t for t in map(lambda x: x.strip(), value.split(\"\\n\")) if t]\n else:\n return value\n elif type == \"bool\":\n return _strtobool(str(value).strip())\n else:\n assert type in [None, \"string\"]\n return value", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._getconftest_pathlist_Config._getconftest_pathlist.return.values": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._getconftest_pathlist_Config._getconftest_pathlist.return.values", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1437, "end_line": 1455, "span_ids": ["Config._getconftest_pathlist"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _getconftest_pathlist(self, name: str, path: Path) -> Optional[List[Path]]:\n try:\n mod, relroots = self.pluginmanager._rget_with_confmod(\n name, path, self.getoption(\"importmode\")\n )\n except KeyError:\n return None\n modpath = Path(mod.__file__).parent\n values: List[Path] = []\n for relroot in relroots:\n if isinstance(relroot, Path):\n pass\n elif isinstance(relroot, py.path.local):\n relroot = Path(relroot)\n else:\n relroot = relroot.replace(\"/\", os.sep)\n relroot = absolutepath(modpath / relroot)\n values.append(relroot)\n return values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._get_override_ini_value_Config._get_override_ini_value.return.value": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._get_override_ini_value_Config._get_override_ini_value.return.value", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1457, "end_line": 1474, "span_ids": ["Config._get_override_ini_value"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _get_override_ini_value(self, name: str) -> Optional[str]:\n value = None\n # override_ini is a list of \"ini=value\" options.\n # Always use the last item if multiple values are set for same ini-name,\n # e.g. 
-o foo=bar1 -o foo=bar2 will set foo to bar2.\n for ini_config in self._override_ini:\n try:\n key, user_ini_value = ini_config.split(\"=\", 1)\n except ValueError as e:\n raise UsageError(\n \"-o/--override-ini expects option=value style (got: {!r}).\".format(\n ini_config\n )\n ) from e\n else:\n if key == name:\n value = user_ini_value\n return value", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.getgroup_Parser.getgroup.return.group": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.getgroup_Parser.getgroup.return.group", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 60, "end_line": 83, "span_ids": ["Parser.getgroup"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Parser:\n\n def getgroup(\n self, name: str, description: str = \"\", after: Optional[str] = None\n ) -> \"OptionGroup\":\n \"\"\"Get (or create) a named option Group.\n\n :name: Name of the option group.\n :description: Long description for --help output.\n :after: Name of another group, used for ordering --help output.\n\n The returned group object has an ``addoption`` method with the same\n signature as :py:func:`parser.addoption\n <_pytest.config.argparsing.Parser.addoption>` but will be shown in the\n respective group in the output of ``pytest. 
--help``.\n \"\"\"\n for group in self._groups:\n if group.name == name:\n return group\n group = OptionGroup(name, description, parser=self)\n i = 0\n for i, grp in enumerate(self._groups):\n if grp.name == after:\n break\n self._groups.insert(i + 1, group)\n return group", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.addini_ArgumentError.__str__.if_self_option_id_.else_.return.self_msg": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.addini_ArgumentError.__str__.if_self_option_id_.else_.return.self_msg", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 161, "end_line": 198, "span_ids": ["ArgumentError.__str__", "ArgumentError", "Parser.addini"], "tokens": 325}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Parser:\n\n def addini(\n self,\n name: str,\n help: str,\n type: Optional[\n \"Literal['string', 'pathlist', 'args', 'linelist', 'bool']\"\n ] = None,\n default=None,\n ) -> None:\n \"\"\"Register an ini-file option.\n\n :name: Name of the ini-variable.\n :type: Type of the variable, can be ``string``, ``pathlist``, ``args``,\n ``linelist`` or ``bool``. 
Defaults to ``string`` if ``None`` or\n not passed.\n :default: Default value if no ini-file option exists but is queried.\n\n The value of ini-variables can be retrieved via a call to\n :py:func:`config.getini(name) <_pytest.config.Config.getini>`.\n \"\"\"\n assert type in (None, \"string\", \"pathlist\", \"args\", \"linelist\", \"bool\")\n self._inidict[name] = (help, type, default)\n self._ininames.append(name)\n\n\nclass ArgumentError(Exception):\n \"\"\"Raised if an Argument instance is created with invalid or\n inconsistent arguments.\"\"\"\n\n def __init__(self, msg: str, option: Union[\"Argument\", str]) -> None:\n self.msg = msg\n self.option_id = str(option)\n\n def __str__(self) -> str:\n if self.option_id:\n return f\"option {self.option_id}: {self.msg}\"\n else:\n return self.msg", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument_Argument.names.return.self__short_opts_self__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument_Argument.names.return.self__short_opts_self__", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 201, "end_line": 262, "span_ids": ["Argument.names", "Argument"], "tokens": 514}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Argument:\n \"\"\"Class that mimics the necessary behaviour of optparse.Option.\n\n It's currently a least effort implementation and ignoring choices\n and integer prefixes.\n\n https://docs.python.org/3/library/optparse.html#optparse-standard-option-types\n \"\"\"\n\n _typ_map = {\"int\": int, \"string\": str, \"float\": float, \"complex\": complex}\n\n def __init__(self, *names: str, **attrs: Any) -> None:\n \"\"\"Store parms in private vars for use in add_argument.\"\"\"\n self._attrs = attrs\n self._short_opts: List[str] = []\n self._long_opts: List[str] = []\n if \"%default\" in (attrs.get(\"help\") or \"\"):\n warnings.warn(ARGUMENT_PERCENT_DEFAULT, stacklevel=3)\n try:\n typ = attrs[\"type\"]\n except KeyError:\n pass\n else:\n # This might raise a keyerror as well, don't want to catch that.\n if isinstance(typ, str):\n if typ == \"choice\":\n warnings.warn(\n ARGUMENT_TYPE_STR_CHOICE.format(typ=typ, names=names),\n stacklevel=4,\n )\n # argparse expects a type here take it from\n # the type of the first element\n attrs[\"type\"] = type(attrs[\"choices\"][0])\n else:\n warnings.warn(\n ARGUMENT_TYPE_STR.format(typ=typ, names=names), stacklevel=4\n )\n attrs[\"type\"] = Argument._typ_map[typ]\n # Used in test_parseopt -> test_parse_defaultgetter.\n self.type = attrs[\"type\"]\n else:\n self.type = typ\n try:\n # Attribute existence is tested in Config._processopt.\n self.default = attrs[\"default\"]\n except KeyError:\n pass\n self._set_opt_strings(names)\n dest: Optional[str] = attrs.get(\"dest\")\n if dest:\n self.dest = dest\n elif self._long_opts:\n self.dest = self._long_opts[0][2:].replace(\"-\", \"_\")\n else:\n 
try:\n self.dest = self._short_opts[0][1:]\n except IndexError as e:\n self.dest = \"???\" # Needed for the error repr.\n raise ArgumentError(\"need a long or short option\", self) from e\n\n def names(self) -> List[str]:\n return self._short_opts + self._long_opts", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument.attrs_Argument.attrs.return.self__attrs": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument.attrs_Argument.attrs.return.self__attrs", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 264, "end_line": 278, "span_ids": ["Argument.attrs"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Argument:\n\n def attrs(self) -> Mapping[str, Any]:\n # Update any attributes set by processopt.\n attrs = \"default dest help\".split()\n attrs.append(self.dest)\n for attr in attrs:\n try:\n self._attrs[attr] = getattr(self, attr)\n except AttributeError:\n pass\n if self._attrs.get(\"help\"):\n a = self._attrs[\"help\"]\n a = a.replace(\"%default\", \"%(default)s\")\n # a = a.replace('%prog', '%(prog)s')\n self._attrs[\"help\"] = a\n return self._attrs", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument._set_opt_strings_Argument._set_opt_strings.for_opt_in_opts_.if_len_opt_2_.else_.self__long_opts_append_op": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument._set_opt_strings_Argument._set_opt_strings.for_opt_in_opts_.if_len_opt_2_.else_.self__long_opts_append_op", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 280, "end_line": 307, "span_ids": ["Argument._set_opt_strings"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Argument:\n\n def _set_opt_strings(self, opts: Sequence[str]) -> None:\n \"\"\"Directly from optparse.\n\n Might not be necessary as this is passed to argparse later on.\n \"\"\"\n for opt in opts:\n if len(opt) < 2:\n raise ArgumentError(\n \"invalid option string %r: \"\n \"must be at least two characters long\" % opt,\n self,\n )\n elif len(opt) == 2:\n if not (opt[0] == \"-\" and opt[1] != \"-\"):\n raise ArgumentError(\n \"invalid short option string %r: \"\n \"must be of the form -x, (x any non-dash char)\" % opt,\n self,\n )\n 
self._short_opts.append(opt)\n else:\n if not (opt[0:2] == \"--\" and opt[2] != \"-\"):\n raise ArgumentError(\n \"invalid long option string %r: \"\n \"must start with --, followed by non-dash\" % opt,\n self,\n )\n self._long_opts.append(opt)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument.__repr___Argument.__repr__.return._Argument_format_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Argument.__repr___Argument.__repr__.return._Argument_format_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 309, "end_line": 320, "span_ids": ["Argument.__repr__"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Argument:\n\n def __repr__(self) -> str:\n args: List[str] = []\n if self._short_opts:\n args += [\"_short_opts: \" + repr(self._short_opts)]\n if self._long_opts:\n args += [\"_long_opts: \" + repr(self._long_opts)]\n args += [\"dest: \" + repr(self.dest)]\n if hasattr(self, \"type\"):\n args += [\"type: \" + repr(self.type)]\n if hasattr(self, \"default\"):\n args += [\"default: \" + repr(self.default)]\n return \"Argument({})\".format(\", \".join(args))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/exceptions.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/exceptions.py__", "embedding": null, "metadata": {"file_path": "src/_pytest/config/exceptions.py", "file_name": "exceptions.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 12, "span_ids": ["PrintHelp", "UsageError", "imports"], "tokens": 52}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from _pytest.compat import final\n\n\n@final\nclass UsageError(Exception):\n \"\"\"Error in pytest usage or invocation.\"\"\"\n\n\nclass PrintHelp(Exception):\n \"\"\"Raised when pytest should print its help to skip the rest of the\n argument parsing and validation.\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_get_common_ancestor_get_common_ancestor.return.common_ancestor": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_get_common_ancestor_get_common_ancestor.return.common_ancestor", "embedding": null, "metadata": {"file_path": "src/_pytest/config/findpaths.py", "file_name": "findpaths.py", "file_type": "text/x-python", "category": "implementation", "start_line": 110, "end_line": 130, "span_ids": ["get_common_ancestor"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_common_ancestor(paths: Iterable[Path]) -> Path:\n common_ancestor: Optional[Path] = None\n for path in paths:\n if not path.exists():\n continue\n if common_ancestor is None:\n common_ancestor = path\n else:\n if common_ancestor in path.parents or path == common_ancestor:\n continue\n elif path in common_ancestor.parents:\n common_ancestor = path\n else:\n shared = commonpath(path, common_ancestor)\n if shared is not None:\n common_ancestor = shared\n if common_ancestor is None:\n common_ancestor = Path.cwd()\n elif common_ancestor.is_file():\n common_ancestor = common_ancestor.parent\n return common_ancestor", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_determine_setup_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_determine_setup_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/findpaths.py", "file_name": "findpaths.py", "file_type": "text/x-python", "category": "implementation", "start_line": 166, "end_line": 210, "span_ids": ["determine_setup"], "tokens": 410}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def determine_setup(\n inifile: Optional[str],\n args: Sequence[str],\n rootdir_cmd_arg: Optional[str] = None,\n config: Optional[\"Config\"] = None,\n) -> Tuple[Path, Optional[Path], Dict[str, Union[str, List[str]]]]:\n rootdir = None\n dirs = get_dirs_from_args(args)\n if inifile:\n inipath_ = absolutepath(inifile)\n inipath: Optional[Path] = inipath_\n inicfg = load_config_dict_from_file(inipath_) or {}\n if rootdir_cmd_arg is None:\n rootdir = get_common_ancestor(dirs)\n else:\n ancestor = get_common_ancestor(dirs)\n rootdir, inipath, inicfg = locate_config([ancestor])\n if rootdir is None and rootdir_cmd_arg is None:\n for possible_rootdir in (ancestor, *ancestor.parents):\n if (possible_rootdir / \"setup.py\").is_file():\n rootdir = possible_rootdir\n break\n else:\n if dirs != [ancestor]:\n rootdir, inipath, inicfg = locate_config(dirs)\n if rootdir is None:\n if config is not None:\n cwd = config.invocation_params.dir\n else:\n cwd = Path.cwd()\n rootdir = get_common_ancestor([cwd, ancestor])\n is_fs_root = os.path.splitdrive(str(rootdir))[1] == \"/\"\n if is_fs_root:\n rootdir = ancestor\n if rootdir_cmd_arg:\n rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg))\n if 
not rootdir.is_dir():\n raise UsageError(\n \"Directory '{}' not found. Check your '--rootdir' option.\".format(\n rootdir\n )\n )\n assert rootdir is not None\n return rootdir, inipath, inicfg or {}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytest_addoption_pytest_addoption.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytest_addoption_pytest_addoption.None_2", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 38, "end_line": 59, "span_ids": ["pytest_addoption"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"general\")\n group._addoption(\n \"--pdb\",\n dest=\"usepdb\",\n action=\"store_true\",\n help=\"start the interactive Python debugger on errors or KeyboardInterrupt.\",\n )\n group._addoption(\n \"--pdbcls\",\n dest=\"usepdb_cls\",\n metavar=\"modulename:classname\",\n type=_validate_usepdb_cls,\n help=\"start a custom interactive Python debugger on errors. \"\n \"For example: --pdbcls=IPython.terminal.debugger:TerminalPdb\",\n )\n group._addoption(\n \"--trace\",\n dest=\"trace\",\n action=\"store_true\",\n help=\"Immediately break when running each test.\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytest_configure_pytest_configure.config__cleanup_append_fi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytest_configure_pytest_configure.config__cleanup_append_fi", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 62, "end_line": 86, "span_ids": ["pytest_configure"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_configure(config: Config) -> None:\n import pdb\n\n if config.getvalue(\"trace\"):\n config.pluginmanager.register(PdbTrace(), \"pdbtrace\")\n if config.getvalue(\"usepdb\"):\n config.pluginmanager.register(PdbInvoke(), \"pdbinvoke\")\n\n pytestPDB._saved.append(\n (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config)\n )\n pdb.set_trace = pytestPDB.set_trace\n pytestPDB._pluginmanager = config.pluginmanager\n pytestPDB._config = config\n\n # NOTE: not using pytest_unconfigure, since it might get called although\n # pytest_configure was not (if another plugin raises UsageError).\n def fin() -> None:\n (\n pdb.set_trace,\n 
pytestPDB._pluginmanager,\n pytestPDB._config,\n ) = pytestPDB._saved.pop()\n\n config._cleanup.append(fin)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_PdbInvoke_PdbTrace.pytest_pyfunc_call.yield": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_PdbInvoke_PdbTrace.pytest_pyfunc_call.yield", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 285, "end_line": 307, "span_ids": ["PdbInvoke.pytest_internalerror", "PdbInvoke.pytest_exception_interact", "PdbTrace.pytest_pyfunc_call", "PdbTrace", "PdbInvoke"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PdbInvoke:\n def pytest_exception_interact(\n self, node: Node, call: \"CallInfo[Any]\", report: BaseReport\n ) -> None:\n capman = node.config.pluginmanager.getplugin(\"capturemanager\")\n if capman:\n capman.suspend_global_capture(in_=True)\n out, err = capman.read_global_capture()\n sys.stdout.write(out)\n sys.stdout.write(err)\n assert call.excinfo is not None\n _enter_pdb(node, call.excinfo, report)\n\n def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None:\n tb = _postmortem_traceback(excinfo)\n post_mortem(tb)\n\n\nclass PdbTrace:\n @hookimpl(hookwrapper=True)\n def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, None, None]:\n wrap_pytest_function_for_tracing(pyfuncitem)\n yield", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__enter_pdb__enter_pdb.return.rep": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__enter_pdb__enter_pdb.return.rep", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 326, "end_line": 354, "span_ids": ["_enter_pdb"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _enter_pdb(\n node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport\n) -> BaseReport:\n # XXX we re-use the TerminalReporter's terminalwriter\n # because this seems to avoid some encoding related troubles\n # for not completely clear reasons.\n tw = node.config.pluginmanager.getplugin(\"terminalreporter\")._tw\n tw.line()\n\n showcapture = node.config.option.showcapture\n\n for sectionname, content in (\n (\"stdout\", rep.capstdout),\n (\"stderr\", rep.capstderr),\n (\"log\", rep.caplog),\n ):\n if showcapture in (sectionname, \"all\") and content:\n tw.sep(\">\", 
\"captured \" + sectionname)\n if content[-1:] == \"\\n\":\n content = content[:-1]\n tw.line(content)\n\n tw.sep(\">\", \"traceback\")\n rep.toterminal(tw)\n tw.sep(\">\", \"entering PDB\")\n tb = _postmortem_traceback(excinfo)\n rep._pdbshown = True # type: ignore[attr-defined]\n post_mortem(tb)\n return rep", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__postmortem_traceback_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__postmortem_traceback_", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 357, "end_line": 379, "span_ids": ["_postmortem_traceback", "post_mortem"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _postmortem_traceback(excinfo: ExceptionInfo[BaseException]) -> types.TracebackType:\n from doctest import UnexpectedException\n\n if isinstance(excinfo.value, UnexpectedException):\n # A doctest.UnexpectedException is not useful for post_mortem.\n # Use the underlying exception instead:\n return excinfo.value.exc_info[2]\n elif isinstance(excinfo.value, ConftestImportFailure):\n # A config.ConftestImportFailure is not useful for post_mortem.\n # Use the underlying exception instead:\n return excinfo.value.excinfo[2]\n else:\n assert excinfo._excinfo is not None\n return excinfo._excinfo[2]\n\n\ndef post_mortem(t: types.TracebackType) -> None:\n p = pytestPDB._init_pdb(\"post_mortem\")\n p.reset()\n p.interaction(None, t)\n if p.quitting:\n outcomes.exit(\"Quitting debugger\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem.repr_failure_DoctestItem.reportinfo.return.self_fspath_self_dtest_l": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem.repr_failure_DoctestItem.reportinfo.return.self_fspath_self_dtest_l", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 311, "end_line": 381, "span_ids": ["DoctestItem.repr_failure", "DoctestItem.reportinfo"], "tokens": 611}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DoctestItem(pytest.Item):\n def repr_failure( # type: ignore[override]\n self,\n excinfo: ExceptionInfo[BaseException],\n ) -> Union[str, TerminalRepr]:\n import doctest\n\n failures: Optional[\n Sequence[Union[doctest.DocTestFailure, doctest.UnexpectedException]]\n ] = None\n if isinstance(\n excinfo.value, (doctest.DocTestFailure, 
doctest.UnexpectedException)\n ):\n failures = [excinfo.value]\n elif isinstance(excinfo.value, MultipleDoctestFailures):\n failures = excinfo.value.failures\n\n if failures is not None:\n reprlocation_lines = []\n for failure in failures:\n example = failure.example\n test = failure.test\n filename = test.filename\n if test.lineno is None:\n lineno = None\n else:\n lineno = test.lineno + example.lineno + 1\n message = type(failure).__name__\n # TODO: ReprFileLocation doesn't expect a None lineno.\n reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type]\n checker = _get_checker()\n report_choice = _get_report_choice(\n self.config.getoption(\"doctestreport\")\n )\n if lineno is not None:\n assert failure.test.docstring is not None\n lines = failure.test.docstring.splitlines(False)\n # add line numbers to the left of the error message\n assert test.lineno is not None\n lines = [\n \"%03d %s\" % (i + test.lineno + 1, x)\n for (i, x) in enumerate(lines)\n ]\n # trim docstring error lines to 10\n lines = lines[max(example.lineno - 9, 0) : example.lineno + 1]\n else:\n lines = [\n \"EXAMPLE LOCATION UNKNOWN, not showing all tests of that example\"\n ]\n indent = \">>>\"\n for line in example.source.splitlines():\n lines.append(f\"??? {indent} {line}\")\n indent = \"...\"\n if isinstance(failure, doctest.DocTestFailure):\n lines += checker.output_difference(\n example, failure.got, report_choice\n ).split(\"\\n\")\n else:\n inner_excinfo = ExceptionInfo(failure.exc_info)\n lines += [\"UNEXPECTED EXCEPTION: %s\" % repr(inner_excinfo.value)]\n lines += [\n x.strip(\"\\n\")\n for x in traceback.format_exception(*failure.exc_info)\n ]\n reprlocation_lines.append((reprlocation, lines))\n return ReprFailDoctest(reprlocation_lines)\n else:\n return super().repr_failure(excinfo)\n\n def reportinfo(self):\n assert self.dtest is not None\n return self.fspath, self.dtest.lineno, \"[doctest] %s\" % self.name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_flag_lookup__get_flag_lookup.return.dict_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_flag_lookup__get_flag_lookup.return.dict_", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 312, "end_line": 325, "span_ids": ["_get_flag_lookup"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_flag_lookup() -> Dict[str, int]:\n import doctest\n\n return dict(\n DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,\n DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,\n NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,\n ELLIPSIS=doctest.ELLIPSIS,\n IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,\n COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,\n ALLOW_UNICODE=_get_allow_unicode_flag(),\n ALLOW_BYTES=_get_allow_bytes_flag(),\n NUMBER=_get_number_flag(),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_get_optionflags__get_continue_on_failure.return.continue_on_failure": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_get_optionflags__get_continue_on_failure.return.continue_on_failure", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 400, "end_line": 416, "span_ids": ["_get_continue_on_failure", "get_optionflags"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_optionflags(parent):\n optionflags_str = parent.config.getini(\"doctest_optionflags\")\n flag_lookup_table = _get_flag_lookup()\n flag_acc = 0\n for flag in optionflags_str:\n flag_acc |= flag_lookup_table[flag]\n return flag_acc\n\n\ndef _get_continue_on_failure(config):\n continue_on_failure = config.getvalue(\"doctest_continue_on_failure\")\n if continue_on_failure:\n # We need to turn off this if we use pdb since we should stop at\n # the first failure.\n if config.getvalue(\"usepdb\"):\n continue_on_failure = False\n return continue_on_failure", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__check_all_skipped__is_mocked.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__check_all_skipped__is_mocked.return._", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 450, "end_line": 466, "span_ids": ["_is_mocked", "_check_all_skipped"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _check_all_skipped(test: \"doctest.DocTest\") -> None:\n \"\"\"Raise pytest.skip() if all examples in the given DocTest have the SKIP\n option set.\"\"\"\n import doctest\n\n all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)\n if all_skipped:\n pytest.skip(\"all tests skipped by +SKIP option\")\n\n\ndef _is_mocked(obj: object) -> bool:\n \"\"\"Return if an object is possibly a mock object by checking the\n existence of a highly improbable attribute.\"\"\"\n return (\n safe_getattr(obj, \"pytest_mock_example_attribute_that_shouldnt_exist\", None)\n is not None\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestModule_DoctestModule.collect.MockAwareDocTestFinder._find.with__patch_unwrap_mock_a.doctest_DocTestFinder__fi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestModule_DoctestModule.collect.MockAwareDocTestFinder._find.with__patch_unwrap_mock_a.doctest_DocTestFinder__fi", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 499, "end_line": 535, "span_ids": ["DoctestModule.collect.MockAwareDocTestFinder", "DoctestModule.collect", "DoctestModule.collect.MockAwareDocTestFinder:2", "DoctestModule"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DoctestModule(pytest.Module):\n def collect(self) -> Iterable[DoctestItem]:\n import doctest\n\n class MockAwareDocTestFinder(doctest.DocTestFinder):\n \"\"\"A hackish doctest finder that overrides stdlib internals to fix a stdlib bug.\n\n https://github.com/pytest-dev/pytest/issues/3456\n https://bugs.python.org/issue25532\n \"\"\"\n\n def _find_lineno(self, obj, source_lines):\n \"\"\"Doctest code does not take into account `@property`, this\n is a hackish way to fix it.\n\n https://bugs.python.org/issue17446\n \"\"\"\n if isinstance(obj, property):\n obj = getattr(obj, \"fget\", obj)\n # Type ignored because this is a private function.\n return doctest.DocTestFinder._find_lineno( # type: ignore\n self,\n obj,\n source_lines,\n )\n\n def _find(\n self, tests, obj, name, module, source_lines, globs, seen\n ) -> None:\n if _is_mocked(obj):\n return\n with _patch_unwrap_mock_aware():\n\n # Type ignored because this is a private function.\n doctest.DocTestFinder._find( # type: ignore\n self, tests, obj, name, module, source_lines, globs, seen\n )\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__setup_fixtures__setup_fixtures.return.fixture_request": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__setup_fixtures__setup_fixtures.return.fixture_request", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 566, "end_line": 579, "span_ids": ["_setup_fixtures"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _setup_fixtures(doctest_item: DoctestItem) -> FixtureRequest:\n \"\"\"Used by DoctestTextfile and DoctestItem to setup fixture information.\"\"\"\n\n def func() -> None:\n pass\n\n doctest_item.funcargs = {} # type: ignore[attr-defined]\n fm = doctest_item.session._fixturemanager\n doctest_item._fixtureinfo = fm.getfixtureinfo( # type: ignore[attr-defined]\n node=doctest_item, func=func, cls=None, funcargs=False\n )\n fixture_request = FixtureRequest(doctest_item, _ispytest=True)\n fixture_request._fillfixtures()\n return fixture_request", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_reorder_items_atscope_reorder_items_atscope.return.items_done": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_reorder_items_atscope_reorder_items_atscope.return.items_done", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 302, "end_line": 345, "span_ids": ["reorder_items_atscope"], "tokens": 440}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def reorder_items_atscope(\n items: Dict[nodes.Item, None],\n argkeys_cache: Dict[int, Dict[nodes.Item, Dict[_Key, None]]],\n items_by_argkey: Dict[int, Dict[_Key, \"Deque[nodes.Item]\"]],\n scopenum: int,\n) -> Dict[nodes.Item, None]:\n if scopenum >= scopenum_function or len(items) < 3:\n return items\n ignore: Set[Optional[_Key]] = set()\n items_deque = deque(items)\n items_done: Dict[nodes.Item, None] = {}\n scoped_items_by_argkey = items_by_argkey[scopenum]\n scoped_argkeys_cache = argkeys_cache[scopenum]\n while items_deque:\n no_argkey_group: Dict[nodes.Item, None] = {}\n slicing_argkey = None\n while items_deque:\n item = items_deque.popleft()\n if item in items_done or item in no_argkey_group:\n continue\n argkeys = dict.fromkeys(\n (k for k in scoped_argkeys_cache.get(item, []) if k not in ignore), None\n )\n if not argkeys:\n no_argkey_group[item] = None\n 
else:\n slicing_argkey, _ = argkeys.popitem()\n # We don't have to remove relevant items from later in the\n # deque because they'll just be ignored.\n matching_items = [\n i for i in scoped_items_by_argkey[slicing_argkey] if i in items\n ]\n for i in reversed(matching_items):\n fix_cache_order(i, argkeys_cache, items_by_argkey)\n items_deque.appendleft(i)\n break\n if no_argkey_group:\n no_argkey_group = reorder_items_atscope(\n no_argkey_group, argkeys_cache, items_by_argkey, scopenum + 1\n )\n for item in no_argkey_group:\n items_done[item] = None\n ignore.add(slicing_argkey)\n return items_done", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FuncFixtureInfo_FuncFixtureInfo.prune_dependency_tree.self_names_closure_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FuncFixtureInfo_FuncFixtureInfo.prune_dependency_tree.self_names_closure_s", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 390, "end_line": 426, "span_ids": ["FuncFixtureInfo.prune_dependency_tree", "FuncFixtureInfo"], "tokens": 390}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(slots=True)\nclass FuncFixtureInfo:\n # Original function argument names.\n argnames = attr.ib(type=Tuple[str, ...])\n # Argnames that function immediately requires. These include argnames +\n # fixture names specified via usefixtures and via autouse=True in fixture\n # definitions.\n initialnames = attr.ib(type=Tuple[str, ...])\n names_closure = attr.ib(type=List[str])\n name2fixturedefs = attr.ib(type=Dict[str, Sequence[\"FixtureDef[Any]\"]])\n\n def prune_dependency_tree(self) -> None:\n \"\"\"Recompute names_closure from initialnames and name2fixturedefs.\n\n Can only reduce names_closure, which means that the new closure will\n always be a subset of the old one. The order is preserved.\n\n This method is needed because direct parametrization may shadow some\n of the fixtures that were included in the originally built dependency\n tree. In this way the dependency tree can get pruned, and the closure\n of argnames may get reduced.\n \"\"\"\n closure: Set[str] = set()\n working_set = set(self.initialnames)\n while working_set:\n argname = working_set.pop()\n # Argname may be smth not included in the original names_closure,\n # in which case we ignore it. 
This currently happens with pseudo\n # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'.\n # So they introduce the new dependency 'request' which might have\n # been missing in the original tree (closure).\n if argname not in closure and argname in self.names_closure:\n closure.add(argname)\n if argname in self.name2fixturedefs:\n working_set.update(self.name2fixturedefs[argname][-1].argnames)\n\n self.names_closure[:] = sorted(closure, key=self.names_closure.index)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest_FixtureRequest.node.return.self__getscopeitem_self_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest_FixtureRequest.node.return.self__getscopeitem_self_s", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 429, "end_line": 460, "span_ids": ["FixtureRequest.fixturenames", "FixtureRequest", "FixtureRequest.node"], "tokens": 312}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n \"\"\"A request for a fixture from a test or fixture function.\n\n A request object gives access to the requesting test context and has\n an optional ``param`` attribute in case the fixture is parametrized\n indirectly.\n \"\"\"\n\n def __init__(self, pyfuncitem, *, _ispytest: bool = False) -> None:\n check_ispytest(_ispytest)\n self._pyfuncitem = pyfuncitem\n #: Fixture for which this request is being performed.\n self.fixturename: Optional[str] = None\n #: Scope string, one of \"function\", \"class\", \"module\", \"session\".\n self.scope: _Scope = \"function\"\n self._fixture_defs: Dict[str, FixtureDef[Any]] = {}\n fixtureinfo: FuncFixtureInfo = pyfuncitem._fixtureinfo\n self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()\n self._arg2index: Dict[str, int] = {}\n self._fixturemanager: FixtureManager = pyfuncitem.session._fixturemanager\n\n @property\n def fixturenames(self) -> List[str]:\n \"\"\"Names of all active fixtures in this request.\"\"\"\n result = list(self._pyfuncitem._fixtureinfo.names_closure)\n result.extend(set(self._fixture_defs).difference(result))\n return result\n\n @property\n def node(self):\n \"\"\"Underlying collection node (depends on current request scope).\"\"\"\n return self._getscopeitem(self.scope)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._getnextfixturedef_FixtureRequest._getnextfixturedef.return.fixturedefs_index_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._getnextfixturedef_FixtureRequest._getnextfixturedef.return.fixturedefs_index_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", 
"file_type": "text/x-python", "category": "implementation", "start_line": 462, "end_line": 479, "span_ids": ["FixtureRequest._getnextfixturedef"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n def _getnextfixturedef(self, argname: str) -> \"FixtureDef[Any]\":\n fixturedefs = self._arg2fixturedefs.get(argname, None)\n if fixturedefs is None:\n # We arrive here because of a dynamic call to\n # getfixturevalue(argname) usage which was naturally\n # not known at parsing/collection time.\n assert self._pyfuncitem.parent is not None\n parentid = self._pyfuncitem.parent.nodeid\n fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid)\n # TODO: Fix this type ignore. Either add assert or adjust types.\n # Can this be None here?\n self._arg2fixturedefs[argname] = fixturedefs # type: ignore[assignment]\n # fixturedefs list is immutable so we maintain a decreasing index.\n index = self._arg2index.get(argname, 0) - 1\n if fixturedefs is None or (-index > len(fixturedefs)):\n raise FixtureLookupError(argname, self)\n self._arg2index[argname] = index\n return fixturedefs[index]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._compute_fixture_value_FixtureRequest._compute_fixture_value.None_1.finally_.self__schedule_finalizers": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._compute_fixture_value_FixtureRequest._compute_fixture_value.None_1.finally_.self__schedule_finalizers", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 617, "end_line": 690, "span_ids": ["FixtureRequest._compute_fixture_value"], "tokens": 629}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n def _compute_fixture_value(self, fixturedef: \"FixtureDef[object]\") -> None:\n \"\"\"Create a SubRequest based on \"self\" and call the execute method\n of the given FixtureDef object.\n\n This will force the FixtureDef object to throw away any previous\n results and compute a new fixture value, which will be stored into\n the FixtureDef object itself.\n \"\"\"\n # prepare a subrequest object before calling fixture function\n # (latter managed by fixturedef)\n argname = fixturedef.argname\n funcitem = self._pyfuncitem\n scope = fixturedef.scope\n try:\n param = funcitem.callspec.getparam(argname)\n except (AttributeError, ValueError):\n param = NOTSET\n param_index = 0\n has_params = fixturedef.params is not None\n fixtures_not_supported = getattr(funcitem, \"nofuncargs\", False)\n if has_params and fixtures_not_supported:\n msg = (\n \"{name} does not support fixtures, maybe unittest.TestCase 
subclass?\\n\"\n \"Node id: {nodeid}\\n\"\n \"Function type: {typename}\"\n ).format(\n name=funcitem.name,\n nodeid=funcitem.nodeid,\n typename=type(funcitem).__name__,\n )\n fail(msg, pytrace=False)\n if has_params:\n frame = inspect.stack()[3]\n frameinfo = inspect.getframeinfo(frame[0])\n source_path = absolutepath(frameinfo.filename)\n source_lineno = frameinfo.lineno\n try:\n source_path_str = str(\n source_path.relative_to(funcitem.config.rootpath)\n )\n except ValueError:\n source_path_str = str(source_path)\n msg = (\n \"The requested fixture has no parameter defined for test:\\n\"\n \" {}\\n\\n\"\n \"Requested fixture '{}' defined in:\\n{}\"\n \"\\n\\nRequested here:\\n{}:{}\".format(\n funcitem.nodeid,\n fixturedef.argname,\n getlocation(fixturedef.func, funcitem.config.rootdir),\n source_path_str,\n source_lineno,\n )\n )\n fail(msg, pytrace=False)\n else:\n param_index = funcitem.callspec.indices[argname]\n # If a parametrize invocation set a scope it will override\n # the static scope defined with the fixture function.\n paramscopenum = funcitem.callspec._arg2scopenum.get(argname)\n if paramscopenum is not None:\n scope = scopes[paramscopenum]\n\n subrequest = SubRequest(\n self, scope, param, param_index, fixturedef, _ispytest=True\n )\n\n # Check if a higher-level scoped fixture accesses a lower level one.\n subrequest._check_scope(argname, self.scope, scope)\n try:\n # Call the fixture function.\n fixturedef.execute(request=subrequest)\n finally:\n self._schedule_finalizers(fixturedef, subrequest)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureLookupError_FixtureLookupError.formatrepr.return.FixtureLookupErrorRepr_fs": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureLookupError_FixtureLookupError.formatrepr.return.FixtureLookupErrorRepr_fs", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 825, "end_line": 879, "span_ids": ["FixtureLookupError", "FixtureLookupError.formatrepr"], "tokens": 502}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass FixtureLookupError(LookupError):\n \"\"\"Could not return a requested fixture (missing or invalid).\"\"\"\n\n def __init__(\n self, argname: Optional[str], request: FixtureRequest, msg: Optional[str] = None\n ) -> None:\n self.argname = argname\n self.request = request\n self.fixturestack = request._get_fixturestack()\n self.msg = msg\n\n def formatrepr(self) -> \"FixtureLookupErrorRepr\":\n tblines: List[str] = []\n addline = tblines.append\n stack = [self.request._pyfuncitem.obj]\n stack.extend(map(lambda x: x.func, self.fixturestack))\n msg = self.msg\n if msg is not None:\n # The last fixture raise an error, let's present\n # it at the requesting side.\n stack = stack[:-1]\n for function in stack:\n fspath, lineno = getfslineno(function)\n try:\n lines, _ = inspect.getsourcelines(get_real_func(function))\n except (OSError, IndexError, 
TypeError):\n error_msg = \"file %s, line %s: source code not available\"\n addline(error_msg % (fspath, lineno + 1))\n else:\n addline(\"file {}, line {}\".format(fspath, lineno + 1))\n for i, line in enumerate(lines):\n line = line.rstrip()\n addline(\" \" + line)\n if line.lstrip().startswith(\"def\"):\n break\n\n if msg is None:\n fm = self.request._fixturemanager\n available = set()\n parentid = self.request._pyfuncitem.parent.nodeid\n for name, fixturedefs in fm._arg2fixturedefs.items():\n faclist = list(fm._matchfactories(fixturedefs, parentid))\n if faclist:\n available.add(name)\n if self.argname in available:\n msg = \" recursive dependency involving fixture '{}' detected\".format(\n self.argname\n )\n else:\n msg = f\"fixture '{self.argname}' not found\"\n msg += \"\\n available fixtures: {}\".format(\", \".join(sorted(available)))\n msg += \"\\n use 'pytest --fixtures [testpath]' for help on them.\"\n\n return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef_FixtureDef.addfinalizer.self__finalizers_append_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef_FixtureDef.addfinalizer.self__finalizers_append_f", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 980, "end_line": 1026, "span_ids": ["FixtureDef", "FixtureDef.addfinalizer"], "tokens": 398}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass FixtureDef(Generic[_FixtureValue]):\n \"\"\"A container for a factory definition.\"\"\"\n\n def __init__(\n self,\n fixturemanager: \"FixtureManager\",\n baseid: Optional[str],\n argname: str,\n func: \"_FixtureFunc[_FixtureValue]\",\n scope: \"Union[_Scope, Callable[[str, Config], _Scope]]\",\n params: Optional[Sequence[object]],\n unittest: bool = False,\n ids: Optional[\n Union[\n Tuple[Union[None, str, float, int, bool], ...],\n Callable[[Any], Optional[object]],\n ]\n ] = None,\n ) -> None:\n self._fixturemanager = fixturemanager\n self.baseid = baseid or \"\"\n self.has_location = baseid is not None\n self.func = func\n self.argname = argname\n if callable(scope):\n scope_ = _eval_scope_callable(scope, argname, fixturemanager.config)\n else:\n scope_ = scope\n self.scopenum = scope2index(\n # TODO: Check if the `or` here is really necessary.\n scope_ or \"function\", # type: ignore[unreachable]\n descr=f\"Fixture '{func.__name__}'\",\n where=baseid,\n )\n self.scope = scope_\n self.params: Optional[Sequence[object]] = params\n self.argnames: Tuple[str, ...] 
= getfuncargnames(\n func, name=argname, is_method=unittest\n )\n self.unittest = unittest\n self.ids = ids\n self.cached_result: Optional[_FixtureCachedResult[_FixtureValue]] = None\n self._finalizers: List[Callable[[], object]] = []\n\n def addfinalizer(self, finalizer: Callable[[], object]) -> None:\n self._finalizers.append(finalizer)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef.finish_FixtureDef.finish.try_.finally_.self._finalizers._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef.finish_FixtureDef.finish.try_.finally_.self._finalizers._", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1028, "end_line": 1049, "span_ids": ["FixtureDef.finish"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass FixtureDef(Generic[_FixtureValue]):\n\n def finish(self, request: SubRequest) -> None:\n exc = None\n try:\n while self._finalizers:\n try:\n func = self._finalizers.pop()\n func()\n except BaseException as e:\n # XXX Only first exception will be seen by user,\n # ideally all should be reported.\n if exc is None:\n exc = e\n if exc:\n raise exc\n finally:\n hook = self._fixturemanager.session.gethookproxy(request.node.fspath)\n hook.pytest_fixture_post_finalizer(fixturedef=self, request=request)\n # Even if finalization fails, we invalidate the cached fixture\n # value and remove all finalizers because they may be bound methods\n # which will keep instances alive.\n self.cached_result = None\n self._finalizers = []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_resolve_fixture_function_resolve_fixture_function.return.fixturefunc": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_resolve_fixture_function_resolve_fixture_function.return.fixturefunc", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1091, "end_line": 1115, "span_ids": ["resolve_fixture_function"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def resolve_fixture_function(\n fixturedef: FixtureDef[_FixtureValue], request: FixtureRequest\n) -> \"_FixtureFunc[_FixtureValue]\":\n \"\"\"Get the actual callable that can be called to obtain the fixture\n value, dealing with unittest-specific instances and bound methods.\"\"\"\n fixturefunc = 
fixturedef.func\n if fixturedef.unittest:\n if request.instance is not None:\n # Bind the unbound method to the TestCase instance.\n fixturefunc = fixturedef.func.__get__(request.instance) # type: ignore[union-attr]\n else:\n # The fixture function needs to be bound to the actual\n # request.instance so that code working with \"fixturedef\" behaves\n # as expected.\n if request.instance is not None:\n # Handle the case where fixture is defined not in a test class, but some other class\n # (for example a plugin class with a fixture), see #2270.\n if hasattr(fixturefunc, \"__self__\") and not isinstance(\n request.instance, fixturefunc.__self__.__class__ # type: ignore[union-attr]\n ):\n return fixturefunc\n fixturefunc = getimfunc(fixturedef.func)\n if fixturefunc != fixturedef.func:\n fixturefunc = fixturefunc.__get__(request.instance) # type: ignore[union-attr]\n return fixturefunc", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager_FixtureManager.__init__.session_config_pluginmana": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager_FixtureManager.__init__.session_config_pluginmana", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1399, "end_line": 1442, "span_ids": ["FixtureManager"], "tokens": 396}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n \"\"\"pytest fixture definitions and information is stored and managed\n from this class.\n\n During collection fm.parsefactories() is called multiple times to parse\n fixture function definitions into FixtureDef objects and internal\n data structures.\n\n During collection of test functions, metafunc-mechanics instantiate\n a FuncFixtureInfo object which is cached per node/func-name.\n This FuncFixtureInfo object is later retrieved by Function nodes\n which themselves offer a fixturenames attribute.\n\n The FuncFixtureInfo object holds information about fixtures and FixtureDefs\n relevant for a particular function. An initial list of fixtures is\n assembled like this:\n\n - ini-defined usefixtures\n - autouse-marked fixtures along the collection chain up from the function\n - usefixtures markers at module/class/function level\n - test function funcargs\n\n Subsequently the funcfixtureinfo.fixturenames attribute is computed\n as the closure of the fixtures needed to setup the initial fixtures,\n i.e. 
fixtures needed by fixture functions themselves are appended\n to the fixturenames list.\n\n Upon the test-setup phases all fixturenames are instantiated, retrieved\n by a lookup of their FuncFixtureInfo.\n \"\"\"\n\n FixtureLookupError = FixtureLookupError\n FixtureLookupErrorRepr = FixtureLookupErrorRepr\n\n def __init__(self, session: \"Session\") -> None:\n self.session = session\n self.config: Config = session.config\n self._arg2fixturedefs: Dict[str, List[FixtureDef[Any]]] = {}\n self._holderobjseen: Set[object] = set()\n # A mapping from a nodeid to a list of autouse fixtures it defines.\n self._nodeid_autousenames: Dict[str, List[str]] = {\n \"\": self.config.getini(\"usefixtures\"),\n }\n session.config.pluginmanager.register(self, \"funcmanage\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixtureinfo_FixtureManager.getfixtureinfo.return.FuncFixtureInfo_argnames_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixtureinfo_FixtureManager.getfixtureinfo.return.FuncFixtureInfo_argnames_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1463, "end_line": 1479, "span_ids": ["FixtureManager.getfixtureinfo"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n\n def getfixtureinfo(\n self, node: nodes.Node, func, cls, funcargs: bool = True\n ) -> FuncFixtureInfo:\n if funcargs and not getattr(node, \"nofuncargs\", False):\n argnames = getfuncargnames(func, name=node.name, cls=cls)\n else:\n argnames = ()\n\n usefixtures = tuple(\n arg for mark in node.iter_markers(name=\"usefixtures\") for arg in mark.args\n )\n initialnames = usefixtures + argnames\n fm = node.session._fixturemanager\n initialnames, names_closure, arg2fixturedefs = fm.getfixtureclosure(\n initialnames, node, ignore_args=self._get_direct_parametrize_args(node)\n )\n return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixtureclosure_FixtureManager.getfixtureclosure.return.initialnames_fixturename": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixtureclosure_FixtureManager.getfixtureclosure.return.initialnames_fixturename", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1510, "end_line": 1561, "span_ids": ["FixtureManager.getfixtureclosure"], "tokens": 464}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n\n def getfixtureclosure(\n self,\n fixturenames: Tuple[str, ...],\n parentnode: nodes.Node,\n ignore_args: Sequence[str] = (),\n ) -> Tuple[Tuple[str, ...], List[str], Dict[str, Sequence[FixtureDef[Any]]]]:\n # Collect the closure of all fixtures, starting with the given\n # fixturenames as the initial set. As we have to visit all\n # factory definitions anyway, we also return an arg2fixturedefs\n # mapping so that the caller can reuse it and does not have\n # to re-discover fixturedefs again for each fixturename\n # (discovering matching fixtures for a given name/node is expensive).\n\n parentid = parentnode.nodeid\n fixturenames_closure = list(self._getautousenames(parentid))\n\n def merge(otherlist: Iterable[str]) -> None:\n for arg in otherlist:\n if arg not in fixturenames_closure:\n fixturenames_closure.append(arg)\n\n merge(fixturenames)\n\n # At this point, fixturenames_closure contains what we call \"initialnames\",\n # which is a set of fixturenames the function immediately requests. We\n # need to return it as well, so save this.\n initialnames = tuple(fixturenames_closure)\n\n arg2fixturedefs: Dict[str, Sequence[FixtureDef[Any]]] = {}\n lastlen = -1\n while lastlen != len(fixturenames_closure):\n lastlen = len(fixturenames_closure)\n for argname in fixturenames_closure:\n if argname in ignore_args:\n continue\n if argname in arg2fixturedefs:\n continue\n fixturedefs = self.getfixturedefs(argname, parentid)\n if fixturedefs:\n arg2fixturedefs[argname] = fixturedefs\n merge(fixturedefs[-1].argnames)\n\n def sort_by_scope(arg_name: str) -> int:\n try:\n fixturedefs = arg2fixturedefs[arg_name]\n except KeyError:\n return scopes.index(\"function\")\n else:\n return fixturedefs[-1].scopenum\n\n fixturenames_closure.sort(key=sort_by_scope)\n return initialnames, fixturenames_closure, arg2fixturedefs", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.pytest_generate_tests_FixtureManager.pytest_collection_modifyitems.items_reorder_items_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.pytest_generate_tests_FixtureManager.pytest_collection_modifyitems.items_reorder_items_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1563, "end_line": 1610, "span_ids": ["FixtureManager.pytest_generate_tests", "FixtureManager.pytest_collection_modifyitems"], "tokens": 405}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n\n def pytest_generate_tests(self, metafunc: \"Metafunc\") -> None:\n \"\"\"Generate new tests based on parametrized fixtures used by the given metafunc\"\"\"\n\n def get_parametrize_mark_argnames(mark: Mark) -> Sequence[str]:\n args, _ = 
ParameterSet._parse_parametrize_args(*mark.args, **mark.kwargs)\n return args\n\n for argname in metafunc.fixturenames:\n # Get the FixtureDefs for the argname.\n fixture_defs = metafunc._arg2fixturedefs.get(argname)\n if not fixture_defs:\n # Will raise FixtureLookupError at setup time if not parametrized somewhere\n # else (e.g @pytest.mark.parametrize)\n continue\n\n # If the test itself parametrizes using this argname, give it\n # precedence.\n if any(\n argname in get_parametrize_mark_argnames(mark)\n for mark in metafunc.definition.iter_markers(\"parametrize\")\n ):\n continue\n\n # In the common case we only look at the fixture def with the\n # closest scope (last in the list). But if the fixture overrides\n # another fixture, while requesting the super fixture, keep going\n # in case the super fixture is parametrized (#1953).\n for fixturedef in reversed(fixture_defs):\n # Fixture is parametrized, apply it and stop.\n if fixturedef.params is not None:\n metafunc.parametrize(\n argname,\n fixturedef.params,\n indirect=True,\n scope=fixturedef.scope,\n ids=fixturedef.ids,\n )\n break\n\n # Not requesting the overridden super fixture, stop.\n if argname not in fixturedef.argnames:\n break\n\n # Try next super fixture, if any.\n\n def pytest_collection_modifyitems(self, items: List[nodes.Item]) -> None:\n # Separate parametrized setups.\n items[:] = reorder_items(items)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixturedefs_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.getfixturedefs_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1671, "end_line": 1693, "span_ids": ["FixtureManager.getfixturedefs", "FixtureManager._matchfactories"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n\n def getfixturedefs(\n self, argname: str, nodeid: str\n ) -> Optional[Sequence[FixtureDef[Any]]]:\n \"\"\"Get a list of fixtures which are applicable to the given node id.\n\n :param str argname: Name of the fixture to search for.\n :param str nodeid: Full node id of the requesting test.\n :rtype: Sequence[FixtureDef]\n \"\"\"\n try:\n fixturedefs = self._arg2fixturedefs[argname]\n except KeyError:\n return None\n return tuple(self._matchfactories(fixturedefs, nodeid))\n\n def _matchfactories(\n self, fixturedefs: Iterable[FixtureDef[Any]], nodeid: str\n ) -> Iterator[FixtureDef[Any]]:\n parentnodeids = set(nodes.iterparentnodeids(nodeid))\n for fixturedef in fixturedefs:\n if fixturedef.baseid in parentnodeids:\n yield fixturedef", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_addoption_pytest_addoption.None_5": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_addoption_pytest_addoption.None_5", "embedding": null, "metadata": {"file_path": "src/_pytest/helpconfig.py", "file_name": "helpconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 46, "end_line": 94, "span_ids": ["pytest_addoption"], "tokens": 329}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"debugconfig\")\n group.addoption(\n \"--version\",\n \"-V\",\n action=\"count\",\n default=0,\n dest=\"version\",\n help=\"display pytest version and information about plugins. \"\n \"When given twice, also display information about plugins.\",\n )\n group._addoption(\n \"-h\",\n \"--help\",\n action=HelpAction,\n dest=\"help\",\n help=\"show help message and configuration info\",\n )\n group._addoption(\n \"-p\",\n action=\"append\",\n dest=\"plugins\",\n default=[],\n metavar=\"name\",\n help=\"early-load given plugin module name or entry point (multi-allowed).\\n\"\n \"To avoid loading of plugins, use the `no:` prefix, e.g. \"\n \"`no:doctest`.\",\n )\n group.addoption(\n \"--traceconfig\",\n \"--trace-config\",\n action=\"store_true\",\n default=False,\n help=\"trace considerations of conftest.py files.\",\n )\n group.addoption(\n \"--debug\",\n action=\"store_true\",\n dest=\"debug\",\n default=False,\n help=\"store internal tracing debug information in 'pytestdebug.log'.\",\n )\n group._addoption(\n \"-o\",\n \"--override-ini\",\n dest=\"override_ini\",\n action=\"append\",\n help='override ini option with \"option=value\" style, e.g. 
`-o xfail_strict=True -o cache_dir=cache`.',\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_cmdline_parse_pytest_cmdline_parse.if_config_option_debug_.config_add_cleanup_unset_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_cmdline_parse_pytest_cmdline_parse.if_config_option_debug_.config_add_cleanup_unset_", "embedding": null, "metadata": {"file_path": "src/_pytest/helpconfig.py", "file_name": "helpconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 125, "span_ids": ["pytest_cmdline_parse"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.hookimpl(hookwrapper=True)\ndef pytest_cmdline_parse():\n outcome = yield\n config: Config = outcome.get_result()\n if config.option.debug:\n path = os.path.abspath(\"pytestdebug.log\")\n debugfile = open(path, \"w\")\n debugfile.write(\n \"versions pytest-%s, py-%s, \"\n \"python-%s\\ncwd=%s\\nargs=%s\\n\\n\"\n % (\n pytest.__version__,\n py.__version__,\n \".\".join(map(str, sys.version_info)),\n os.getcwd(),\n config.invocation_params.args,\n )\n )\n config.trace.root.setwriter(debugfile.write)\n undo_tracing = config.pluginmanager.enable_tracing()\n sys.stderr.write(\"writing pytestdebug information to %s\\n\" % path)\n\n def unset_tracing() -> None:\n debugfile.close()\n sys.stderr.write(\"wrote pytestdebug information to %s\\n\" % debugfile.name)\n config.trace.root.setwriter(None)\n undo_tracing()\n\n config.add_cleanup(unset_tracing)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_showhelp_showhelp.return": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_showhelp_showhelp.return", "embedding": null, "metadata": {"file_path": "src/_pytest/helpconfig.py", "file_name": "helpconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 155, "end_line": 225, "span_ids": ["showhelp"], "tokens": 576}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def showhelp(config: Config) -> None:\n import textwrap\n\n reporter = config.pluginmanager.get_plugin(\"terminalreporter\")\n tw = reporter._tw\n tw.write(config._parser.optparser.format_help())\n tw.line()\n tw.line(\n \"[pytest] ini-options in the first pytest.ini|tox.ini|setup.cfg file found:\"\n )\n tw.line()\n\n columns = tw.fullwidth # costly call\n indent_len = 24 # based on argparse's max_help_position=24\n indent = \" \" * indent_len\n for name in config._parser._ininames:\n help, type, default = 
config._parser._inidict[name]\n if type is None:\n type = \"string\"\n if help is None:\n raise TypeError(f\"help argument cannot be None for {name}\")\n spec = f\"{name} ({type}):\"\n tw.write(\" %s\" % spec)\n spec_len = len(spec)\n if spec_len > (indent_len - 3):\n # Display help starting at a new line.\n tw.line()\n helplines = textwrap.wrap(\n help,\n columns,\n initial_indent=indent,\n subsequent_indent=indent,\n break_on_hyphens=False,\n )\n\n for line in helplines:\n tw.line(line)\n else:\n # Display help starting after the spec, following lines indented.\n tw.write(\" \" * (indent_len - spec_len - 2))\n wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False)\n\n if wrapped:\n tw.line(wrapped[0])\n for line in wrapped[1:]:\n tw.line(indent + line)\n\n tw.line()\n tw.line(\"environment variables:\")\n vars = [\n (\"PYTEST_ADDOPTS\", \"extra command line options\"),\n (\"PYTEST_PLUGINS\", \"comma-separated plugins to load during startup\"),\n (\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", \"set to disable plugin auto-loading\"),\n (\"PYTEST_DEBUG\", \"set to enable debug tracing of pytest's internals\"),\n ]\n for name, help in vars:\n tw.line(f\" {name:<24} {help}\")\n tw.line()\n tw.line()\n\n tw.line(\"to see available markers type: pytest --markers\")\n tw.line(\"to see available fixtures type: pytest --fixtures\")\n tw.line(\n \"(shown according to specified file_or_dir or current dir \"\n \"if not specified; fixtures with leading '_' are only shown \"\n \"with the '-v' option\"\n )\n\n for warningreport in reporter.stats.get(\"warnings\", []):\n tw.line(\"warning : \" + warningreport.message, red=True)\n return", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_cmdline_preparse_pytest_cmdline_preparse._Deprecated_modif": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_cmdline_preparse_pytest_cmdline_preparse._Deprecated_modif", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 160, "end_line": 171, "span_ids": ["pytest_cmdline_preparse"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_cmdline_preparse(config: \"Config\", args: List[str]) -> None:\n \"\"\"(**Deprecated**) modify command line arguments before option parsing.\n\n This hook is considered deprecated and will be removed in a future pytest version. Consider\n using :func:`pytest_load_initial_conftests` instead.\n\n .. 
note::\n This hook will not be called for ``conftest.py`` files, only for setuptools plugins.\n\n :param _pytest.config.Config config: The pytest config object.\n :param List[str] args: Arguments passed on the command line.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__warn_incompatibility_with_xunit2__warn_incompatibility_with_xunit2.if_xml_is_not_None_and_xm.request_node_warn_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__warn_incompatibility_with_xunit2__warn_incompatibility_with_xunit2.if_xml_is_not_None_and_xm.request_node_warn_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 264, "end_line": 278, "span_ids": ["_warn_incompatibility_with_xunit2"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _warn_incompatibility_with_xunit2(\n request: FixtureRequest, fixture_name: str\n) -> None:\n \"\"\"Emit a PytestWarning about the given fixture being incompatible with newer xunit revisions.\"\"\"\n from _pytest.warning_types import PytestWarning\n\n xml = request.config._store.get(xml_key, None)\n if xml is not None and xml.family not in (\"xunit1\", \"legacy\"):\n request.node.warn(\n PytestWarning(\n \"{fixture_name} is incompatible with junit_family '{family}' (use 'legacy' or 'xunit1')\".format(\n fixture_name=fixture_name, family=xml.family\n )\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_property_record_property.return.append_property": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_property_record_property.return.append_property", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 304, "end_line": 324, "span_ids": ["record_property"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef record_property(request: FixtureRequest) -> Callable[[str, object], None]:\n \"\"\"Add extra properties to the calling test.\n\n User properties become part of the test report and are available to the\n configured reporters, like JUnit XML.\n\n The fixture is callable with ``name, value``. 
The value is automatically\n XML-encoded.\n\n Example::\n\n def test_function(record_property):\n record_property(\"example_key\", 1)\n \"\"\"\n _warn_incompatibility_with_xunit2(request, \"record_property\")\n\n def append_property(name: str, value: object) -> None:\n request.node.user_properties.append((name, value))\n\n return append_property", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_configure_pytest_configure.if_xmlpath_and_not_hasatt.config_pluginmanager_regi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_configure_pytest_configure.if_xmlpath_and_not_hasatt.config_pluginmanager_regi", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 426, "end_line": 440, "span_ids": ["pytest_configure"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_configure(config: Config) -> None:\n xmlpath = config.option.xmlpath\n # Prevent opening xmllog on worker nodes (xdist).\n if xmlpath and not hasattr(config, \"workerinput\"):\n junit_family = config.getini(\"junit_family\")\n config._store[xml_key] = LogXML(\n xmlpath,\n config.option.junitprefix,\n config.getini(\"junit_suite_name\"),\n config.getini(\"junit_logging\"),\n config.getini(\"junit_duration_report\"),\n junit_family,\n config.getini(\"junit_log_passing_tests\"),\n )\n config.pluginmanager.register(config._store[xml_key])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_unconfigure_mangle_test_address.return.names": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_unconfigure_mangle_test_address.return.names", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 443, "end_line": 462, "span_ids": ["pytest_unconfigure", "mangle_test_address"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_unconfigure(config: Config) -> None:\n xml = config._store.get(xml_key, None)\n if xml:\n del config._store[xml_key]\n config.pluginmanager.unregister(xml)\n\n\ndef mangle_test_address(address: str) -> List[str]:\n path, possible_open_bracket, params = address.partition(\"[\")\n names = path.split(\"::\")\n try:\n names.remove(\"()\")\n except ValueError:\n pass\n # Convert file path to dotted path.\n names[0] = names[0].replace(nodes.SEP, \".\")\n names[0] = re.sub(r\"\\.py$\", 
\"\", names[0])\n # Put any params back.\n names[-1] += possible_open_bracket + params\n return names", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML_LogXML.finalize.if_reporter_is_not_None_.reporter_finalize_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML_LogXML.finalize.if_reporter_is_not_None_.reporter_finalize_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 465, "end_line": 507, "span_ids": ["LogXML.finalize", "LogXML"], "tokens": 374}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LogXML:\n def __init__(\n self,\n logfile,\n prefix: Optional[str],\n suite_name: str = \"pytest\",\n logging: str = \"no\",\n report_duration: str = \"total\",\n family=\"xunit1\",\n log_passing_tests: bool = True,\n ) -> None:\n logfile = os.path.expanduser(os.path.expandvars(logfile))\n self.logfile = os.path.normpath(os.path.abspath(logfile))\n self.prefix = prefix\n self.suite_name = suite_name\n self.logging = logging\n self.log_passing_tests = log_passing_tests\n self.report_duration = report_duration\n self.family = family\n self.stats: Dict[str, int] = dict.fromkeys(\n [\"error\", \"passed\", \"failure\", \"skipped\"], 0\n )\n self.node_reporters: Dict[\n Tuple[Union[str, TestReport], object], _NodeReporter\n ] = {}\n self.node_reporters_ordered: List[_NodeReporter] = []\n self.global_properties: List[Tuple[str, str]] = []\n\n # List of reports that failed on call but teardown is pending.\n self.open_reports: List[TestReport] = []\n self.cnt_double_fail_tests = 0\n\n # Replaces convenience family with real family.\n if self.family == \"legacy\":\n self.family = \"xunit1\"\n\n def finalize(self, report: TestReport) -> None:\n nodeid = getattr(report, \"nodeid\", report)\n # Local hack to handle xdist report order.\n workernode = getattr(report, \"node\", None)\n reporter = self.node_reporters.pop((nodeid, workernode))\n if reporter is not None:\n reporter.finalize()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.node_reporter_LogXML._opentestcase.return.reporter": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.node_reporter_LogXML._opentestcase.return.reporter", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 509, "end_line": 534, "span_ids": ["LogXML.node_reporter", "LogXML._opentestcase", "LogXML.add_stats"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LogXML:\n\n def node_reporter(self, report: Union[TestReport, str]) -> _NodeReporter:\n nodeid: Union[str, TestReport] = getattr(report, \"nodeid\", report)\n # Local hack to handle xdist report order.\n workernode = getattr(report, \"node\", None)\n\n key = nodeid, workernode\n\n if key in self.node_reporters:\n # TODO: breaks for --dist=each\n return self.node_reporters[key]\n\n reporter = _NodeReporter(nodeid, self)\n\n self.node_reporters[key] = reporter\n self.node_reporters_ordered.append(reporter)\n\n return reporter\n\n def add_stats(self, key: str) -> None:\n if key in self.stats:\n self.stats[key] += 1\n\n def _opentestcase(self, report: TestReport) -> _NodeReporter:\n reporter = self.node_reporter(report)\n reporter.record_testreport(report)\n return reporter", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_runtest_logreport_LogXML.pytest_runtest_logreport.if_report_when_teardo.if_close_report_.self_open_reports_remove_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_runtest_logreport_LogXML.pytest_runtest_logreport.if_report_when_teardo.if_close_report_.self_open_reports_remove_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 536, "end_line": 622, "span_ids": ["LogXML.pytest_runtest_logreport"], "tokens": 650}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LogXML:\n\n def pytest_runtest_logreport(self, report: TestReport) -> None:\n \"\"\"Handle a setup/call/teardown report, generating the appropriate\n XML tags as necessary.\n\n Note: due to plugins like xdist, this hook may be called in interlaced\n order with reports from other nodes. 
For example:\n\n Usual call order:\n -> setup node1\n -> call node1\n -> teardown node1\n -> setup node2\n -> call node2\n -> teardown node2\n\n Possible call order in xdist:\n -> setup node1\n -> call node1\n -> setup node2\n -> call node2\n -> teardown node2\n -> teardown node1\n \"\"\"\n close_report = None\n if report.passed:\n if report.when == \"call\": # ignore setup/teardown\n reporter = self._opentestcase(report)\n reporter.append_pass(report)\n elif report.failed:\n if report.when == \"teardown\":\n # The following vars are needed when xdist plugin is used.\n report_wid = getattr(report, \"worker_id\", None)\n report_ii = getattr(report, \"item_index\", None)\n close_report = next(\n (\n rep\n for rep in self.open_reports\n if (\n rep.nodeid == report.nodeid\n and getattr(rep, \"item_index\", None) == report_ii\n and getattr(rep, \"worker_id\", None) == report_wid\n )\n ),\n None,\n )\n if close_report:\n # We need to open new testcase in case we have failure in\n # call and error in teardown in order to follow junit\n # schema.\n self.finalize(close_report)\n self.cnt_double_fail_tests += 1\n reporter = self._opentestcase(report)\n if report.when == \"call\":\n reporter.append_failure(report)\n self.open_reports.append(report)\n if not self.log_passing_tests:\n reporter.write_captured_output(report)\n else:\n reporter.append_error(report)\n elif report.skipped:\n reporter = self._opentestcase(report)\n reporter.append_skipped(report)\n self.update_testcase_duration(report)\n if report.when == \"teardown\":\n reporter = self._opentestcase(report)\n reporter.write_captured_output(report)\n\n for propname, propvalue in report.user_properties:\n reporter.add_property(propname, str(propvalue))\n\n self.finalize(report)\n report_wid = getattr(report, \"worker_id\", None)\n report_ii = getattr(report, \"item_index\", None)\n close_report = next(\n (\n rep\n for rep in self.open_reports\n if (\n rep.nodeid == report.nodeid\n and getattr(rep, \"item_index\", None) == report_ii\n and getattr(rep, \"worker_id\", None) == report_wid\n )\n ),\n None,\n )\n if close_report:\n self.open_reports.remove(close_report)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_sessionfinish_LogXML.pytest_sessionfinish.logfile_close_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_sessionfinish_LogXML.pytest_sessionfinish.logfile_close_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 647, "end_line": 683, "span_ids": ["LogXML.pytest_sessionfinish"], "tokens": 314}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LogXML:\n\n def pytest_sessionfinish(self) -> None:\n dirname = os.path.dirname(os.path.abspath(self.logfile))\n if not os.path.isdir(dirname):\n os.makedirs(dirname)\n logfile = open(self.logfile, \"w\", encoding=\"utf-8\")\n suite_stop_time = timing.time()\n suite_time_delta = suite_stop_time - 
self.suite_start_time\n\n numtests = (\n self.stats[\"passed\"]\n + self.stats[\"failure\"]\n + self.stats[\"skipped\"]\n + self.stats[\"error\"]\n - self.cnt_double_fail_tests\n )\n logfile.write('<?xml version=\"1.0\" encoding=\"utf-8\"?>')\n\n suite_node = ET.Element(\n \"testsuite\",\n name=self.suite_name,\n errors=str(self.stats[\"error\"]),\n failures=str(self.stats[\"failure\"]),\n skipped=str(self.stats[\"skipped\"]),\n tests=str(numtests),\n time=\"%.3f\" % suite_time_delta,\n timestamp=datetime.fromtimestamp(self.suite_start_time).isoformat(),\n hostname=platform.node(),\n )\n global_properties = self._get_global_properties_node()\n if global_properties is not None:\n suite_node.append(global_properties)\n for node_reporter in self.node_reporters_ordered:\n suite_node.append(node_reporter.to_xml())\n testsuites = ET.Element(\"testsuites\")\n testsuites.append(suite_node)\n logfile.write(ET.tostring(testsuites, encoding=\"unicode\"))\n logfile.close()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_terminal_summary_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.pytest_terminal_summary_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 685, "end_line": 701, "span_ids": ["LogXML.add_global_property", "LogXML._get_global_properties_node", "LogXML.pytest_terminal_summary"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LogXML:\n\n def pytest_terminal_summary(self, terminalreporter: TerminalReporter) -> None:\n terminalreporter.write_sep(\"-\", f\"generated xml file: {self.logfile}\")\n\n def add_global_property(self, name: str, value: object) -> None:\n __tracebackhide__ = True\n _check_record_param_type(\"name\", name)\n self.global_properties.append((name, bin_xml_escape(value)))\n\n def _get_global_properties_node(self) -> Optional[ET.Element]:\n \"\"\"Return a Junit node containing custom properties, if any.\"\"\"\n if self.global_properties:\n properties = ET.Element(\"properties\")\n for name, value in self.global_properties:\n properties.append(ET.Element(\"property\", name=name, value=value))\n return properties\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.text_LogCaptureFixture.record_tuples.return._r_name_r_levelno_r_ge": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.text_LogCaptureFixture.record_tuples.return._r_name_r_levelno_r_ge", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 390, "end_line": 409, "span_ids": ["LogCaptureFixture.text", "LogCaptureFixture.record_tuples", 
"LogCaptureFixture.records"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass LogCaptureFixture:\n\n @property\n def text(self) -> str:\n \"\"\"The formatted log text.\"\"\"\n return _remove_ansi_escape_sequences(self.handler.stream.getvalue())\n\n @property\n def records(self) -> List[logging.LogRecord]:\n \"\"\"The list of log records.\"\"\"\n return self.handler.records\n\n @property\n def record_tuples(self) -> List[Tuple[str, int, str]]:\n \"\"\"A list of a stripped down version of log records intended\n for use in assertion comparison.\n\n The format of the tuple is:\n\n (logger_name, log_level, message)\n \"\"\"\n return [(r.name, r.levelno, r.getMessage()) for r in self.records]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_caplog_caplog.result__finalize_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_caplog_caplog.result__finalize_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 475, "end_line": 489, "span_ids": ["caplog"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef caplog(request: FixtureRequest) -> Generator[LogCaptureFixture, None, None]:\n \"\"\"Access and control log capturing.\n\n Captured logs are available through the following properties/methods::\n\n * caplog.messages -> list of format-interpolated log messages\n * caplog.text -> string containing formatted log output\n * caplog.records -> list of logging.LogRecord instances\n * caplog.record_tuples -> list of (logger_name, level, message) tuples\n * caplog.clear() -> clear captured records and formatted log output string\n \"\"\"\n result = LogCaptureFixture(request.node, _ispytest=True)\n yield result\n result._finalize()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__LiveLoggingStreamHandler.emit_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__LiveLoggingStreamHandler.emit_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 788, "end_line": 824, "span_ids": ["_LiveLoggingNullHandler.reset", "_LiveLoggingStreamHandler.handleError", "_LiveLoggingStreamHandler.emit", "_LiveLoggingNullHandler.handleError", "_LiveLoggingNullHandler.set_when", "_LiveLoggingNullHandler"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _LiveLoggingStreamHandler(logging.StreamHandler):\n\n def emit(self, record: logging.LogRecord) -> None:\n ctx_manager = (\n self.capture_manager.global_and_fixture_disabled()\n if self.capture_manager\n else nullcontext()\n )\n with ctx_manager:\n if not self._first_record_emitted:\n self.stream.write(\"\\n\")\n self._first_record_emitted = True\n elif self._when in (\"teardown\", \"finish\"):\n if not self._test_outcome_written:\n self._test_outcome_written = True\n self.stream.write(\"\\n\")\n if not self._section_name_shown and self._when:\n self.stream.section(\"live log \" + self._when, sep=\"-\", bold=True)\n self._section_name_shown = True\n super().emit(record)\n\n def handleError(self, record: logging.LogRecord) -> None:\n # Handled by LogCaptureHandler.\n pass\n\n\nclass _LiveLoggingNullHandler(logging.NullHandler):\n \"\"\"A logging handler used when live logging is disabled.\"\"\"\n\n def reset(self) -> None:\n pass\n\n def set_when(self, when: str) -> None:\n pass\n\n def handleError(self, record: logging.LogRecord) -> None:\n # Handled by LogCaptureHandler.\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_wrap_session_wrap_session.return.session_exitstatus": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_wrap_session_wrap_session.return.session_exitstatus", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 259, "end_line": 315, "span_ids": ["wrap_session"], "tokens": 495}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def wrap_session(\n config: Config, doit: Callable[[Config, \"Session\"], Optional[Union[int, ExitCode]]]\n) -> Union[int, ExitCode]:\n \"\"\"Skeleton command line program.\"\"\"\n session = Session.from_config(config)\n session.exitstatus = ExitCode.OK\n initstate = 0\n try:\n try:\n config._do_configure()\n initstate = 1\n config.hook.pytest_sessionstart(session=session)\n initstate = 2\n session.exitstatus = doit(config, session) or 0\n except UsageError:\n session.exitstatus = ExitCode.USAGE_ERROR\n raise\n except Failed:\n session.exitstatus = ExitCode.TESTS_FAILED\n except (KeyboardInterrupt, exit.Exception):\n excinfo = _pytest._code.ExceptionInfo.from_current()\n exitstatus: Union[int, ExitCode] = ExitCode.INTERRUPTED\n if isinstance(excinfo.value, exit.Exception):\n if excinfo.value.returncode is not None:\n exitstatus = excinfo.value.returncode\n if initstate < 2:\n sys.stderr.write(f\"{excinfo.typename}: {excinfo.value.msg}\\n\")\n config.hook.pytest_keyboard_interrupt(excinfo=excinfo)\n session.exitstatus = exitstatus\n except BaseException:\n session.exitstatus = ExitCode.INTERNAL_ERROR\n excinfo = 
_pytest._code.ExceptionInfo.from_current()\n try:\n config.notify_exception(excinfo, config.option)\n except exit.Exception as exc:\n if exc.returncode is not None:\n session.exitstatus = exc.returncode\n sys.stderr.write(\"{}: {}\\n\".format(type(exc).__name__, exc))\n else:\n if isinstance(excinfo.value, SystemExit):\n sys.stderr.write(\"mainloop: caught unexpected SystemExit!\\n\")\n\n finally:\n # Explicitly break reference cycle.\n excinfo = None # type: ignore\n session.startdir.chdir()\n if initstate >= 2:\n try:\n config.hook.pytest_sessionfinish(\n session=session, exitstatus=session.exitstatus\n )\n except exit.Exception as exc:\n if exc.returncode is not None:\n session.exitstatus = exc.returncode\n sys.stderr.write(\"{}: {}\\n\".format(type(exc).__name__, exc))\n config._ensure_unconfigure()\n return session.exitstatus", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_addoption_pytest_addoption.parser_addini_EMPTY_PARAM": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_addoption_pytest_addoption.parser_addini_EMPTY_PARAM", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 64, "end_line": 103, "span_ids": ["pytest_addoption"], "tokens": 357}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"general\")\n group._addoption(\n \"-k\",\n action=\"store\",\n dest=\"keyword\",\n default=\"\",\n metavar=\"EXPRESSION\",\n help=\"only run tests which match the given substring expression. \"\n \"An expression is a python evaluatable expression \"\n \"where all names are substring-matched against test names \"\n \"and their parent classes. Example: -k 'test_method or test_\"\n \"other' matches all test functions and classes whose name \"\n \"contains 'test_method' or 'test_other', while -k 'not test_method' \"\n \"matches those that don't contain 'test_method' in their names. \"\n \"-k 'not test_method and not test_other' will eliminate the matches. \"\n \"Additionally keywords are matched to classes and functions \"\n \"containing extra names in their 'extra_keyword_matches' set, \"\n \"as well as functions which have names assigned directly to them. 
\"\n \"The matching is case-insensitive.\",\n )\n\n group._addoption(\n \"-m\",\n action=\"store\",\n dest=\"markexpr\",\n default=\"\",\n metavar=\"MARKEXPR\",\n help=\"only run tests matching given mark expression.\\n\"\n \"For example: -m 'mark1 and not mark2'.\",\n )\n\n group.addoption(\n \"--markers\",\n action=\"store_true\",\n help=\"show markers (builtin, plugin and per-project ones).\",\n )\n\n parser.addini(\"markers\", \"markers for test functions\", \"linelist\")\n parser.addini(EMPTY_PARAMETERSET_OPTION, \"default marker for empty parametersets\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet._for_parametrize_ParameterSet._for_parametrize.return.argnames_parameters": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet._for_parametrize_ParameterSet._for_parametrize.return.argnames_parameters", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 160, "end_line": 200, "span_ids": ["ParameterSet._for_parametrize"], "tokens": 402}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ParameterSet(\n NamedTuple(\n \"ParameterSet\",\n [\n (\"values\", Sequence[Union[object, NotSetType]]),\n (\"marks\", Collection[Union[\"MarkDecorator\", \"Mark\"]]),\n (\"id\", Optional[str]),\n ],\n )\n):\n\n @classmethod\n def _for_parametrize(\n cls,\n argnames: Union[str, List[str], Tuple[str, ...]],\n argvalues: Iterable[Union[\"ParameterSet\", Sequence[object], object]],\n func,\n config: Config,\n nodeid: str,\n ) -> Tuple[Union[List[str], Tuple[str, ...]], List[\"ParameterSet\"]]:\n argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues)\n parameters = cls._parse_parametrize_parameters(argvalues, force_tuple)\n del argvalues\n\n if parameters:\n # Check all parameter sets have the correct number of values.\n for param in parameters:\n if len(param.values) != len(argnames):\n msg = (\n '{nodeid}: in \"parametrize\" the number of names ({names_len}):\\n'\n \" {names}\\n\"\n \"must be equal to the number of values ({values_len}):\\n\"\n \" {values}\"\n )\n fail(\n msg.format(\n nodeid=nodeid,\n values=param.values,\n names=argnames,\n names_len=len(argnames),\n values_len=len(param.values),\n ),\n pytrace=False,\n )\n else:\n # Empty parameter set (likely computed at runtime): create a single\n # parameter set with NOTSET values, with the \"empty parameter set\" mark applied to it.\n mark = get_empty_parameterset_mark(config, argnames, func)\n parameters.append(\n ParameterSet(values=(NOTSET,) * len(argnames), marks=[mark], id=None)\n )\n return argnames, parameters", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_resolve_resolve.return.found": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_resolve_resolve.return.found", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 53, "end_line": 78, "span_ids": ["resolve"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def resolve(name: str) -> object:\n # Simplified from zope.dottedname.\n parts = name.split(\".\")\n\n used = parts.pop(0)\n found = __import__(used)\n for part in parts:\n used += \".\" + part\n try:\n found = getattr(found, part)\n except AttributeError:\n pass\n else:\n continue\n # We use explicit un-nesting of the handling block in order\n # to avoid nested exceptions.\n try:\n __import__(used)\n except ImportError as ex:\n expected = str(ex).split()[-1]\n if expected == used:\n raise\n else:\n raise ImportError(f\"import error in {used}: {ex}\") from ex\n found = annotated_getattr(found, part, used)\n return found", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_annotated_getattr_notset.Notset_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_annotated_getattr_notset.Notset_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 81, "end_line": 108, "span_ids": ["Notset.__repr__", "derive_importpath", "Notset", "impl:7", "annotated_getattr"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def annotated_getattr(obj: object, name: str, ann: str) -> object:\n try:\n obj = getattr(obj, name)\n except AttributeError as e:\n raise AttributeError(\n \"{!r} object at {} has no attribute {!r}\".format(\n type(obj).__name__, ann, name\n )\n ) from e\n return obj\n\n\ndef derive_importpath(import_path: str, raising: bool) -> Tuple[str, object]:\n if not isinstance(import_path, str) or \".\" not in import_path:\n raise TypeError(f\"must be absolute import path string, not {import_path!r}\")\n module, attr = import_path.rsplit(\".\", 1)\n target = resolve(module)\n if raising:\n annotated_getattr(target, attr, ann=module)\n return attr, target\n\n\nclass Notset:\n def __repr__(self) -> str:\n return \"\"\n\n\nnotset = Notset()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.delattr_MonkeyPatch.delattr.if_not_hasattr_target_na.else_.delattr_target_name_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.delattr_MonkeyPatch.delattr.if_not_hasattr_target_na.else_.delattr_target_name_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 226, "end_line": 262, "span_ids": ["MonkeyPatch.delattr"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n\n def delattr(\n self,\n target: Union[object, str],\n name: Union[str, Notset] = notset,\n raising: bool = True,\n ) -> None:\n \"\"\"Delete attribute ``name`` from ``target``.\n\n If no ``name`` is specified and ``target`` is a string\n it will be interpreted as a dotted import path with the\n last part being the attribute name.\n\n Raises AttributeError it the attribute does not exist, unless\n ``raising`` is set to False.\n \"\"\"\n __tracebackhide__ = True\n import inspect\n\n if isinstance(name, Notset):\n if not isinstance(target, str):\n raise TypeError(\n \"use delattr(target, name) or \"\n \"delattr(target) with target being a dotted \"\n \"import string\"\n )\n name, target = derive_importpath(target, raising)\n\n if not hasattr(target, name):\n if raising:\n raise AttributeError(name)\n else:\n oldval = getattr(target, name, notset)\n # Avoid class descriptors like staticmethod/classmethod.\n if inspect.isclass(target):\n oldval = target.__dict__.get(name, notset)\n self._setattr.append((target, name, oldval))\n delattr(target, name)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.undo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.undo_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 344, "end_line": 381, "span_ids": ["MonkeyPatch.undo"], "tokens": 305}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n\n def undo(self) -> None:\n \"\"\"Undo previous changes.\n\n This call consumes the undo stack. Calling it a second time has no\n effect unless you do more monkeypatching after the undo call.\n\n There is generally no need to call `undo()`, since it is\n called automatically during tear-down.\n\n Note that the same `monkeypatch` fixture is used across a\n single test function invocation. 
If `monkeypatch` is used both by\n the test function itself and one of the test fixtures,\n calling `undo()` will undo all of the changes made in\n both functions.\n \"\"\"\n for obj, name, value in reversed(self._setattr):\n if value is not notset:\n setattr(obj, name, value)\n else:\n delattr(obj, name)\n self._setattr[:] = []\n for dictionary, key, value in reversed(self._setitem):\n if value is notset:\n try:\n del dictionary[key]\n except KeyError:\n pass # Was already deleted, so we have the desired state.\n else:\n dictionary[key] = value\n self._setitem[:] = []\n if self._savesyspath is not None:\n sys.path[:] = self._savesyspath\n self._savesyspath = None\n\n if self._cwd is not None:\n os.chdir(self._cwd)\n self._cwd = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.warn_Node.warn.warnings_warn_explicit_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.warn_Node.warn.warnings_warn_explicit_", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 203, "end_line": 238, "span_ids": ["Node.warn"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n\n def warn(self, warning: Warning) -> None:\n \"\"\"Issue a warning for this Node.\n\n Warnings will be displayed after the test session, unless explicitly suppressed.\n\n :param Warning warning:\n The warning instance to issue.\n\n :raises ValueError: If ``warning`` instance is not a subclass of Warning.\n\n Example usage:\n\n .. code-block:: python\n\n node.warn(PytestWarning(\"some message\"))\n node.warn(UserWarning(\"some message\"))\n\n .. 
versionchanged:: 6.2\n Any subclass of :class:`Warning` is now accepted, rather than only\n :class:`PytestWarning ` subclasses.\n \"\"\"\n # enforce type checks here to avoid getting a generic type error later otherwise.\n if not isinstance(warning, Warning):\n raise ValueError(\n \"warning must be an instance of Warning or subclass, got {!r}\".format(\n warning\n )\n )\n path, lineno = get_fslocation_from_item(self)\n assert lineno is not None\n warnings.warn_explicit(\n warning,\n category=None,\n filename=str(path),\n lineno=lineno + 1,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.add_marker_Node.add_marker.if_append_.else_.self_own_markers_insert_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.add_marker_Node.add_marker.if_append_.else_.self_own_markers_insert_0", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 267, "end_line": 287, "span_ids": ["Node.add_marker"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n\n def add_marker(\n self, marker: Union[str, MarkDecorator], append: bool = True\n ) -> None:\n \"\"\"Dynamically add a marker object to the node.\n\n :param append:\n Whether to append the marker, or prepend it.\n \"\"\"\n from _pytest.mark import MARK_GEN\n\n if isinstance(marker, MarkDecorator):\n marker_ = marker\n elif isinstance(marker, str):\n marker_ = getattr(MARK_GEN, marker)\n else:\n raise ValueError(\"is not a string or pytest.mark.* Marker\")\n self.keywords[marker_.name] = marker_\n if append:\n self.own_markers.append(marker_.mark)\n else:\n self.own_markers.insert(0, marker_.mark)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Collector_Collector._prunetraceback.if_hasattr_self_fspath_.excinfo.traceback.ntraceback_filter_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Collector_Collector._prunetraceback.if_hasattr_self_fspath_.excinfo.traceback.ntraceback_filter_", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 444, "end_line": 484, "span_ids": ["Collector.collect", "Collector.repr_failure", "Collector", "Collector._prunetraceback", "Collector.CollectError"], "tokens": 370}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Collector(Node):\n \"\"\"Collector instances create children 
through collect() and thus\n iteratively build a tree.\"\"\"\n\n class CollectError(Exception):\n \"\"\"An error during collection, contains a custom message.\"\"\"\n\n def collect(self) -> Iterable[Union[\"Item\", \"Collector\"]]:\n \"\"\"Return a list of children (items and collectors) for this\n collection node.\"\"\"\n raise NotImplementedError(\"abstract\")\n\n # TODO: This omits the style= parameter which breaks Liskov Substitution.\n def repr_failure( # type: ignore[override]\n self, excinfo: ExceptionInfo[BaseException]\n ) -> Union[str, TerminalRepr]:\n \"\"\"Return a representation of a collection failure.\n\n :param excinfo: Exception information for the failure.\n \"\"\"\n if isinstance(excinfo.value, self.CollectError) and not self.config.getoption(\n \"fulltrace\", False\n ):\n exc = excinfo.value\n return str(exc.args[0])\n\n # Respect explicit tbstyle option, but default to \"short\"\n # (_repr_failure_py uses \"long\" with \"fulltrace\" option always).\n tbstyle = self.config.getoption(\"tbstyle\", \"auto\")\n if tbstyle == \"auto\":\n tbstyle = \"short\"\n\n return self._repr_failure_py(excinfo, style=tbstyle)\n\n def _prunetraceback(self, excinfo: ExceptionInfo[BaseException]) -> None:\n if hasattr(self, \"fspath\"):\n traceback = excinfo.traceback\n ntraceback = traceback.cut(path=Path(self.fspath))\n if ntraceback == traceback:\n ntraceback = ntraceback.cut(excludepath=tracebackcutdir)\n excinfo.traceback = ntraceback.filter()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_terminal_summary_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_terminal_summary_", "embedding": null, "metadata": {"file_path": "src/_pytest/pastebin.py", "file_name": "pastebin.py", "file_type": "text/x-python", "category": "implementation", "start_line": 94, "end_line": 111, "span_ids": ["pytest_terminal_summary"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None:\n if terminalreporter.config.option.pastebin != \"failed\":\n return\n if \"failed\" in terminalreporter.stats:\n terminalreporter.write_sep(\"=\", \"Sending information to Paste Service\")\n for rep in terminalreporter.stats[\"failed\"]:\n try:\n msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc\n except AttributeError:\n msg = terminalreporter._getfailureheadline(rep)\n file = StringIO()\n tw = create_terminal_writer(terminalreporter.config, file)\n rep.toterminal(tw)\n s = file.getvalue()\n assert len(s)\n pastebinurl = create_new_paste(s)\n terminalreporter.write_line(f\"{msg} --> {pastebinurl}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py__force_symlink__force_symlink.None_1.except_Exception_.pass": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py__force_symlink__force_symlink.None_1.except_Exception_.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 193, "end_line": 212, "span_ids": ["_force_symlink"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _force_symlink(\n root: Path, target: Union[str, PurePath], link_to: Union[str, Path]\n) -> None:\n \"\"\"Helper to create the current symlink.\n\n It's full of race conditions that are reasonably OK to ignore\n for the context of best effort linking to the latest test run.\n\n The presumption being that in case of much parallelism\n the inaccuracy is going to be acceptable.\n \"\"\"\n current_symlink = root.joinpath(target)\n try:\n current_symlink.unlink()\n except OSError:\n pass\n try:\n current_symlink.symlink_to(link_to)\n except Exception:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder_HookRecorder.getcalls.return._call_for_call_in_self_ca": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder_HookRecorder.getcalls.return._call_for_call_in_self_ca", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 231, "end_line": 257, "span_ids": ["HookRecorder.getcalls", "HookRecorder.__init__.before", "HookRecorder.finish_recording", "HookRecorder"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HookRecorder:\n \"\"\"Record all hooks called in a plugin manager.\n\n This wraps all the hook calls in the plugin manager, recording each call\n before propagating the normal calls.\n \"\"\"\n\n def __init__(self, pluginmanager: PytestPluginManager) -> None:\n self._pluginmanager = pluginmanager\n self.calls: List[ParsedCall] = []\n self.ret: Optional[Union[int, ExitCode]] = None\n\n def before(hook_name: str, hook_impls, kwargs) -> None:\n self.calls.append(ParsedCall(hook_name, kwargs))\n\n def after(outcome, hook_name: str, hook_impls, kwargs) -> None:\n pass\n\n self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)\n\n def finish_recording(self) -> None:\n self._undo_wrapping()\n\n def getcalls(self, names: Union[str, Iterable[str]]) -> List[ParsedCall]:\n if isinstance(names, str):\n names = names.split()\n return [call for call in self.calls if call._name in names]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.matchreport_HookRecorder.matchreport.return.values_0_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.matchreport_HookRecorder.matchreport.return.values_0_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 330, "end_line": 360, "span_ids": ["HookRecorder.matchreport"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HookRecorder:\n\n def matchreport(\n self,\n inamepart: str = \"\",\n names: Union[str, Iterable[str]] = (\n \"pytest_runtest_logreport\",\n \"pytest_collectreport\",\n ),\n when: Optional[str] = None,\n ) -> Union[CollectReport, TestReport]:\n \"\"\"Return a testreport whose dotted import path matches.\"\"\"\n values = []\n for rep in self.getreports(names=names):\n if not when and rep.when != \"call\" and rep.passed:\n # setup/teardown passing reports - let's ignore those\n continue\n if when and rep.when != when:\n continue\n if not inamepart or inamepart in rep.nodeid.split(\"::\"):\n values.append(rep)\n if not values:\n raise ValueError(\n \"could not find test report matching %r: \"\n \"no test reports at all!\" % (inamepart,)\n )\n if len(values) > 1:\n raise ValueError(\n \"found 2 or more testreports matching {!r}: {}\".format(\n inamepart, values\n )\n )\n return values[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_CwdSnapshot_SysPathsSnapshot.restore.sys_path_sys_meta_pat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_CwdSnapshot_SysPathsSnapshot.restore.sys_path_sys_meta_pat", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 521, "end_line": 548, "span_ids": ["SysModulesSnapshot.restore", "SysModulesSnapshot", "CwdSnapshot", "CwdSnapshot.restore", "SysPathsSnapshot", "SysPathsSnapshot.restore"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CwdSnapshot:\n def __init__(self) -> None:\n self.__saved = os.getcwd()\n\n def restore(self) -> None:\n os.chdir(self.__saved)\n\n\nclass SysModulesSnapshot:\n def __init__(self, preserve: Optional[Callable[[str], bool]] = None) -> None:\n self.__preserve = preserve\n self.__saved = dict(sys.modules)\n\n def restore(self) -> None:\n if self.__preserve:\n self.__saved.update(\n (k, m) for k, m in sys.modules.items() if self.__preserve(k)\n )\n sys.modules.clear()\n sys.modules.update(self.__saved)\n\n\nclass SysPathsSnapshot:\n def __init__(self) -> None:\n self.__saved = 
list(sys.path), list(sys.meta_path)\n\n def restore(self) -> None:\n sys.path[:], sys.meta_path[:] = self.__saved", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher_LineMatcher.re_match_lines_random.self__match_lines_random_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher_LineMatcher.re_match_lines_random.self__match_lines_random_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1737, "end_line": 1774, "span_ids": ["LineMatcher.fnmatch_lines_random", "LineMatcher._getlines", "LineMatcher.re_match_lines_random", "LineMatcher", "LineMatcher.__str__"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n \"\"\"Flexible matching of text.\n\n This is a convenience class to test large texts like the output of\n commands.\n\n The constructor takes a list of lines without their trailing newlines, i.e.\n ``text.splitlines()``.\n \"\"\"\n\n def __init__(self, lines: List[str]) -> None:\n self.lines = lines\n self._log_output: List[str] = []\n\n def __str__(self) -> str:\n \"\"\"Return the entire original text.\n\n .. versionadded:: 6.2\n You can use :meth:`str` in older versions.\n \"\"\"\n return \"\\n\".join(self.lines)\n\n def _getlines(self, lines2: Union[str, Sequence[str], Source]) -> Sequence[str]:\n if isinstance(lines2, str):\n lines2 = Source(lines2)\n if isinstance(lines2, Source):\n lines2 = lines2.strip().lines\n return lines2\n\n def fnmatch_lines_random(self, lines2: Sequence[str]) -> None:\n \"\"\"Check lines exist in the output in any order (using :func:`python:fnmatch.fnmatch`).\"\"\"\n __tracebackhide__ = True\n self._match_lines_random(lines2, fnmatch)\n\n def re_match_lines_random(self, lines2: Sequence[str]) -> None:\n \"\"\"Check lines exist in the output in any order (using :func:`python:re.match`).\"\"\"\n __tracebackhide__ = True\n self._match_lines_random(lines2, lambda name, pat: bool(re.match(pat, name)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin.reportinfo_PyobjMixin.reportinfo.return.fspath_lineno_modpath": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin.reportinfo_PyobjMixin.reportinfo.return.fspath_lineno_modpath", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 324, "end_line": 343, "span_ids": ["PyobjMixin.reportinfo"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PyobjMixin(nodes.Node):\n\n def reportinfo(self) -> Tuple[Union[py.path.local, str], int, str]:\n # XXX caching?\n obj = self.obj\n compat_co_firstlineno = getattr(obj, \"compat_co_firstlineno\", None)\n if isinstance(compat_co_firstlineno, int):\n # nose compatibility\n file_path = sys.modules[obj.__module__].__file__\n if file_path.endswith(\".pyc\"):\n file_path = file_path[:-1]\n fspath: Union[py.path.local, str] = file_path\n lineno = compat_co_firstlineno\n else:\n path, lineno = getfslineno(obj)\n if isinstance(path, Path):\n fspath = py.path.local(path)\n else:\n fspath = path\n modpath = self.getmodpath()\n assert isinstance(lineno, int)\n return fspath, lineno, modpath", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector._matches_prefix_or_glob_option_PyCollector._matches_prefix_or_glob_option.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector._matches_prefix_or_glob_option_PyCollector._matches_prefix_or_glob_option.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 397, "end_line": 410, "span_ids": ["PyCollector._matches_prefix_or_glob_option"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PyCollector(PyobjMixin, nodes.Collector):\n\n def _matches_prefix_or_glob_option(self, option_name: str, name: str) -> bool:\n \"\"\"Check if the given name matches the prefix or glob-pattern defined\n in ini configuration.\"\"\"\n for option in self.config.getini(option_name):\n if name.startswith(option):\n return True\n # Check that name looks like a glob-string before calling fnmatch\n # because this is called for every name in each collected module,\n # and fnmatch is somewhat expensive to call.\n elif (\"*\" in option or \"?\" in option or \"[\" in option) and fnmatch.fnmatch(\n name, option\n ):\n return True\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module_Module._inject_setup_module_fixture.self.obj.__pytest_setup_module.xunit_setup_module_fixtur": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module_Module._inject_setup_module_fixture.self.obj.__pytest_setup_module.xunit_setup_module_fixtur", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 501, "end_line": 543, "span_ids": ["Module.collect", "Module", "Module._getobj", "Module._inject_setup_module_fixture"], "tokens": 337}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Module(nodes.File, PyCollector):\n \"\"\"Collector for test classes and functions.\"\"\"\n\n def _getobj(self):\n return self._importtestmodule()\n\n def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:\n self._inject_setup_module_fixture()\n self._inject_setup_function_fixture()\n self.session._fixturemanager.parsefactories(self)\n return super().collect()\n\n def _inject_setup_module_fixture(self) -> None:\n \"\"\"Inject a hidden autouse, module scoped fixture into the collected module object\n that invokes setUpModule/tearDownModule if either or both are available.\n\n Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with\n other fixtures (#517).\n \"\"\"\n setup_module = _get_first_non_fixture_func(\n self.obj, (\"setUpModule\", \"setup_module\")\n )\n teardown_module = _get_first_non_fixture_func(\n self.obj, (\"tearDownModule\", \"teardown_module\")\n )\n\n if setup_module is None and teardown_module is None:\n return\n\n @fixtures.fixture(\n autouse=True,\n scope=\"module\",\n # Use a unique name to speed up lookup.\n name=f\"xunit_setup_module_fixture_{self.obj.__name__}\",\n )\n def xunit_setup_module_fixture(request) -> Generator[None, None, None]:\n if setup_module is not None:\n _call_with_optional_argument(setup_module, request.module)\n yield\n if teardown_module is not None:\n _call_with_optional_argument(teardown_module, request.module)\n\n self.obj.__pytest_setup_module = xunit_setup_module_fixture", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._importtestmodule_Module._importtestmodule.return.mod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._importtestmodule_Module._importtestmodule.return.mod", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 579, "end_line": 624, "span_ids": ["Module._importtestmodule"], "tokens": 453}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Module(nodes.File, PyCollector):\n\n def _importtestmodule(self):\n # We assume we are only called once per module.\n importmode = self.config.getoption(\"--import-mode\")\n try:\n mod = import_path(self.fspath, mode=importmode)\n except SyntaxError as e:\n raise self.CollectError(\n ExceptionInfo.from_current().getrepr(style=\"short\")\n ) from e\n except ImportPathMismatchError as e:\n raise self.CollectError(\n \"import file mismatch:\\n\"\n \"imported module %r has this __file__ attribute:\\n\"\n \" %s\\n\"\n \"which is not the same as the test file we want to collect:\\n\"\n \" %s\\n\"\n \"HINT: remove __pycache__ / .pyc files and/or use a \"\n \"unique basename for your test file modules\" % e.args\n ) from e\n except ImportError as 
e:\n exc_info = ExceptionInfo.from_current()\n if self.config.getoption(\"verbose\") < 2:\n exc_info.traceback = exc_info.traceback.filter(filter_traceback)\n exc_repr = (\n exc_info.getrepr(style=\"short\")\n if exc_info.traceback\n else exc_info.exconly()\n )\n formatted_tb = str(exc_repr)\n raise self.CollectError(\n \"ImportError while importing test module '{fspath}'.\\n\"\n \"Hint: make sure your test modules/packages have valid Python names.\\n\"\n \"Traceback:\\n\"\n \"{traceback}\".format(fspath=self.fspath, traceback=formatted_tb)\n ) from e\n except skip.Exception as e:\n if e.allow_module_level:\n raise\n raise self.CollectError(\n \"Using pytest.skip outside of a test is not allowed. \"\n \"To decorate a test function, use the @pytest.mark.skip \"\n \"or @pytest.mark.skipif decorators instead, and to skip a \"\n \"module use `pytestmark = pytest.mark.{skip,skipif}.\"\n ) from e\n self.config.pluginmanager.consider_module(mod)\n return mod", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._resolve_arg_value_types_Metafunc._resolve_arg_value_types.return.valtypes": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._resolve_arg_value_types_Metafunc._resolve_arg_value_types.return.valtypes", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1192, "end_line": 1230, "span_ids": ["Metafunc._resolve_arg_value_types"], "tokens": 378}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Metafunc:\n\n def _resolve_arg_value_types(\n self,\n argnames: Sequence[str],\n indirect: Union[bool, Sequence[str]],\n ) -> Dict[str, \"Literal['params', 'funcargs']\"]:\n \"\"\"Resolve if each parametrized argument must be considered a\n parameter to a fixture or a \"funcarg\" to the function, based on the\n ``indirect`` parameter of the parametrized() call.\n\n :param List[str] argnames: List of argument names passed to ``parametrize()``.\n :param indirect: Same as the ``indirect`` parameter of ``parametrize()``.\n :rtype: Dict[str, str]\n A dict mapping each arg name to either:\n * \"params\" if the argname should be the parameter of a fixture of the same name.\n * \"funcargs\" if the argname should be a parameter to the parametrized test function.\n \"\"\"\n if isinstance(indirect, bool):\n valtypes: Dict[str, Literal[\"params\", \"funcargs\"]] = dict.fromkeys(\n argnames, \"params\" if indirect else \"funcargs\"\n )\n elif isinstance(indirect, Sequence):\n valtypes = dict.fromkeys(argnames, \"funcargs\")\n for arg in indirect:\n if arg not in argnames:\n fail(\n \"In {}: indirect fixture '{}' doesn't exist\".format(\n self.function.__name__, arg\n ),\n pytrace=False,\n )\n valtypes[arg] = \"params\"\n else:\n fail(\n \"In {func}: expected Sequence or boolean for indirect, got {type}\".format(\n type=type(indirect).__name__, func=self.function.__name__\n ),\n pytrace=False,\n )\n return valtypes", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._validate_if_using_arg_names_Metafunc._validate_if_using_arg_names.for_arg_in_argnames_.if_arg_not_in_self_fixtur.if_arg_in_default_arg_nam.else_.fail_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._validate_if_using_arg_names_Metafunc._validate_if_using_arg_names.for_arg_in_argnames_.if_arg_not_in_self_fixtur.if_arg_in_default_arg_nam.else_.fail_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1232, "end_line": 1262, "span_ids": ["Metafunc._validate_if_using_arg_names"], "tokens": 263}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Metafunc:\n\n def _validate_if_using_arg_names(\n self,\n argnames: Sequence[str],\n indirect: Union[bool, Sequence[str]],\n ) -> None:\n \"\"\"Check if all argnames are being used, by default values, or directly/indirectly.\n\n :param List[str] argnames: List of argument names passed to ``parametrize()``.\n :param indirect: Same as the ``indirect`` parameter of ``parametrize()``.\n :raises ValueError: If validation fails.\n \"\"\"\n default_arg_names = set(get_default_arg_names(self.function))\n func_name = self.function.__name__\n for arg in argnames:\n if arg not in self.fixturenames:\n if arg in default_arg_names:\n fail(\n \"In {}: function already takes an argument '{}' with a default value\".format(\n func_name, arg\n ),\n pytrace=False,\n )\n else:\n if isinstance(indirect, Sequence):\n name = \"fixture\" if arg in indirect else \"argument\"\n else:\n name = \"fixture\" if indirect else \"argument\"\n fail(\n f\"In {func_name}: function uses no {name} '{arg}'\",\n pytrace=False,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__idval__idval.return.str_argname_str_idx_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__idval__idval.return.str_argname_str_idx_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1313, "end_line": 1353, "span_ids": ["_idval"], "tokens": 343}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _idval(\n val: object,\n argname: str,\n idx: int,\n idfn: Optional[Callable[[Any], Optional[object]]],\n nodeid: Optional[str],\n config: Optional[Config],\n) -> str:\n if idfn:\n try:\n generated_id = idfn(val)\n if generated_id is not None:\n val = generated_id\n 
except Exception as e:\n prefix = f\"{nodeid}: \" if nodeid is not None else \"\"\n msg = \"error raised while trying to determine id of parameter '{}' at position {}\"\n msg = prefix + msg.format(argname, idx)\n raise ValueError(msg) from e\n elif config:\n hook_id: Optional[str] = config.hook.pytest_make_parametrize_id(\n config=config, val=val, argname=argname\n )\n if hook_id:\n return hook_id\n\n if isinstance(val, STRING_TYPES):\n return _ascii_escaped_by_config(val, config)\n elif val is None or isinstance(val, (float, int, bool)):\n return str(val)\n elif isinstance(val, REGEX_TYPE):\n return ascii_escaped(val.pattern)\n elif val is NOTSET:\n # Fallback to default. Note that NOTSET is an enum.Enum.\n pass\n elif isinstance(val, enum.Enum):\n return str(val)\n elif isinstance(getattr(val, \"__name__\", None), str):\n # Name of a class, function, module, etc.\n name: str = getattr(val, \"__name__\")\n return name\n return str(argname) + str(idx)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__show_fixtures_per_test.write_item_showfixtures.return.wrap_session_config__sho": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__show_fixtures_per_test.write_item_showfixtures.return.wrap_session_config__sho", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1446, "end_line": 1471, "span_ids": ["_show_fixtures_per_test", "showfixtures"], "tokens": 259}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _show_fixtures_per_test(config: Config, session: Session) -> None:\n # ... 
other code\n\n def write_item(item: nodes.Item) -> None:\n # Not all items have _fixtureinfo attribute.\n info: Optional[FuncFixtureInfo] = getattr(item, \"_fixtureinfo\", None)\n if info is None or not info.name2fixturedefs:\n # This test item does not use any fixtures.\n return\n tw.line()\n tw.sep(\"-\", f\"fixtures used by {item.name}\")\n # TODO: Fix this type ignore.\n tw.sep(\"-\", \"({})\".format(get_best_relpath(item.function))) # type: ignore[attr-defined]\n # dict key not used in loop but needed for sorting.\n for _, fixturedefs in sorted(info.name2fixturedefs.items()):\n assert fixturedefs is not None\n if not fixturedefs:\n continue\n # Last item is expected to be the one used by the test item.\n write_fixture(fixturedefs[-1])\n\n for session_item in session.items:\n write_item(session_item)\n\n\ndef showfixtures(config: Config) -> Union[int, ExitCode]:\n from _pytest.main import wrap_session\n\n return wrap_session(config, _showfixtures_main)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxNumpy._yield_comparisons_ApproxNumpy._yield_comparisons.if_np_isscalar_actual_.else_.for_i_in_np_ndindex_self_.yield_actual_i_item_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxNumpy._yield_comparisons_ApproxNumpy._yield_comparisons.if_np_isscalar_actual_.else_.for_i_in_np_ndindex_self_.yield_actual_i_item_s", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 143, "end_line": 155, "span_ids": ["ApproxNumpy._yield_comparisons"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ApproxNumpy(ApproxBase):\n\n def _yield_comparisons(self, actual):\n import numpy as np\n\n # `actual` can either be a numpy array or a scalar, it is treated in\n # `__eq__` before being passed to `ApproxBase.__eq__`, which is the\n # only method that calls this one.\n\n if np.isscalar(actual):\n for i in np.ndindex(self.expected.shape):\n yield actual, self.expected[i].item()\n else:\n for i in np.ndindex(self.expected.shape):\n yield actual[i].item(), self.expected[i].item()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsChecker_WarningsChecker.__init__.self.match_expr.match_expr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsChecker_WarningsChecker.__init__.self.match_expr.match_expr", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 231, "end_line": 259, "span_ids": ["WarningsChecker"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass WarningsChecker(WarningsRecorder):\n def __init__(\n self,\n expected_warning: Optional[\n Union[Type[Warning], Tuple[Type[Warning], ...]]\n ] = None,\n match_expr: Optional[Union[str, Pattern[str]]] = None,\n *,\n _ispytest: bool = False,\n ) -> None:\n check_ispytest(_ispytest)\n super().__init__(_ispytest=True)\n\n msg = \"exceptions must be derived from Warning, not %s\"\n if expected_warning is None:\n expected_warning_tup = None\n elif isinstance(expected_warning, tuple):\n for exc in expected_warning:\n if not issubclass(exc, Warning):\n raise TypeError(msg % type(exc))\n expected_warning_tup = expected_warning\n elif issubclass(expected_warning, Warning):\n expected_warning_tup = (expected_warning,)\n else:\n raise TypeError(msg % type(expected_warning))\n\n self.expected_warning = expected_warning_tup\n self.match_expr = match_expr", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsChecker.__exit___": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsChecker.__exit___", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 261, "end_line": 297, "span_ids": ["WarningsChecker.__exit__"], "tokens": 301}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass WarningsChecker(WarningsRecorder):\n\n def __exit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n super().__exit__(exc_type, exc_val, exc_tb)\n\n __tracebackhide__ = True\n\n # only check if we're not currently handling an exception\n if exc_type is None and exc_val is None and exc_tb is None:\n if self.expected_warning is not None:\n if not any(issubclass(r.category, self.expected_warning) for r in self):\n __tracebackhide__ = True\n fail(\n \"DID NOT WARN. No warnings of type {} was emitted. \"\n \"The list of emitted warnings is: {}.\".format(\n self.expected_warning, [each.message for each in self]\n )\n )\n elif self.match_expr is not None:\n for r in self:\n if issubclass(r.category, self.expected_warning):\n if re.compile(self.match_expr).search(str(r.message)):\n break\n else:\n fail(\n \"DID NOT WARN. No warnings of type {} matching\"\n \" ('{}') was emitted. 
The list of emitted warnings\"\n \" is: {}.\".format(\n self.expected_warning,\n self.match_expr,\n [each.message for each in self],\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_unserialization_failure__report_unserialization_failure.raise_RuntimeError_stream": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_unserialization_failure__report_unserialization_failure.raise_RuntimeError_stream", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 224, "end_line": 235, "span_ids": ["_report_unserialization_failure"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _report_unserialization_failure(\n type_name: str, report_class: Type[BaseReport], reportdict\n) -> \"NoReturn\":\n url = \"https://github.com/pytest-dev/pytest/issues\"\n stream = StringIO()\n pprint(\"-\" * 100, stream=stream)\n pprint(\"INTERNALERROR: Unknown entry type returned: %s\" % type_name, stream=stream)\n pprint(\"report_name: %s\" % report_class, stream=stream)\n pprint(reportdict, stream=stream)\n pprint(\"Please report this bug at %s\" % url, stream=stream)\n pprint(\"-\" * 100, stream=stream)\n raise RuntimeError(stream.getvalue())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_TestReport.from_item_and_call_TestReport.from_item_and_call.return.cls_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_TestReport.from_item_and_call_TestReport.from_item_and_call.return.cls_", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 301, "end_line": 353, "span_ids": ["TestReport.from_item_and_call"], "tokens": 432}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TestReport(BaseReport):\n\n @classmethod\n def from_item_and_call(cls, item: Item, call: \"CallInfo[None]\") -> \"TestReport\":\n \"\"\"Create and fill a TestReport with standard item and call info.\"\"\"\n when = call.when\n # Remove \"collect\" from the Literal type -- only for collection calls.\n assert when != \"collect\"\n duration = call.duration\n keywords = {x: 1 for x in item.keywords}\n excinfo = call.excinfo\n sections = []\n if not call.excinfo:\n outcome: Literal[\"passed\", \"failed\", \"skipped\"] = \"passed\"\n longrepr: Union[\n None,\n ExceptionInfo[BaseException],\n Tuple[str, int, str],\n 
str,\n TerminalRepr,\n ] = None\n else:\n if not isinstance(excinfo, ExceptionInfo):\n outcome = \"failed\"\n longrepr = excinfo\n elif isinstance(excinfo.value, skip.Exception):\n outcome = \"skipped\"\n r = excinfo._getreprcrash()\n if excinfo.value._use_item_location:\n filename, line = item.reportinfo()[:2]\n assert line is not None\n longrepr = str(filename), line + 1, r.message\n else:\n longrepr = (str(r.path), r.lineno, r.message)\n else:\n outcome = \"failed\"\n if call.when == \"call\":\n longrepr = item.repr_failure(excinfo)\n else: # exception in setup or teardown\n longrepr = item._repr_failure_py(\n excinfo, style=item.config.getoption(\"tbstyle\", \"auto\")\n )\n for rwhen, key, content in item._report_sections:\n sections.append((f\"Captured {key} {rwhen}\", content))\n return cls(\n item.nodeid,\n item.location,\n keywords,\n outcome,\n longrepr,\n when,\n sections,\n duration,\n user_properties=item.user_properties,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py__update_current_test_var__update_current_test_var.if_when_.else_.os_environ_pop_var_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py__update_current_test_var__update_current_test_var.if_when_.else_.os_environ_pop_var_name_", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 185, "end_line": 199, "span_ids": ["_update_current_test_var"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _update_current_test_var(\n item: Item, when: Optional[\"Literal['setup', 'call', 'teardown']\"]\n) -> None:\n \"\"\"Update :envvar:`PYTEST_CURRENT_TEST` to reflect the current item and stage.\n\n If ``when`` is None, delete ``PYTEST_CURRENT_TEST`` from the environment.\n \"\"\"\n var_name = \"PYTEST_CURRENT_TEST\"\n if when:\n value = f\"{item.nodeid} ({when})\"\n # don't allow null bytes on environment variables (see #2644, #2957)\n value = value.replace(\"\\x00\", \"(null)\")\n os.environ[var_name] = value\n else:\n os.environ.pop(var_name)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py__show_fixture_action_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py__show_fixture_action_", "embedding": null, "metadata": {"file_path": "src/_pytest/setuponly.py", "file_name": "setuponly.py", "file_type": "text/x-python", "category": "implementation", "start_line": 59, "end_line": 95, "span_ids": ["_show_fixture_action", "pytest_cmdline_main"], "tokens": 288}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _show_fixture_action(fixturedef: FixtureDef[object], msg: str) -> None:\n config = fixturedef._fixturemanager.config\n capman = config.pluginmanager.getplugin(\"capturemanager\")\n if capman:\n capman.suspend_global_capture()\n\n tw = config.get_terminal_writer()\n tw.line()\n tw.write(\" \" * 2 * fixturedef.scopenum)\n tw.write(\n \"{step} {scope} {fixture}\".format(\n step=msg.ljust(8), # align the output to TEARDOWN\n scope=fixturedef.scope[0].upper(),\n fixture=fixturedef.argname,\n )\n )\n\n if msg == \"SETUP\":\n deps = sorted(arg for arg in fixturedef.argnames if arg != \"request\")\n if deps:\n tw.write(\" (fixtures used: {})\".format(\", \".join(deps)))\n\n if hasattr(fixturedef, \"cached_param\"):\n tw.write(\"[{}]\".format(saferepr(fixturedef.cached_param, maxsize=42))) # type: ignore[attr-defined]\n\n tw.flush()\n\n if capman:\n capman.resume_global_capture()\n\n\n@pytest.hookimpl(tryfirst=True)\ndef pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:\n if config.option.setuponly:\n config.option.setupshow = True\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_configure_pytest_configure.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_configure_pytest_configure.None_2", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 48, "end_line": 84, "span_ids": ["pytest_configure"], "tokens": 381}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_configure(config: Config) -> None:\n if config.option.runxfail:\n # yay a hack\n import pytest\n\n old = pytest.xfail\n config._cleanup.append(lambda: setattr(pytest, \"xfail\", old))\n\n def nop(*args, **kwargs):\n pass\n\n nop.Exception = xfail.Exception # type: ignore[attr-defined]\n setattr(pytest, \"xfail\", nop)\n\n config.addinivalue_line(\n \"markers\",\n \"skip(reason=None): skip the given test function with an optional reason. \"\n 'Example: skip(reason=\"no way of currently testing this\") skips the '\n \"test.\",\n )\n config.addinivalue_line(\n \"markers\",\n \"skipif(condition, ..., *, reason=...): \"\n \"skip the given test function if any of the conditions evaluate to True. \"\n \"Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. \"\n \"See https://docs.pytest.org/en/stable/reference.html#pytest-mark-skipif\",\n )\n config.addinivalue_line(\n \"markers\",\n \"xfail(condition, ..., *, reason=..., run=True, raises=None, strict=xfail_strict): \"\n \"mark the test function as an expected failure if any of the conditions \"\n \"evaluate to True. Optionally specify a reason for better reporting \"\n \"and run=False if you don't even want to execute the test function. 
\"\n \"If only specific exception(s) are expected, you can list them in \"\n \"raises, and if the test fails in other ways, it will be reported as \"\n \"a true failure. See https://docs.pytest.org/en/stable/reference.html#pytest-mark-xfail\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_runtest_makereport_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_runtest_makereport_", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 265, "end_line": 300, "span_ids": ["pytest_report_teststatus", "pytest_runtest_makereport"], "tokens": 325}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(hookwrapper=True)\ndef pytest_runtest_makereport(item: Item, call: CallInfo[None]):\n outcome = yield\n rep = outcome.get_result()\n xfailed = item._store.get(xfailed_key, None)\n if item.config.option.runxfail:\n pass # don't interfere\n elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception):\n assert call.excinfo.value.msg is not None\n rep.wasxfail = \"reason: \" + call.excinfo.value.msg\n rep.outcome = \"skipped\"\n elif not rep.skipped and xfailed:\n if call.excinfo:\n raises = xfailed.raises\n if raises is not None and not isinstance(call.excinfo.value, raises):\n rep.outcome = \"failed\"\n else:\n rep.outcome = \"skipped\"\n rep.wasxfail = xfailed.reason\n elif call.when == \"call\":\n if xfailed.strict:\n rep.outcome = \"failed\"\n rep.longrepr = \"[XPASS(strict)] \" + xfailed.reason\n else:\n rep.outcome = \"passed\"\n rep.wasxfail = xfailed.reason\n\n\ndef pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]:\n if hasattr(report, \"wasxfail\"):\n if report.skipped:\n return \"xfailed\", \"x\", \"XFAIL\"\n elif report.passed:\n return \"xpassed\", \"X\", \"XPASS\"\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_addoption_pytest_addoption.parser_addini_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_addoption_pytest_addoption.parser_addini_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 110, "end_line": 222, "span_ids": ["pytest_addoption"], "tokens": 781}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"terminal reporting\", 
\"reporting\", after=\"general\")\n group._addoption(\n \"-v\",\n \"--verbose\",\n action=\"count\",\n default=0,\n dest=\"verbose\",\n help=\"increase verbosity.\",\n )\n group._addoption(\n \"--no-header\",\n action=\"store_true\",\n default=False,\n dest=\"no_header\",\n help=\"disable header\",\n )\n group._addoption(\n \"--no-summary\",\n action=\"store_true\",\n default=False,\n dest=\"no_summary\",\n help=\"disable summary\",\n )\n group._addoption(\n \"-q\",\n \"--quiet\",\n action=MoreQuietAction,\n default=0,\n dest=\"verbose\",\n help=\"decrease verbosity.\",\n )\n group._addoption(\n \"--verbosity\",\n dest=\"verbose\",\n type=int,\n default=0,\n help=\"set verbosity. Default is 0.\",\n )\n group._addoption(\n \"-r\",\n action=\"store\",\n dest=\"reportchars\",\n default=_REPORTCHARS_DEFAULT,\n metavar=\"chars\",\n help=\"show extra test summary info as specified by chars: (f)ailed, \"\n \"(E)rror, (s)kipped, (x)failed, (X)passed, \"\n \"(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. \"\n \"(w)arnings are enabled by default (see --disable-warnings), \"\n \"'N' can be used to reset the list. (default: 'fE').\",\n )\n group._addoption(\n \"--disable-warnings\",\n \"--disable-pytest-warnings\",\n default=False,\n dest=\"disable_warnings\",\n action=\"store_true\",\n help=\"disable warnings summary\",\n )\n group._addoption(\n \"-l\",\n \"--showlocals\",\n action=\"store_true\",\n dest=\"showlocals\",\n default=False,\n help=\"show locals in tracebacks (disabled by default).\",\n )\n group._addoption(\n \"--tb\",\n metavar=\"style\",\n action=\"store\",\n dest=\"tbstyle\",\n default=\"auto\",\n choices=[\"auto\", \"long\", \"short\", \"no\", \"line\", \"native\"],\n help=\"traceback print mode (auto/long/short/line/native/no).\",\n )\n group._addoption(\n \"--show-capture\",\n action=\"store\",\n dest=\"showcapture\",\n choices=[\"no\", \"stdout\", \"stderr\", \"log\", \"all\"],\n default=\"all\",\n help=\"Controls how captured stdout/stderr/log is shown on failed tests. 
\"\n \"Default is 'all'.\",\n )\n group._addoption(\n \"--fulltrace\",\n \"--full-trace\",\n action=\"store_true\",\n default=False,\n help=\"don't cut any tracebacks (default is to cut).\",\n )\n group._addoption(\n \"--color\",\n metavar=\"color\",\n action=\"store\",\n dest=\"color\",\n default=\"auto\",\n choices=[\"yes\", \"no\", \"auto\"],\n help=\"color terminal output (yes/no/auto).\",\n )\n group._addoption(\n \"--code-highlight\",\n default=\"yes\",\n choices=[\"yes\", \"no\"],\n help=\"Whether code should be highlighted (only if --color is also enabled)\",\n )\n\n parser.addini(\n \"console_output_style\",\n help='console output: \"classic\", or with additional progress information (\"progress\" (percentage) | \"count\").',\n default=\"progress\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_configure_getreportopt.return.reportopts": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_configure_getreportopt.return.reportopts", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 227, "end_line": 261, "span_ids": ["pytest_configure", "getreportopt"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_configure(config: Config) -> None:\n reporter = TerminalReporter(config, sys.stdout)\n config.pluginmanager.register(reporter, \"terminalreporter\")\n if config.option.debug or config.option.traceconfig:\n\n def mywriter(tags, args):\n msg = \" \".join(map(str, args))\n reporter.write_line(\"[traceconfig] \" + msg)\n\n config.trace.root.setprocessor(\"pytest:config\", mywriter)\n\n\ndef getreportopt(config: Config) -> str:\n reportchars: str = config.option.reportchars\n\n old_aliases = {\"F\", \"S\"}\n reportopts = \"\"\n for char in reportchars:\n if char in old_aliases:\n char = char.lower()\n if char == \"a\":\n reportopts = \"sxXEf\"\n elif char == \"A\":\n reportopts = \"PpsxXEf\"\n elif char == \"N\":\n reportopts = \"\"\n elif char not in reportopts:\n reportopts += char\n\n if not config.option.disable_warnings and \"w\" not in reportopts:\n reportopts = \"w\" + reportopts\n elif config.option.disable_warnings and \"w\" in reportopts:\n reportopts = reportopts.replace(\"w\", \"\")\n\n return reportopts", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.verbosity_TerminalReporter.hasopt.return.char_in_self_reportchars": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.verbosity_TerminalReporter.hasopt.return.char_in_self_reportchars", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 353, 
"end_line": 386, "span_ids": ["TerminalReporter.hasopt", "TerminalReporter.no_summary", "TerminalReporter.showlongtestinfo", "TerminalReporter.showfspath", "TerminalReporter.verbosity", "TerminalReporter.showfspath_6", "TerminalReporter.no_header", "TerminalReporter.showheader"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n @property\n def verbosity(self) -> int:\n verbosity: int = self.config.option.verbose\n return verbosity\n\n @property\n def showheader(self) -> bool:\n return self.verbosity >= 0\n\n @property\n def no_header(self) -> bool:\n return bool(self.config.option.no_header)\n\n @property\n def no_summary(self) -> bool:\n return bool(self.config.option.no_summary)\n\n @property\n def showfspath(self) -> bool:\n if self._showfspath is None:\n return self.verbosity >= 0\n return self._showfspath\n\n @showfspath.setter\n def showfspath(self, value: Optional[bool]) -> None:\n self._showfspath = value\n\n @property\n def showlongtestinfo(self) -> bool:\n return self.verbosity > 0\n\n def hasopt(self, char: str) -> bool:\n char = {\"xfailed\": \"x\", \"skipped\": \"s\"}.get(char, char)\n return char in self.reportchars", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_ensure_prefix_TerminalReporter.write_line.self__tw_line_line_mar": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_ensure_prefix_TerminalReporter.write_line.self__tw_line_line_mar", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 399, "end_line": 423, "span_ids": ["TerminalReporter.write_line", "TerminalReporter.write", "TerminalReporter.ensure_newline", "TerminalReporter.flush", "TerminalReporter.write_ensure_prefix"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def write_ensure_prefix(self, prefix: str, extra: str = \"\", **kwargs) -> None:\n if self.currentfspath != prefix:\n self._tw.line()\n self.currentfspath = prefix\n self._tw.write(prefix)\n if extra:\n self._tw.write(extra, **kwargs)\n self.currentfspath = -2\n\n def ensure_newline(self) -> None:\n if self.currentfspath:\n self._tw.line()\n self.currentfspath = None\n\n def write(self, content: str, *, flush: bool = False, **markup: bool) -> None:\n self._tw.write(content, flush=flush, **markup)\n\n def flush(self) -> None:\n self._tw.flush()\n\n def write_line(self, line: Union[str, bytes], **markup: bool) -> None:\n if not isinstance(line, str):\n line = str(line, errors=\"replace\")\n self.ensure_newline()\n self._tw.line(line, **markup)", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.rewrite_TerminalReporter.rewrite.self__tw_write_r_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.rewrite_TerminalReporter.rewrite.self__tw_write_r_lin", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 425, "end_line": 441, "span_ids": ["TerminalReporter.rewrite"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def rewrite(self, line: str, **markup: bool) -> None:\n \"\"\"Rewinds the terminal cursor to the beginning and writes the given line.\n\n :param erase:\n If True, will also add spaces until the full terminal width to ensure\n previous lines are properly erased.\n\n The rest of the keyword arguments are markup instructions.\n \"\"\"\n erase = markup.pop(\"erase\", False)\n if erase:\n fill_count = self._tw.fullwidth - len(line) - 1\n fill = \" \" * fill_count\n else:\n fill = \"\"\n line = str(line)\n self._tw.write(\"\\r\" + line + fill, **markup)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_progress_information_message_TerminalReporter._get_progress_information_message.if_self__show_progress_in.else_.return._100_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_progress_information_message_TerminalReporter._get_progress_information_message.if_self__show_progress_in.else_.return._100_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 596, "end_line": 611, "span_ids": ["TerminalReporter._get_progress_information_message"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _get_progress_information_message(self) -> str:\n assert self._session\n collected = self._session.testscollected\n if self._show_progress_info == \"count\":\n if collected:\n progress = self._progress_nodeids_reported\n counter_format = \"{{:{}d}}\".format(len(str(collected)))\n format_string = f\" [{counter_format}/{{}}]\"\n return format_string.format(len(progress), collected)\n return f\" [ {collected} / {collected} ]\"\n else:\n if collected:\n return \" [{:3d}%]\".format(\n len(self._progress_nodeids_reported) * 100 // collected\n )\n return \" [100%]\"", "start_char_idx": 
null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._write_progress_information_filling_space_TerminalReporter.pytest_collectreport.if_self_isatty_.self_report_collect_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._write_progress_information_filling_space_TerminalReporter.pytest_collectreport.if_self_isatty_.self_report_collect_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 613, "end_line": 641, "span_ids": ["TerminalReporter._write_progress_information_filling_space", "TerminalReporter._width_of_current_line", "TerminalReporter.pytest_collection", "TerminalReporter.pytest_collectreport"], "tokens": 279}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _write_progress_information_filling_space(self) -> None:\n color, _ = self._get_main_color()\n msg = self._get_progress_information_message()\n w = self._width_of_current_line\n fill = self._tw.fullwidth - w - 1\n self.write(msg.rjust(fill), flush=True, **{color: True})\n\n @property\n def _width_of_current_line(self) -> int:\n \"\"\"Return the width of the current line.\"\"\"\n return self._tw.width_of_current_line\n\n def pytest_collection(self) -> None:\n if self.isatty:\n if self.config.option.verbose >= 0:\n self.write(\"collecting ... \", flush=True, bold=True)\n self._collect_report_last_write = timing.time()\n elif self.config.option.verbose >= 1:\n self.write(\"collecting ... 
\", flush=True, bold=True)\n\n def pytest_collectreport(self, report: CollectReport) -> None:\n if report.failed:\n self._add_stats(\"error\", [report])\n elif report.skipped:\n self._add_stats(\"skipped\", [report])\n items = [x for x in report.result if isinstance(x, Item)]\n self._numcollected += len(items)\n if self.isatty:\n self.report_collect()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.report_collect_TerminalReporter.report_collect.if_self_isatty_.else_.self_write_line_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.report_collect_TerminalReporter.report_collect.if_self_isatty_.else_.self_write_line_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 643, "end_line": 681, "span_ids": ["TerminalReporter.report_collect"], "tokens": 344}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def report_collect(self, final: bool = False) -> None:\n if self.config.option.verbose < 0:\n return\n\n if not final:\n # Only write \"collecting\" report every 0.5s.\n t = timing.time()\n if (\n self._collect_report_last_write is not None\n and self._collect_report_last_write > t - REPORT_COLLECTING_RESOLUTION\n ):\n return\n self._collect_report_last_write = t\n\n errors = len(self.stats.get(\"error\", []))\n skipped = len(self.stats.get(\"skipped\", []))\n deselected = len(self.stats.get(\"deselected\", []))\n selected = self._numcollected - errors - skipped - deselected\n if final:\n line = \"collected \"\n else:\n line = \"collecting \"\n line += (\n str(self._numcollected) + \" item\" + (\"\" if self._numcollected == 1 else \"s\")\n )\n if errors:\n line += \" / %d error%s\" % (errors, \"s\" if errors != 1 else \"\")\n if deselected:\n line += \" / %d deselected\" % deselected\n if skipped:\n line += \" / %d skipped\" % skipped\n if self._numcollected > selected > 0:\n line += \" / %d selected\" % selected\n if self.isatty:\n self.rewrite(line, bold=True, erase=True)\n if final:\n self.write(\"\\n\")\n else:\n self.write_line(line)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._write_report_lines_from_hooks_TerminalReporter.pytest_report_header.return.result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._write_report_lines_from_hooks_TerminalReporter.pytest_report_header.return.result", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 712, "end_line": 737, "span_ids": ["TerminalReporter.pytest_report_header", 
"TerminalReporter._write_report_lines_from_hooks"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _write_report_lines_from_hooks(\n self, lines: Sequence[Union[str, Sequence[str]]]\n ) -> None:\n for line_or_lines in reversed(lines):\n if isinstance(line_or_lines, str):\n self.write_line(line_or_lines)\n else:\n for line in line_or_lines:\n self.write_line(line)\n\n def pytest_report_header(self, config: Config) -> List[str]:\n line = \"rootdir: %s\" % config.rootpath\n\n if config.inipath:\n line += \", configfile: \" + bestrelpath(config.rootpath, config.inipath)\n\n testpaths: List[str] = config.getini(\"testpaths\")\n if config.invocation_params.dir == config.rootpath and config.args == testpaths:\n line += \", testpaths: {}\".format(\", \".join(testpaths))\n\n result = [line]\n\n plugininfo = config.pluginmanager.list_plugin_distinfo()\n if plugininfo:\n result.append(\"plugins: %s\" % \", \".join(_plugin_nameversions(plugininfo)))\n return result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_sessionfinish_TerminalReporter.pytest_sessionfinish.self_summary_stats_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_sessionfinish_TerminalReporter.pytest_sessionfinish.self_summary_stats_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 796, "end_line": 821, "span_ids": ["TerminalReporter.pytest_sessionfinish"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n @hookimpl(hookwrapper=True)\n def pytest_sessionfinish(\n self, session: \"Session\", exitstatus: Union[int, ExitCode]\n ):\n outcome = yield\n outcome.get_result()\n self._tw.line(\"\")\n summary_exit_codes = (\n ExitCode.OK,\n ExitCode.TESTS_FAILED,\n ExitCode.INTERRUPTED,\n ExitCode.USAGE_ERROR,\n ExitCode.NO_TESTS_COLLECTED,\n )\n if exitstatus in summary_exit_codes and not self.no_summary:\n self.config.hook.pytest_terminal_summary(\n terminalreporter=self, exitstatus=exitstatus, config=self.config\n )\n if session.shouldfail:\n self.write_sep(\"!\", str(session.shouldfail), red=True)\n if exitstatus == ExitCode.INTERRUPTED:\n self._report_keyboardinterrupt()\n self._keyboardinterrupt_memo = None\n elif session.shouldstop:\n self.write_sep(\"!\", str(session.shouldstop), red=True)\n self.summary_stats()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._report_keyboardinterrupt_TerminalReporter._report_keyboardinterrupt.if_KeyboardInterrupt_in.if_self_config_option_ful.else_.self__tw_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._report_keyboardinterrupt_TerminalReporter._report_keyboardinterrupt.if_KeyboardInterrupt_in.if_self_config_option_ful.else_.self__tw_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 841, "end_line": 855, "span_ids": ["TerminalReporter._report_keyboardinterrupt"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _report_keyboardinterrupt(self) -> None:\n excrepr = self._keyboardinterrupt_memo\n assert excrepr is not None\n assert excrepr.reprcrash is not None\n msg = excrepr.reprcrash.message\n self.write_sep(\"!\", msg)\n if \"KeyboardInterrupt\" in msg:\n if self.config.option.fulltrace:\n excrepr.toterminal(self._tw)\n else:\n excrepr.reprcrash.toterminal(self._tw)\n self._tw.line(\n \"(to show a full traceback on KeyboardInterrupt use --full-trace)\",\n yellow=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._locationline_TerminalReporter._locationline.return.res_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._locationline_TerminalReporter._locationline.return.res_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 857, "end_line": 877, "span_ids": ["TerminalReporter._locationline"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _locationline(self, nodeid, fspath, lineno, domain):\n def mkrel(nodeid):\n line = self.config.cwd_relative_nodeid(nodeid)\n if domain and line.endswith(domain):\n line = line[: -len(domain)]\n values = domain.split(\"[\")\n values[0] = values[0].replace(\".\", \"::\") # don't replace '.' 
in params\n line += \"[\".join(values)\n return line\n\n # collect_fspath comes from testid which has a \"/\"-normalized path.\n\n if fspath:\n res = mkrel(nodeid)\n if self.verbosity >= 2 and nodeid.split(\"::\")[0] != fspath.replace(\n \"\\\\\", nodes.SEP\n ):\n res += \" <- \" + bestrelpath(self.startpath, fspath)\n else:\n res = \"[location]\"\n return res + \" \"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._getfailureheadline_TerminalReporter.getreports.return.values": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._getfailureheadline_TerminalReporter.getreports.return.values", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 879, "end_line": 902, "span_ids": ["TerminalReporter.getreports", "TerminalReporter._getcrashline", "TerminalReporter._getfailureheadline"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _getfailureheadline(self, rep):\n head_line = rep.head_line\n if head_line:\n return head_line\n return \"test session\" # XXX?\n\n def _getcrashline(self, rep):\n try:\n return str(rep.longrepr.reprcrash)\n except AttributeError:\n try:\n return str(rep.longrepr)[:50]\n except AttributeError:\n return \"\"\n\n #\n # Summaries for sessionfinish.\n #\n def getreports(self, name: str):\n values = []\n for x in self.stats.get(name, []):\n if not hasattr(x, \"_pdbshown\"):\n values.append(x)\n return values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_warnings_TerminalReporter.summary_warnings.if_self_hasopt_w_.self__tw_line_Docs_h": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_warnings_TerminalReporter.summary_warnings.if_self_hasopt_w_.self__tw_line_Docs_h", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 904, "end_line": 954, "span_ids": ["TerminalReporter.summary_warnings"], "tokens": 427}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def summary_warnings(self) -> None:\n if self.hasopt(\"w\"):\n all_warnings: Optional[List[WarningReport]] = self.stats.get(\"warnings\")\n if not all_warnings:\n return\n\n final = self._already_displayed_warnings is not None\n if final:\n 
warning_reports = all_warnings[self._already_displayed_warnings :]\n else:\n warning_reports = all_warnings\n self._already_displayed_warnings = len(warning_reports)\n if not warning_reports:\n return\n\n reports_grouped_by_message: Dict[str, List[WarningReport]] = {}\n for wr in warning_reports:\n reports_grouped_by_message.setdefault(wr.message, []).append(wr)\n\n def collapsed_location_report(reports: List[WarningReport]) -> str:\n locations = []\n for w in reports:\n location = w.get_location(self.config)\n if location:\n locations.append(location)\n\n if len(locations) < 10:\n return \"\\n\".join(map(str, locations))\n\n counts_by_filename = Counter(\n str(loc).split(\"::\", 1)[0] for loc in locations\n )\n return \"\\n\".join(\n \"{}: {} warning{}\".format(k, v, \"s\" if v > 1 else \"\")\n for k, v in counts_by_filename.items()\n )\n\n title = \"warnings summary (final)\" if final else \"warnings summary\"\n self.write_sep(\"=\", title, yellow=True, bold=False)\n for message, message_reports in reports_grouped_by_message.items():\n maybe_location = collapsed_location_report(message_reports)\n if maybe_location:\n self._tw.line(maybe_location)\n lines = message.splitlines()\n indented = \"\\n\".join(\" \" + x for x in lines)\n message = indented.rstrip()\n else:\n message = message.rstrip()\n self._tw.line(message)\n self._tw.line()\n self._tw.line(\"-- Docs: https://docs.pytest.org/en/stable/warnings.html\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary.show_skipped_TerminalReporter.short_test_summary.show_skipped.for_num_fspath_lineno_.if_lineno_is_not_None_.else_.lines_append_s_d_s_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary.show_skipped_TerminalReporter.short_test_summary.show_skipped.for_num_fspath_lineno_.if_lineno_is_not_None_.else_.lines_append_s_d_s_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1109, "end_line": 1124, "span_ids": ["TerminalReporter.short_test_summary"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def short_test_summary(self) -> None:\n # ... other code\n\n def show_skipped(lines: List[str]) -> None:\n skipped: List[CollectReport] = self.stats.get(\"skipped\", [])\n fskips = _folded_skips(self.startpath, skipped) if skipped else []\n if not fskips:\n return\n verbose_word = skipped[0]._get_verbose_word(self.config)\n for num, fspath, lineno, reason in fskips:\n if reason.startswith(\"Skipped: \"):\n reason = reason[9:]\n if lineno is not None:\n lines.append(\n \"%s [%d] %s:%d: %s\"\n % (verbose_word, num, fspath, lineno, reason)\n )\n else:\n lines.append(\"%s [%d] %s: %s\" % (verbose_word, num, fspath, reason))\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__folded_skips__folded_skips.return.values": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__folded_skips__folded_skips.return.values", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1310, "end_line": 1338, "span_ids": ["_folded_skips"], "tokens": 309}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _folded_skips(\n startpath: Path,\n skipped: Sequence[CollectReport],\n) -> List[Tuple[int, str, Optional[int], str]]:\n d: Dict[Tuple[str, Optional[int], str], List[CollectReport]] = {}\n for event in skipped:\n assert event.longrepr is not None\n assert isinstance(event.longrepr, tuple), (event, event.longrepr)\n assert len(event.longrepr) == 3, (event, event.longrepr)\n fspath, lineno, reason = event.longrepr\n # For consistency, report all fspaths in relative form.\n fspath = bestrelpath(startpath, Path(fspath))\n keywords = getattr(event, \"keywords\", {})\n # Folding reports with global pytestmark variable.\n # This is a workaround, because for now we cannot identify the scope of a skip marker\n # TODO: Revisit after marks scope would be fixed.\n if (\n event.when == \"setup\"\n and \"skip\" in keywords\n and \"pytestmark\" not in keywords\n ):\n key: Tuple[str, Optional[int], str] = (fspath, None, reason)\n else:\n key = (fspath, lineno, reason)\n d.setdefault(key, []).append(event)\n values: List[Tuple[int, str, Optional[int], str]] = []\n for key, events in d.items():\n values.append((len(events), *key))\n return values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__make_xunit_fixture__make_xunit_fixture.return.fixture": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__make_xunit_fixture__make_xunit_fixture.return.fixture", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 123, "end_line": 181, "span_ids": ["_make_xunit_fixture"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _make_xunit_fixture(\n obj: type,\n setup_name: str,\n teardown_name: str,\n cleanup_name: Optional[str],\n scope: \"_Scope\",\n pass_self: bool,\n):\n setup = getattr(obj, setup_name, None)\n teardown = getattr(obj, teardown_name, None)\n if setup is None and teardown is None:\n return None\n\n if 
cleanup_name:\n cleanup = getattr(obj, cleanup_name, lambda *args: None)\n else:\n\n def cleanup(*args):\n pass\n\n @pytest.fixture(\n scope=scope,\n autouse=True,\n # Use a unique name to speed up lookup.\n name=f\"unittest_{setup_name}_fixture_{obj.__qualname__}\",\n )\n def fixture(self, request: FixtureRequest) -> Generator[None, None, None]:\n if _is_skipped(self):\n reason = self.__unittest_skip_why__\n raise pytest.skip.Exception(reason, _use_item_location=True)\n if setup is not None:\n try:\n if pass_self:\n setup(self, request.function)\n else:\n setup()\n # unittest does not call the cleanup function for every BaseException, so we\n # follow this here.\n except Exception:\n if pass_self:\n cleanup(self)\n else:\n cleanup()\n\n raise\n yield\n try:\n if teardown is not None:\n if pass_self:\n teardown(self, request.function)\n else:\n teardown()\n finally:\n if pass_self:\n cleanup(self)\n else:\n cleanup()\n\n return fixture", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_PytestExperimentalApiWarning_PytestExperimentalApiWarning.simple.return.cls_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_PytestExperimentalApiWarning_PytestExperimentalApiWarning.simple.return.cls_", "embedding": null, "metadata": {"file_path": "src/_pytest/warning_types.py", "file_name": "warning_types.py", "file_type": "text/x-python", "category": "implementation", "start_line": 52, "end_line": 68, "span_ids": ["PytestExperimentalApiWarning.simple", "PytestExperimentalApiWarning"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestExperimentalApiWarning(PytestWarning, FutureWarning):\n \"\"\"Warning category used to denote experiments in pytest.\n\n Use sparingly as the API might change or even be removed completely in a\n future version.\n \"\"\"\n\n __module__ = \"pytest\"\n\n @classmethod\n def simple(cls, apiname: str) -> \"PytestExperimentalApiWarning\":\n return cls(\n \"{apiname} is an experimental api that may change over time\".format(\n apiname=apiname\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_hook_error_issue38_1_TestGeneralUsage.test_early_hook_error_issue38_1.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_hook_error_issue38_1_TestGeneralUsage.test_early_hook_error_issue38_1.None_2", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 36, "end_line": 54, "span_ids": ["TestGeneralUsage.test_early_hook_error_issue38_1"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_early_hook_error_issue38_1(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_sessionstart():\n 0 / 0\n \"\"\"\n )\n result = pytester.runpytest(pytester.path)\n assert result.ret != 0\n # tracestyle is native by default for hook failures\n result.stdout.fnmatch_lines(\n [\"*INTERNALERROR*File*conftest.py*line 2*\", \"*0 / 0*\"]\n )\n result = pytester.runpytest(pytester.path, \"--fulltrace\")\n assert result.ret != 0\n # tracestyle is native by default for hook failures\n result.stdout.fnmatch_lines(\n [\"*INTERNALERROR*def pytest_sessionstart():*\", \"*INTERNALERROR*0 / 0*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_config_preparse_plugin_option_TestGeneralUsage.test_config_preparse_plugin_option.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_config_preparse_plugin_option_TestGeneralUsage.test_config_preparse_plugin_option.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 89, "end_line": 104, "span_ids": ["TestGeneralUsage.test_config_preparse_plugin_option"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_config_preparse_plugin_option(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n pytest_xyz=\"\"\"\n def pytest_addoption(parser):\n parser.addoption(\"--xyz\", dest=\"xyz\", action=\"store\")\n \"\"\"\n )\n pytester.makepyfile(\n test_one=\"\"\"\n def test_option(pytestconfig):\n assert pytestconfig.option.xyz == \"123\"\n \"\"\"\n )\n result = pytester.runpytest(\"-p\", \"pytest_xyz\", \"--xyz=123\", syspathinsert=True)\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_load_setuptools_name_TestGeneralUsage.test_early_load_setuptools_name.if_load_cov_early_.else_.assert_loaded_myplug": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_load_setuptools_name_TestGeneralUsage.test_early_load_setuptools_name.if_load_cov_early_.else_.assert_loaded_myplug", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 106, "end_line": 150, "span_ids": 
["TestGeneralUsage.test_early_load_setuptools_name", "TestGeneralUsage.test_early_load_setuptools_name.DummyDist", "TestGeneralUsage.test_early_load_setuptools_name.DummyDist:2", "TestGeneralUsage.test_early_load_setuptools_name.DummyEntryPoint", "TestGeneralUsage.test_early_load_setuptools_name.DummyEntryPoint:2"], "tokens": 341}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n @pytest.mark.parametrize(\"load_cov_early\", [True, False])\n def test_early_load_setuptools_name(\n self, pytester: Pytester, monkeypatch, load_cov_early\n ) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\")\n\n pytester.makepyfile(mytestplugin1_module=\"\")\n pytester.makepyfile(mytestplugin2_module=\"\")\n pytester.makepyfile(mycov_module=\"\")\n pytester.syspathinsert()\n\n loaded = []\n\n @attr.s\n class DummyEntryPoint:\n name = attr.ib()\n module = attr.ib()\n group = \"pytest11\"\n\n def load(self):\n __import__(self.module)\n loaded.append(self.name)\n return sys.modules[self.module]\n\n entry_points = [\n DummyEntryPoint(\"myplugin1\", \"mytestplugin1_module\"),\n DummyEntryPoint(\"myplugin2\", \"mytestplugin2_module\"),\n DummyEntryPoint(\"mycov\", \"mycov_module\"),\n ]\n\n @attr.s\n class DummyDist:\n entry_points = attr.ib()\n files = ()\n\n def my_dists():\n return (DummyDist(entry_points),)\n\n monkeypatch.setattr(importlib_metadata, \"distributions\", my_dists)\n params = (\"-p\", \"mycov\") if load_cov_early else ()\n pytester.runpytest_inprocess(*params)\n if load_cov_early:\n assert loaded == [\"mycov\", \"myplugin1\", \"myplugin2\"]\n else:\n assert loaded == [\"myplugin1\", \"myplugin2\", \"mycov\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_skip_TestGeneralUsage.test_conftest_printing_shows_if_error.assert_should_be_seen_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_skip_TestGeneralUsage.test_conftest_printing_shows_if_error.assert_should_be_seen_i", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 227, "end_line": 268, "span_ids": ["TestGeneralUsage.test_early_skip", "TestGeneralUsage.test_conftest_printing_shows_if_error", "TestGeneralUsage.test_issue93_initialnode_importing_capturing", "TestGeneralUsage.test_issue88_initial_file_multinodes"], "tokens": 362}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_early_skip(self, pytester: Pytester) -> None:\n pytester.mkdir(\"xyz\")\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_collect_file():\n pytest.skip(\"early\")\n \"\"\"\n )\n result = 
pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stdout.fnmatch_lines([\"*1 skip*\"])\n\n def test_issue88_initial_file_multinodes(self, pytester: Pytester) -> None:\n pytester.copy_example(\"issue88_initial_file_multinodes\")\n p = pytester.makepyfile(\"def test_hello(): pass\")\n result = pytester.runpytest(p, \"--collect-only\")\n result.stdout.fnmatch_lines([\"*MyFile*test_issue88*\", \"*Module*test_issue88*\"])\n\n def test_issue93_initialnode_importing_capturing(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import sys\n print(\"should not be seen\")\n sys.stderr.write(\"stder42\\\\n\")\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stdout.no_fnmatch_line(\"*should not be seen*\")\n assert \"stderr42\" not in result.stderr.str()\n\n def test_conftest_printing_shows_if_error(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n print(\"should be seen\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n assert \"should be seen\" in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_issue109_sibling_conftests_not_loaded_TestGeneralUsage.test_directory_skipped.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_issue109_sibling_conftests_not_loaded_TestGeneralUsage.test_directory_skipped.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 270, "end_line": 295, "span_ids": ["TestGeneralUsage.test_directory_skipped", "TestGeneralUsage.test_issue109_sibling_conftests_not_loaded"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_issue109_sibling_conftests_not_loaded(self, pytester: Pytester) -> None:\n sub1 = pytester.mkdir(\"sub1\")\n sub2 = pytester.mkdir(\"sub2\")\n sub1.joinpath(\"conftest.py\").write_text(\"assert 0\")\n result = pytester.runpytest(sub2)\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n sub2.joinpath(\"__init__.py\").touch()\n p = sub2.joinpath(\"test_hello.py\")\n p.touch()\n result = pytester.runpytest(p)\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result = pytester.runpytest(sub1)\n assert result.ret == ExitCode.USAGE_ERROR\n\n def test_directory_skipped(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_ignore_collect():\n pytest.skip(\"intentional\")\n \"\"\"\n )\n pytester.makepyfile(\"def test_hello(): pass\")\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stdout.fnmatch_lines([\"*1 skipped*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_multiple_items_per_collector_byid_TestGeneralUsage.test_multiple_items_per_collector_byid.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_multiple_items_per_collector_byid_TestGeneralUsage.test_multiple_items_per_collector_byid.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 297, "end_line": 314, "span_ids": ["TestGeneralUsage.test_multiple_items_per_collector_byid"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_multiple_items_per_collector_byid(self, pytester: Pytester) -> None:\n c = pytester.makeconftest(\n \"\"\"\n import pytest\n class MyItem(pytest.Item):\n def runtest(self):\n pass\n class MyCollector(pytest.File):\n def collect(self):\n return [MyItem.from_parent(name=\"xyz\", parent=self)]\n def pytest_collect_file(path, parent):\n if path.basename.startswith(\"conftest\"):\n return MyCollector.from_parent(fspath=path, parent=parent)\n \"\"\"\n )\n result = pytester.runpytest(c.name + \"::\" + \"xyz\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 pass*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_skip_on_generated_funcarg_id_TestGeneralUsage.test_skip_on_generated_funcarg_id.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_skip_on_generated_funcarg_id_TestGeneralUsage.test_skip_on_generated_funcarg_id.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 316, "end_line": 332, "span_ids": ["TestGeneralUsage.test_skip_on_generated_funcarg_id"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_skip_on_generated_funcarg_id(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n metafunc.parametrize('x', [3], ids=['hello-123'])\n def pytest_runtest_setup(item):\n print(item.keywords)\n if 'hello-123' in item.keywords:\n pytest.skip(\"hello\")\n assert 0\n \"\"\"\n )\n p = pytester.makepyfile(\"\"\"def test_func(x): pass\"\"\")\n res = pytester.runpytest(p)\n assert res.ret == 0\n res.stdout.fnmatch_lines([\"*1 skipped*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_issue134_report_error_when_collecting_member_TestGeneralUsage.test_issue134_report_error_when_collecting_member.if_in_lookfor_._usage_error_only_if_ite": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_issue134_report_error_when_collecting_member_TestGeneralUsage.test_issue134_report_error_when_collecting_member.if_in_lookfor_._usage_error_only_if_ite", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 377, "end_line": 391, "span_ids": ["TestGeneralUsage.test_issue134_report_error_when_collecting_member"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n @pytest.mark.parametrize(\"lookfor\", [\"test_fun.py::test_a\"])\n def test_issue134_report_error_when_collecting_member(\n self, pytester: Pytester, lookfor\n ) -> None:\n pytester.makepyfile(\n test_fun=\"\"\"\n def test_a():\n pass\n def\"\"\"\n )\n result = pytester.runpytest(lookfor)\n result.stdout.fnmatch_lines([\"*SyntaxError*\"])\n if \"::\" in lookfor:\n result.stderr.fnmatch_lines([\"*ERROR*\"])\n assert result.ret == 4 # usage error only if item not found", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_namespace_import_doesnt_confuse_import_hook_TestGeneralUsage.test_namespace_import_doesnt_confuse_import_hook.assert_res_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_namespace_import_doesnt_confuse_import_hook_TestGeneralUsage.test_namespace_import_doesnt_confuse_import_hook.assert_res_ret_0", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 409, "end_line": 432, "span_ids": ["TestGeneralUsage.test_namespace_import_doesnt_confuse_import_hook"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_namespace_import_doesnt_confuse_import_hook(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Ref #383.\n\n Python 3.3's namespace package messed with our import hooks.\n Importing a module that didn't exist, even if the ImportError was\n gracefully handled, would make our test crash.\n \"\"\"\n pytester.mkdir(\"not_a_package\")\n p = pytester.makepyfile(\n \"\"\"\n try:\n from not_a_package import doesnt_exist\n except ImportError:\n # We 
handle the import error gracefully here\n pass\n\n def test_whatever():\n pass\n \"\"\"\n )\n res = pytester.runpytest(p.name)\n assert res.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_unknown_option_TestGeneralUsage.test_getsourcelines_error_issue553.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_unknown_option_TestGeneralUsage.test_getsourcelines_error_issue553.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 434, "end_line": 461, "span_ids": ["TestGeneralUsage.test_getsourcelines_error_issue553", "TestGeneralUsage.test_unknown_option"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_unknown_option(self, pytester: Pytester) -> None:\n result = pytester.runpytest(\"--qwlkej\")\n result.stderr.fnmatch_lines(\n \"\"\"\n *unrecognized*\n \"\"\"\n )\n\n def test_getsourcelines_error_issue553(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n monkeypatch.setattr(\"inspect.getsourcelines\", None)\n p = pytester.makepyfile(\n \"\"\"\n def raise_error(obj):\n raise OSError('source code not available')\n\n import inspect\n inspect.getsourcelines = raise_error\n\n def test_foo(invalid_fixture):\n pass\n \"\"\"\n )\n res = pytester.runpytest(p)\n res.stdout.fnmatch_lines(\n [\"*source code not available*\", \"E*fixture 'invalid_fixture' not found\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_plugins_given_as_strings_TestGeneralUsage.test_plugins_given_as_strings.assert_pytest_main_args_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_plugins_given_as_strings_TestGeneralUsage.test_plugins_given_as_strings.assert_pytest_main_args_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 463, "end_line": 476, "span_ids": ["TestGeneralUsage.test_plugins_given_as_strings"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_plugins_given_as_strings(\n self, pytester: Pytester, monkeypatch, _sys_snapshot\n ) -> None:\n \"\"\"Test that str values passed to main() as `plugins` arg are\n interpreted as module 
names to be imported and registered (#855).\"\"\"\n with pytest.raises(ImportError) as excinfo:\n pytest.main([str(pytester.path)], plugins=[\"invalid.module\"])\n assert \"invalid\" in str(excinfo.value)\n\n p = pytester.path.joinpath(\"test_test_plugins_given_as_strings.py\")\n p.write_text(\"def test_foo(): pass\")\n mod = types.ModuleType(\"myplugin\")\n monkeypatch.setitem(sys.modules, \"myplugin\", mod)\n assert pytest.main(args=[str(pytester.path)], plugins=[\"myplugin\"]) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_parametrized_with_bytes_regex_TestGeneralUsage.test_parametrized_with_null_bytes.res_assert_outcomes_passe": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_parametrized_with_bytes_regex_TestGeneralUsage.test_parametrized_with_null_bytes.res_assert_outcomes_passe", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 478, "end_line": 503, "span_ids": ["TestGeneralUsage.test_parametrized_with_null_bytes", "TestGeneralUsage.test_parametrized_with_bytes_regex"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_parametrized_with_bytes_regex(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import re\n import pytest\n @pytest.mark.parametrize('r', [re.compile(b'foo')])\n def test_stuff(r):\n pass\n \"\"\"\n )\n res = pytester.runpytest(p)\n res.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_parametrized_with_null_bytes(self, pytester: Pytester) -> None:\n \"\"\"Test parametrization with values that contain null bytes and unicode characters (#2644, #2957)\"\"\"\n p = pytester.makepyfile(\n \"\"\"\\\n import pytest\n\n @pytest.mark.parametrize(\"data\", [b\"\\\\x00\", \"\\\\x00\", 'a\u00e7\u00e3o'])\n def test_foo(data):\n assert data\n \"\"\"\n )\n res = pytester.runpytest(p)\n res.assert_outcomes(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_package_not_exists_TestInvocationVariants.test_noclass_discovery_if_not_testcase.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_package_not_exists_TestInvocationVariants.test_noclass_discovery_if_not_testcase.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 812, "end_line": 831, "span_ids": ["TestInvocationVariants.test_cmdline_python_package_not_exists", 
"TestInvocationVariants.test_noclass_discovery_if_not_testcase"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n\n def test_cmdline_python_package_not_exists(self, pytester: Pytester) -> None:\n result = pytester.runpytest(\"--pyargs\", \"tpkgwhatv\")\n assert result.ret\n result.stderr.fnmatch_lines([\"ERROR*module*or*package*not*found*\"])\n\n @pytest.mark.xfail(reason=\"decide: feature or bug\")\n def test_noclass_discovery_if_not_testcase(self, pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class TestHello(object):\n def test_hello(self):\n assert self.attr\n\n class RealTest(unittest.TestCase, TestHello):\n attr = 42\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_doctest_id_TestInvocationVariants.test_has_plugin.assert_request_config_plu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_doctest_id_TestInvocationVariants.test_has_plugin.assert_request_config_plu", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 833, "end_line": 869, "span_ids": ["TestInvocationVariants.test_has_plugin", "TestInvocationVariants.test_core_backward_compatibility", "TestInvocationVariants.test_doctest_id"], "tokens": 320}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n\n def test_doctest_id(self, pytester: Pytester) -> None:\n pytester.makefile(\n \".txt\",\n \"\"\"\n >>> x=3\n >>> x\n 4\n \"\"\",\n )\n testid = \"test_doctest_id.txt::test_doctest_id.txt\"\n expected_lines = [\n \"*= FAILURES =*\",\n \"*_ ?doctest? test_doctest_id.txt _*\",\n \"FAILED test_doctest_id.txt::test_doctest_id.txt\",\n \"*= 1 failed in*\",\n ]\n result = pytester.runpytest(testid, \"-rf\", \"--tb=short\")\n result.stdout.fnmatch_lines(expected_lines)\n\n # Ensure that re-running it will still handle it as\n # doctest.DocTestFailure, which was not the case before when\n # re-importing doctest, but not creating a new RUNNER_CLASS.\n result = pytester.runpytest(testid, \"-rf\", \"--tb=short\")\n result.stdout.fnmatch_lines(expected_lines)\n\n def test_core_backward_compatibility(self) -> None:\n \"\"\"Test backward compatibility for get_plugin_manager function. 
See #787.\"\"\"\n import _pytest.config\n\n assert (\n type(_pytest.config.get_plugin_manager())\n is _pytest.config.PytestPluginManager\n )\n\n def test_has_plugin(self, request) -> None:\n \"\"\"Test hasplugin function of the plugin manager (#932).\"\"\"\n assert request.config.pluginmanager.hasplugin(\"python\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_import_plugin_unicode_name_test_pytest_plugins_as_module.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_import_plugin_unicode_name_test_pytest_plugins_as_module.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1008, "end_line": 1027, "span_ids": ["test_import_plugin_unicode_name", "test_pytest_plugins_as_module"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_import_plugin_unicode_name(pytester: Pytester) -> None:\n pytester.makepyfile(myplugin=\"\")\n pytester.makepyfile(\"def test(): pass\")\n pytester.makeconftest(\"pytest_plugins = ['myplugin']\")\n r = pytester.runpytest()\n assert r.ret == 0\n\n\ndef test_pytest_plugins_as_module(pytester: Pytester) -> None:\n \"\"\"Do not raise an error if pytest_plugins attribute is a module (#3899)\"\"\"\n pytester.makepyfile(\n **{\n \"__init__.py\": \"\",\n \"pytest_plugins.py\": \"\",\n \"conftest.py\": \"from . 
import pytest_plugins\",\n \"test_foo.py\": \"def test(): pass\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_deferred_hook_checking_test_deferred_hook_checking.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_deferred_hook_checking_test_deferred_hook_checking.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1030, "end_line": 1055, "span_ids": ["test_deferred_hook_checking"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_deferred_hook_checking(pytester: Pytester) -> None:\n \"\"\"Check hooks as late as possible (#1821).\"\"\"\n pytester.syspathinsert()\n pytester.makepyfile(\n **{\n \"plugin.py\": \"\"\"\n class Hooks(object):\n def pytest_my_hook(self, config):\n pass\n\n def pytest_configure(config):\n config.pluginmanager.add_hookspecs(Hooks)\n \"\"\",\n \"conftest.py\": \"\"\"\n pytest_plugins = ['plugin']\n def pytest_my_hook(config):\n return 40\n \"\"\",\n \"test_foo.py\": \"\"\"\n def test(request):\n assert request.config.hook.pytest_my_hook(config=request.config) == [40]\n \"\"\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 passed *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_fixture_values_leak_test_fixture_values_leak.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_fixture_values_leak_test_fixture_values_leak.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1058, "end_line": 1103, "span_ids": ["test_fixture_values_leak"], "tokens": 306}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixture_values_leak(pytester: Pytester) -> None:\n \"\"\"Ensure that fixture objects are properly destroyed by the garbage collector at the end of their expected\n life-times (#2981).\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import attr\n import gc\n import pytest\n import weakref\n\n @attr.s\n class SomeObj(object):\n name = attr.ib()\n\n fix_of_test1_ref = None\n session_ref = None\n\n @pytest.fixture(scope='session')\n def session_fix():\n global session_ref\n obj 
= SomeObj(name='session-fixture')\n session_ref = weakref.ref(obj)\n return obj\n\n @pytest.fixture\n def fix(session_fix):\n global fix_of_test1_ref\n obj = SomeObj(name='local-fixture')\n fix_of_test1_ref = weakref.ref(obj)\n return obj\n\n def test1(fix):\n assert fix_of_test1_ref() is fix\n\n def test2():\n gc.collect()\n # fixture \"fix\" created during test1 must have been destroyed by now\n assert fix_of_test1_ref() is None\n \"\"\"\n )\n # Running on subprocess does not activate the HookRecorder\n # which holds itself a reference to objects in case of the\n # pytest_assert_reprcompare hook\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"* 2 passed *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_fixture_order_respects_scope_test_fixture_order_respects_scope.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_fixture_order_respects_scope_test_fixture_order_respects_scope.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1106, "end_line": 1128, "span_ids": ["test_fixture_order_respects_scope"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixture_order_respects_scope(pytester: Pytester) -> None:\n \"\"\"Ensure that fixtures are created according to scope order (#2405).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n data = {}\n\n @pytest.fixture(scope='module')\n def clean_data():\n data.clear()\n\n @pytest.fixture(autouse=True)\n def add_data():\n data.update(value=True)\n\n @pytest.mark.usefixtures('clean_data')\n def test_value():\n assert data.get('value')\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_code_getargs_test_code_getargs.assert_c4_getargs_var_Tru": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_code_getargs_test_code_getargs.assert_c4_getargs_var_Tru", "embedding": null, "metadata": {"file_path": "testing/code/test_code.py", "file_name": "test_code.py", "file_type": "text/x-python", "category": "test", "start_line": 92, "end_line": 115, "span_ids": ["test_code_getargs"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_code_getargs() -> None:\n def f1(x):\n raise NotImplementedError()\n\n c1 = Code.from_function(f1)\n assert 
c1.getargs(var=True) == (\"x\",)\n\n def f2(x, *y):\n raise NotImplementedError()\n\n c2 = Code.from_function(f2)\n assert c2.getargs(var=True) == (\"x\", \"y\")\n\n def f3(x, **z):\n raise NotImplementedError()\n\n c3 = Code.from_function(f3)\n assert c3.getargs(var=True) == (\"x\", \"z\")\n\n def f4(x, *y, **z):\n raise NotImplementedError()\n\n c4 = Code.from_function(f4)\n assert c4.getargs(var=True) == (\"x\", \"y\", \"z\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_frame_getargs_test_frame_getargs.assert_fr4_getargs_var_Tr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_frame_getargs_test_frame_getargs.assert_fr4_getargs_var_Tr", "embedding": null, "metadata": {"file_path": "testing/code/test_code.py", "file_name": "test_code.py", "file_type": "text/x-python", "category": "test", "start_line": 115, "end_line": 138, "span_ids": ["test_frame_getargs"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_frame_getargs() -> None:\n def f1(x) -> FrameType:\n return sys._getframe(0)\n\n fr1 = Frame(f1(\"a\"))\n assert fr1.getargs(var=True) == [(\"x\", \"a\")]\n\n def f2(x, *y) -> FrameType:\n return sys._getframe(0)\n\n fr2 = Frame(f2(\"a\", \"b\", \"c\"))\n assert fr2.getargs(var=True) == [(\"x\", \"a\"), (\"y\", (\"b\", \"c\"))]\n\n def f3(x, **z) -> FrameType:\n return sys._getframe(0)\n\n fr3 = Frame(f3(\"a\", b=\"c\"))\n assert fr3.getargs(var=True) == [(\"x\", \"a\"), (\"z\", {\"b\": \"c\"})]\n\n def f4(x, *y, **z) -> FrameType:\n return sys._getframe(0)\n\n fr4 = Frame(f4(\"a\", \"b\", c=\"d\"))\n assert fr4.getargs(var=True) == [(\"x\", \"a\"), (\"y\", (\"b\",)), (\"z\", {\"c\": \"d\"})]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_getstatement_test_excinfo_getstatement._xxx": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_getstatement_test_excinfo_getstatement._xxx", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 50, "end_line": 71, "span_ids": ["test_excinfo_getstatement"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_excinfo_getstatement():\n def g():\n raise ValueError\n\n def f():\n g()\n\n try:\n f()\n except ValueError:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n linenumbers = [\n f.__code__.co_firstlineno - 1 + 4,\n f.__code__.co_firstlineno - 1 + 1,\n g.__code__.co_firstlineno - 1 
+ 1,\n ]\n values = list(excinfo.traceback)\n foundlinenumbers = [x.lineno for x in values]\n assert foundlinenumbers == linenumbers\n # for x in info:\n # print \"%s:%d %s\" %(x.path.relto(root), x.lineno, x.statement)\n # xxx", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py__testchain_for_getentrie_TestTraceback_f_g_h.test_traceback_entry_getsource.assert_s_endswith_raise_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py__testchain_for_getentrie_TestTraceback_f_g_h.test_traceback_entry_getsource.assert_s_endswith_raise_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 74, "end_line": 120, "span_ids": ["TestTraceback_f_g_h", "TestTraceback_f_g_h.setup_method", "TestTraceback_f_g_h.test_traceback_entry_getsource", "TestTraceback_f_g_h.test_traceback_entries", "g", "test_excinfo_getstatement", "h", "f"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# testchain for getentries test below\n\n\ndef f():\n #\n raise ValueError\n #\n\n\ndef g():\n #\n __tracebackhide__ = True\n f()\n #\n\n\ndef h():\n #\n g()\n #\n\n\nclass TestTraceback_f_g_h:\n def setup_method(self, method):\n try:\n h()\n except ValueError:\n self.excinfo = _pytest._code.ExceptionInfo.from_current()\n\n def test_traceback_entries(self):\n tb = self.excinfo.traceback\n entries = list(tb)\n assert len(tb) == 4 # maybe fragile test\n assert len(entries) == 4 # maybe fragile test\n names = [\"f\", \"g\", \"h\"]\n for entry in entries:\n try:\n names.remove(entry.frame.code.name)\n except ValueError:\n pass\n assert not names\n\n def test_traceback_entry_getsource(self):\n tb = self.excinfo.traceback\n s = str(tb[-1].getsource())\n assert s.startswith(\"def f():\")\n assert s.endswith(\"raise ValueError\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_getcrashentry_TestTraceback_f_g_h.test_traceback_getcrashentry_empty.assert_entry_frame_code_n": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_getcrashentry_TestTraceback_f_g_h.test_traceback_getcrashentry_empty.assert_entry_frame_code_n", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 279, "end_line": 317, "span_ids": ["TestTraceback_f_g_h.test_traceback_getcrashentry_empty", "TestTraceback_f_g_h.test_traceback_getcrashentry"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceback_f_g_h:\n\n def test_traceback_getcrashentry(self):\n def i():\n __tracebackhide__ = True\n raise ValueError\n\n def h():\n i()\n\n def g():\n __tracebackhide__ = True\n h()\n\n def f():\n g()\n\n excinfo = pytest.raises(ValueError, f)\n tb = excinfo.traceback\n entry = tb.getcrashentry()\n co = _pytest._code.Code.from_function(h)\n assert entry.frame.code.path == co.path\n assert entry.lineno == co.firstlineno + 1\n assert entry.frame.code.name == \"h\"\n\n def test_traceback_getcrashentry_empty(self):\n def g():\n __tracebackhide__ = True\n raise ValueError\n\n def f():\n __tracebackhide__ = True\n g()\n\n excinfo = pytest.raises(ValueError, f)\n tb = excinfo.traceback\n entry = tb.getcrashentry()\n co = _pytest._code.Code.from_function(g)\n assert entry.frame.code.path == co.path\n assert entry.lineno == co.firstlineno + 2\n assert entry.frame.code.name == \"g\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo_TestFormattedExcinfo.test_repr_source.assert_lines_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo_TestFormattedExcinfo.test_repr_source.assert_lines_1_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 437, "end_line": 462, "span_ids": ["TestFormattedExcinfo.test_repr_source", "TestFormattedExcinfo.importasmod", "TestFormattedExcinfo"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n @pytest.fixture\n def importasmod(self, tmp_path: Path, _sys_snapshot):\n def importasmod(source):\n source = textwrap.dedent(source)\n modpath = tmp_path.joinpath(\"mod.py\")\n tmp_path.joinpath(\"__init__.py\").touch()\n modpath.write_text(source)\n importlib.invalidate_caches()\n return import_path(modpath)\n\n return importasmod\n\n def test_repr_source(self):\n pr = FormattedExcinfo()\n source = _pytest._code.Source(\n \"\"\"\\\n def f(x):\n pass\n \"\"\"\n ).strip()\n pr.flow_marker = \"|\"\n lines = pr.get_source(source, 0)\n assert len(lines) == 2\n assert lines[0] == \"| def f(x):\"\n assert lines[1] == \" pass\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_TestFormattedExcinfo.test_repr_local.assert_reprlocals_lines_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_TestFormattedExcinfo.test_repr_local.assert_reprlocals_lines_3", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": 
"test", "start_line": 532, "end_line": 541, "span_ids": ["TestFormattedExcinfo.test_repr_local"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_local(self) -> None:\n p = FormattedExcinfo(showlocals=True)\n loc = {\"y\": 5, \"z\": 7, \"x\": 3, \"@x\": 2, \"__builtins__\": {}}\n reprlocals = p.repr_locals(loc)\n assert reprlocals is not None\n assert reprlocals.lines\n assert reprlocals.lines[0] == \"__builtins__ = \"\n assert reprlocals.lines[1] == \"x = 3\"\n assert reprlocals.lines[2] == \"y = 5\"\n assert reprlocals.lines[3] == \"z = 7\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_with_error_TestFormattedExcinfo.test_repr_local_with_error.assert_NotImplementedEr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_with_error_TestFormattedExcinfo.test_repr_local_with_error.assert_NotImplementedEr", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 543, "end_line": 554, "span_ids": ["TestFormattedExcinfo.test_repr_local_with_error.ObjWithErrorInRepr", "TestFormattedExcinfo.test_repr_local_with_error", "TestFormattedExcinfo.test_repr_local_with_error.ObjWithErrorInRepr.__repr__"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_local_with_error(self) -> None:\n class ObjWithErrorInRepr:\n def __repr__(self):\n raise NotImplementedError\n\n p = FormattedExcinfo(showlocals=True, truncate_locals=False)\n loc = {\"x\": ObjWithErrorInRepr(), \"__builtins__\": {}}\n reprlocals = p.repr_locals(loc)\n assert reprlocals is not None\n assert reprlocals.lines\n assert reprlocals.lines[0] == \"__builtins__ = \"\n assert \"[NotImplementedError() raised in repr()]\" in reprlocals.lines[1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_with_exception_in_class_property_TestFormattedExcinfo.test_repr_local_with_exception_in_class_property.assert_ExceptionWithBro": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_with_exception_in_class_property_TestFormattedExcinfo.test_repr_local_with_exception_in_class_property.assert_ExceptionWithBro", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", 
"category": "test", "start_line": 556, "end_line": 573, "span_ids": ["TestFormattedExcinfo.test_repr_local_with_exception_in_class_property", "TestFormattedExcinfo.test_repr_local_with_exception_in_class_property.ExceptionWithBrokenClass", "TestFormattedExcinfo.test_repr_local_with_exception_in_class_property.ObjWithErrorInRepr.__repr__", "TestFormattedExcinfo.test_repr_local_with_exception_in_class_property.ObjWithErrorInRepr", "TestFormattedExcinfo.test_repr_local_with_exception_in_class_property.ExceptionWithBrokenClass:2"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_local_with_exception_in_class_property(self) -> None:\n class ExceptionWithBrokenClass(Exception):\n # Type ignored because it's bypassed intentionally.\n @property # type: ignore\n def __class__(self):\n raise TypeError(\"boom!\")\n\n class ObjWithErrorInRepr:\n def __repr__(self):\n raise ExceptionWithBrokenClass()\n\n p = FormattedExcinfo(showlocals=True, truncate_locals=False)\n loc = {\"x\": ObjWithErrorInRepr(), \"__builtins__\": {}}\n reprlocals = p.repr_locals(loc)\n assert reprlocals is not None\n assert reprlocals.lines\n assert reprlocals.lines[0] == \"__builtins__ = \"\n assert \"[ExceptionWithBrokenClass() raised in repr()]\" in reprlocals.lines[1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines_TestFormattedExcinfo.test_repr_tracebackentry_lines._assert_loc_message_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines_TestFormattedExcinfo.test_repr_tracebackentry_lines._assert_loc_message_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 589, "end_line": 620, "span_ids": ["TestFormattedExcinfo.test_repr_tracebackentry_lines"], "tokens": 302}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_tracebackentry_lines(self, importasmod) -> None:\n mod = importasmod(\n \"\"\"\n def func1():\n raise ValueError(\"hello\\\\nworld\")\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.func1)\n excinfo.traceback = excinfo.traceback.filter()\n p = FormattedExcinfo()\n reprtb = p.repr_traceback_entry(excinfo.traceback[-1])\n\n # test as intermittent entry\n lines = reprtb.lines\n assert lines[0] == \" def func1():\"\n assert lines[1] == '> raise ValueError(\"hello\\\\nworld\")'\n\n # test as last entry\n p = FormattedExcinfo(showlocals=True)\n repr_entry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)\n lines = repr_entry.lines\n assert lines[0] == \" def func1():\"\n assert 
lines[1] == '> raise ValueError(\"hello\\\\nworld\")'\n assert lines[2] == \"E ValueError: hello\"\n assert lines[3] == \"E world\"\n assert not lines[4:]\n\n loc = repr_entry.reprfileloc\n assert loc is not None\n assert loc.path == mod.__file__\n assert loc.lineno == 3\n # assert loc.message == \"ValueError: hello\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_no_TestFormattedExcinfo.test_repr_tracebackentry_no.assert_not_lines_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_no_TestFormattedExcinfo.test_repr_tracebackentry_no.assert_not_lines_1_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 736, "end_line": 753, "span_ids": ["TestFormattedExcinfo.test_repr_tracebackentry_no"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_tracebackentry_no(self, importasmod):\n mod = importasmod(\n \"\"\"\n def func1():\n raise ValueError(\"hello\")\n def entry():\n func1()\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n p = FormattedExcinfo(style=\"no\")\n p.repr_traceback_entry(excinfo.traceback[-2])\n\n p = FormattedExcinfo(style=\"no\")\n reprentry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)\n lines = reprentry.lines\n assert lines[0] == \"E ValueError: hello\"\n assert not lines[1:]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_tbfilter_TestFormattedExcinfo.test_repr_traceback_tbfilter.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_tbfilter_TestFormattedExcinfo.test_repr_traceback_tbfilter.None_1", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 755, "end_line": 770, "span_ids": ["TestFormattedExcinfo.test_repr_traceback_tbfilter"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_traceback_tbfilter(self, importasmod):\n mod = importasmod(\n \"\"\"\n def f(x):\n raise ValueError(x)\n def entry():\n f(0)\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n p = FormattedExcinfo(tbfilter=True)\n reprtb = p.repr_traceback(excinfo)\n assert 
len(reprtb.reprentries) == 2\n p = FormattedExcinfo(tbfilter=False)\n reprtb = p.repr_traceback(excinfo)\n assert len(reprtb.reprentries) == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_traceback_short_no_source_TestFormattedExcinfo.test_traceback_short_no_source.assert_last_lines_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_traceback_short_no_source_TestFormattedExcinfo.test_traceback_short_no_source.assert_last_lines_1_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 740, "end_line": 763, "span_ids": ["TestFormattedExcinfo.test_traceback_short_no_source"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_traceback_short_no_source(self, importasmod, monkeypatch) -> None:\n mod = importasmod(\n \"\"\"\n def func1():\n raise ValueError(\"hello\")\n def entry():\n func1()\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n from _pytest._code.code import Code\n\n monkeypatch.setattr(Code, \"path\", \"bogus\")\n p = FormattedExcinfo(style=\"short\")\n reprtb = p.repr_traceback_entry(excinfo.traceback[-2])\n lines = reprtb.lines\n last_p = FormattedExcinfo(style=\"short\")\n last_reprtb = last_p.repr_traceback_entry(excinfo.traceback[-1], excinfo)\n last_lines = last_reprtb.lines\n monkeypatch.undo()\n assert lines[0] == \" func1()\"\n\n assert last_lines[0] == ' raise ValueError(\"hello\")'\n assert last_lines[1] == \"E ValueError: hello\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_excinfo_addouterr_TestFormattedExcinfo.test_repr_excinfo_reprcrash.assert_str_repr_reprcrash": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_excinfo_addouterr_TestFormattedExcinfo.test_repr_excinfo_reprcrash.assert_str_repr_reprcrash", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 843, "end_line": 870, "span_ids": ["TestFormattedExcinfo.test_repr_excinfo_addouterr", "TestFormattedExcinfo.test_repr_excinfo_reprcrash"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_excinfo_addouterr(self, importasmod, tw_mock):\n mod = importasmod(\n 
\"\"\"\n def entry():\n raise ValueError()\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n repr = excinfo.getrepr()\n repr.addsection(\"title\", \"content\")\n repr.toterminal(tw_mock)\n assert tw_mock.lines[-1] == \"content\"\n assert tw_mock.lines[-2] == (\"-\", \"title\")\n\n def test_repr_excinfo_reprcrash(self, importasmod) -> None:\n mod = importasmod(\n \"\"\"\n def entry():\n raise ValueError()\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n repr = excinfo.getrepr()\n assert repr.reprcrash is not None\n assert repr.reprcrash.path.endswith(\"mod.py\")\n assert repr.reprcrash.lineno == 3\n assert repr.reprcrash.message == \"ValueError\"\n assert str(repr.reprcrash).endswith(\"mod.py:3: ValueError\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_recursion_TestFormattedExcinfo.test_repr_traceback_recursion.for_style_in_short_l.assert_str_reprtb_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_recursion_TestFormattedExcinfo.test_repr_traceback_recursion.for_style_in_short_l.assert_str_reprtb_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 873, "end_line": 890, "span_ids": ["TestFormattedExcinfo.test_repr_traceback_recursion"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_traceback_recursion(self, importasmod):\n mod = importasmod(\n \"\"\"\n def rec2(x):\n return rec1(x+1)\n def rec1(x):\n return rec2(x-1)\n def entry():\n rec1(42)\n \"\"\"\n )\n excinfo = pytest.raises(RuntimeError, mod.entry)\n\n for style in (\"short\", \"long\", \"no\"):\n p = FormattedExcinfo(style=\"short\")\n reprtb = p.repr_traceback(excinfo)\n assert reprtb.extraline == \"!!! 
Recursion detected (same locals & position)\"\n assert str(reprtb)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_without_traceback_TestFormattedExcinfo.test_exc_chain_repr_without_traceback.matcher_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_without_traceback_TestFormattedExcinfo.test_exc_chain_repr_without_traceback.matcher_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1216, "end_line": 1274, "span_ids": ["TestFormattedExcinfo.test_exc_chain_repr_without_traceback"], "tokens": 378}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n @pytest.mark.parametrize(\n \"reason, description\",\n [\n pytest.param(\n \"cause\",\n \"The above exception was the direct cause of the following exception:\",\n id=\"cause\",\n ),\n pytest.param(\n \"context\",\n \"During handling of the above exception, another exception occurred:\",\n id=\"context\",\n ),\n ],\n )\n def test_exc_chain_repr_without_traceback(self, importasmod, reason, description):\n \"\"\"\n Handle representation of exception chains where one of the exceptions doesn't have a\n real traceback, such as those raised in a subprocess submitted by the multiprocessing\n module (#1984).\n \"\"\"\n exc_handling_code = \" from e\" if reason == \"cause\" else \"\"\n mod = importasmod(\n \"\"\"\n def f():\n try:\n g()\n except Exception as e:\n raise RuntimeError('runtime problem'){exc_handling_code}\n def g():\n raise ValueError('invalid value')\n \"\"\".format(\n exc_handling_code=exc_handling_code\n )\n )\n\n with pytest.raises(RuntimeError) as excinfo:\n mod.f()\n\n # emulate the issue described in #1984\n attr = \"__%s__\" % reason\n getattr(excinfo.value, attr).__traceback__ = None\n\n r = excinfo.getrepr()\n file = io.StringIO()\n tw = TerminalWriter(file=file)\n tw.hasmarkup = False\n r.toterminal(tw)\n\n matcher = LineMatcher(file.getvalue().splitlines())\n matcher.fnmatch_lines(\n [\n \"ValueError: invalid value\",\n description,\n \"* except Exception as e:\",\n \"> * raise RuntimeError('runtime problem')\" + exc_handling_code,\n \"E *RuntimeError: runtime problem\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_exception_repr_extraction_error_on_recursion_test_exception_repr_extraction_error_on_recursion.matcher_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_exception_repr_extraction_error_on_recursion_test_exception_repr_extraction_error_on_recursion.matcher_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": 
"test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1347, "end_line": 1378, "span_ids": ["test_exception_repr_extraction_error_on_recursion.numpy_like", "test_exception_repr_extraction_error_on_recursion.numpy_like.__eq__", "test_exception_repr_extraction_error_on_recursion"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"limited_recursion_depth\")\ndef test_exception_repr_extraction_error_on_recursion():\n \"\"\"\n Ensure we can properly detect a recursion error even\n if some locals raise error on comparison (#2459).\n \"\"\"\n\n class numpy_like:\n def __eq__(self, other):\n if type(other) is numpy_like:\n raise ValueError(\n \"The truth value of an array \"\n \"with more than one element is ambiguous.\"\n )\n\n def a(x):\n return b(numpy_like())\n\n def b(x):\n return a(numpy_like())\n\n with pytest.raises(RuntimeError) as excinfo:\n a(numpy_like())\n\n matcher = LineMatcher(str(excinfo.getrepr()).splitlines())\n matcher.fnmatch_lines(\n [\n \"!!! Recursion error detected, but an error occurred locating the origin of recursion.\",\n \"*The following exception happened*\",\n \"*ValueError: The truth value of an array*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_no_recursion_index_on_recursion_error_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_no_recursion_index_on_recursion_error_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1398, "end_line": 1412, "span_ids": ["test_no_recursion_index_on_recursion_error.RecursionDepthError", "test_no_recursion_index_on_recursion_error.RecursionDepthError.__getattr__", "test_no_recursion_index_on_recursion_error"], "tokens": 108}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"limited_recursion_depth\")\ndef test_no_recursion_index_on_recursion_error():\n \"\"\"\n Ensure that we don't break in case we can't find the recursion index\n during a recursion error (#2486).\n \"\"\"\n\n class RecursionDepthError:\n def __getattr__(self, attr):\n return getattr(self, \"_\" + attr)\n\n with pytest.raises(RuntimeError) as excinfo:\n RecursionDepthError().trigger\n assert \"maximum recursion\" in str(excinfo.getrepr())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_code_of_object_instance_with_call_test_oneline_and_comment.assert_str_source_ra": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_code_of_object_instance_with_call_test_oneline_and_comment.assert_str_source_ra", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 381, "end_line": 430, "span_ids": ["test_code_of_object_instance_with_call", "test_code_of_object_instance_with_call.Hello.__call__", "test_code_of_object_instance_with_call.A", "test_code_of_object_instance_with_call.A:2", "test_oneline", "test_code_of_object_instance_with_call.WithCall.__call__", "test_code_of_object_instance_with_call.WithCall", "test_comment_and_no_newline_at_end", "getstatement", "test_oneline_and_comment", "test_code_of_object_instance_with_call.Hello"], "tokens": 311}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_code_of_object_instance_with_call() -> None:\n class A:\n pass\n\n pytest.raises(TypeError, lambda: Source(A()))\n\n class WithCall:\n def __call__(self) -> None:\n pass\n\n code = Code.from_function(WithCall())\n assert \"pass\" in str(code.source())\n\n class Hello:\n def __call__(self) -> None:\n pass\n\n pytest.raises(TypeError, lambda: Code.from_function(Hello))\n\n\ndef getstatement(lineno: int, source) -> Source:\n from _pytest._code.source import getstatementrange_ast\n\n src = Source(source)\n ast, start, end = getstatementrange_ast(lineno, src)\n return src[start:end]\n\n\ndef test_oneline() -> None:\n source = getstatement(0, \"raise ValueError\")\n assert str(source) == \"raise ValueError\"\n\n\ndef test_comment_and_no_newline_at_end() -> None:\n from _pytest._code.source import getstatementrange_ast\n\n source = Source(\n [\n \"def test_basic_complex():\",\n \" assert 1 == 2\",\n \"# vim: filetype=pyopencl:fdm=marker\",\n ]\n )\n ast, start, end = getstatementrange_ast(1, source)\n assert end == 2\n\n\ndef test_oneline_and_comment() -> None:\n source = getstatement(0, \"raise ValueError\\n#hello\")\n assert str(source) == \"raise ValueError\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestTry_TestTryFinally.test_finally.assert_str_source_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestTry_TestTryFinally.test_finally.assert_str_source_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 658, "end_line": 701, "span_ids": ["TestTry.test_except_line", "TestTry.test_except_body", "TestTry.test_body", "TestTryFinally.setup_class", "TestTry.setup_class", "TestTry.test_else", "TestTryFinally.test_body", "TestTry", "TestTryFinally", "TestTryFinally.test_finally"], "tokens": 270}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTry:\n def setup_class(self) -> None:\n self.source = \"\"\"\\\ntry:\n raise ValueError\nexcept Something:\n raise IndexError(1)\nelse:\n raise KeyError()\n\"\"\"\n\n def test_body(self) -> None:\n source = getstatement(1, self.source)\n assert str(source) == \" raise ValueError\"\n\n def test_except_line(self) -> None:\n source = getstatement(2, self.source)\n assert str(source) == \"except Something:\"\n\n def test_except_body(self) -> None:\n source = getstatement(3, self.source)\n assert str(source) == \" raise IndexError(1)\"\n\n def test_else(self) -> None:\n source = getstatement(5, self.source)\n assert str(source) == \" raise KeyError()\"\n\n\nclass TestTryFinally:\n def setup_class(self) -> None:\n self.source = \"\"\"\\\ntry:\n raise ValueError\nfinally:\n raise IndexError(1)\n\"\"\"\n\n def test_body(self) -> None:\n source = getstatement(1, self.source)\n assert str(source) == \" raise ValueError\"\n\n def test_finally(self) -> None:\n source = getstatement(3, self.source)\n assert str(source) == \" raise IndexError(1)\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestIf_TestIf.test_else.assert_str_source_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestIf_TestIf.test_else.assert_str_source_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 704, "end_line": 729, "span_ids": ["TestIf.test_elif", "TestIf.test_elif_clause", "TestIf.test_else", "TestIf.test_body", "TestIf", "TestIf.setup_class"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestIf:\n def setup_class(self) -> None:\n self.source = \"\"\"\\\nif 1:\n y = 3\nelif False:\n y = 5\nelse:\n y = 7\n\"\"\"\n\n def test_body(self) -> None:\n source = getstatement(1, self.source)\n assert str(source) == \" y = 3\"\n\n def test_elif_clause(self) -> None:\n source = getstatement(2, self.source)\n assert str(source) == \"elif False:\"\n\n def test_elif(self) -> None:\n source = getstatement(3, self.source)\n assert str(source) == \" y = 5\"\n\n def test_else(self) -> None:\n source = getstatement(5, self.source)\n assert str(source) == \" y = 7\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_semicolon_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_semicolon_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 604, "end_line": 646, "span_ids": ["test_semicolon", "test_def_online", "test_getstartingblock_multiline.A.__init__", "test_getstartingblock_multiline", "XXX_test_expression_multiline", 
"test_getstartingblock_multiline.A"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_semicolon() -> None:\n s = \"\"\"\\\nhello ; pytest.skip()\n\"\"\"\n source = getstatement(0, s)\n assert str(source) == s.strip()\n\n\ndef test_def_online() -> None:\n s = \"\"\"\\\ndef func(): raise ValueError(42)\n\ndef something():\n pass\n\"\"\"\n source = getstatement(0, s)\n assert str(source) == \"def func(): raise ValueError(42)\"\n\n\ndef XXX_test_expression_multiline() -> None:\n source = \"\"\"\\\nsomething\n'''\n'''\"\"\"\n result = getstatement(1, source)\n assert str(result) == \"'''\\n'''\"\n\n\ndef test_getstartingblock_multiline() -> None:\n class A:\n def __init__(self, *args):\n frame = sys._getframe(1)\n self.source = Frame(frame).statement\n\n # fmt: off\n x = A('x',\n 'y'\n ,\n 'z')\n # fmt: on\n values = [i for i in x.source.lines if i.strip()]\n assert len(values) == 4", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/acceptance/fixture_mock_integration.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/acceptance/fixture_mock_integration.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/acceptance/fixture_mock_integration.py", "file_name": "fixture_mock_integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 17, "span_ids": ["impl", "docstring", "test_foobar", "imports", "my_fixture"], "tokens": 78}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Reproduces issue #3774\"\"\"\nfrom unittest import mock\n\nimport pytest\n\nconfig = {\"mykey\": \"ORIGINAL\"}\n\n\n@pytest.fixture(scope=\"function\")\n@mock.patch.dict(config, {\"mykey\": \"MOCKED\"})\ndef my_fixture():\n return config[\"mykey\"]\n\n\ndef test_foobar(my_fixture):\n assert my_fixture == \"MOCKED\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/collect_init_tests/tests/__init__.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/collect_init_tests/tests/__init__.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/collect/collect_init_tests/tests/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_init"], "tokens": 6}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_init():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/collect_init_tests/tests/test_foo.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/collect_init_tests/tests/test_foo.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/collect/collect_init_tests/tests/test_foo.py", "file_name": "test_foo.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_foo"], "tokens": 7}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_foo():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_infinite_recursion/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_infinite_recursion/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/collect/package_infinite_recursion/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 3, "span_ids": ["pytest_ignore_collect"], "tokens": 9}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_ignore_collect(path):\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py", "file_name": "test_basic.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test"], "tokens": 5}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": 
"1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py", "file_name": "test_foo.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test"], "tokens": 5}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/config/collect_pytest_prefix/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/config/collect_pytest_prefix/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/config/collect_pytest_prefix/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 3, "span_ids": ["pytest_something"], "tokens": 7}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class pytest_something:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/config/collect_pytest_prefix/test_foo.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/config/collect_pytest_prefix/test_foo.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/config/collect_pytest_prefix/test_foo.py", "file_name": "test_foo.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_foo"], "tokens": 7}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_foo():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/conftest_usageerror/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/conftest_usageerror/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/conftest_usageerror/conftest.py", "file_name": "conftest.py", "file_type": 
"text/x-python", "category": "implementation", "start_line": 1, "end_line": 9, "span_ids": ["pytest_unconfigure", "pytest_configure"], "tokens": 31}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_configure(config):\n import pytest\n\n raise pytest.UsageError(\"hello\")\n\n\ndef pytest_unconfigure(config):\n print(\"pytest_unconfigure_called\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/dataclasses/test_compare_dataclasses.py", "file_name": "test_compare_dataclasses.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 15, "span_ids": ["test_dataclasses.SimpleDataObject", "test_dataclasses.SimpleDataObject:2", "test_dataclasses", "imports"], "tokens": 77}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from dataclasses import dataclass\nfrom dataclasses import field\n\n\ndef test_dataclasses() -> None:\n @dataclass\n class SimpleDataObject:\n field_a: int = field()\n field_b: str = field()\n\n left = SimpleDataObject(1, \"b\")\n right = SimpleDataObject(1, \"c\")\n\n assert left == right", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py", "file_name": "test_compare_dataclasses_field_comparison_off.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 15, "span_ids": ["test_dataclasses_with_attribute_comparison_off.SimpleDataObject", "test_dataclasses_with_attribute_comparison_off", "imports", "test_dataclasses_with_attribute_comparison_off.SimpleDataObject:2"], "tokens": 83}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from dataclasses import dataclass\nfrom dataclasses import field\n\n\ndef test_dataclasses_with_attribute_comparison_off() -> None:\n @dataclass\n class SimpleDataObject:\n field_a: int = 
field()\n field_b: str = field(compare=False)\n\n left = SimpleDataObject(1, \"b\")\n right = SimpleDataObject(1, \"c\")\n\n assert left == right", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py", "file_name": "test_compare_dataclasses_verbose.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 15, "span_ids": ["test_dataclasses_verbose.SimpleDataObject", "test_dataclasses_verbose.SimpleDataObject:2", "imports", "test_dataclasses_verbose"], "tokens": 78}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from dataclasses import dataclass\nfrom dataclasses import field\n\n\ndef test_dataclasses_verbose() -> None:\n @dataclass\n class SimpleDataObject:\n field_a: int = field()\n field_b: str = field()\n\n left = SimpleDataObject(1, \"b\")\n right = SimpleDataObject(1, \"c\")\n\n assert left == right", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/custom_item/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/custom_item/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/custom_item/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 16, "span_ids": ["CustomItem.runtest", "pytest_collect_file", "CustomItem", "CustomFile.collect", "imports", "CustomFile"], "tokens": 66}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\nclass CustomItem(pytest.Item):\n def runtest(self):\n pass\n\n\nclass CustomFile(pytest.File):\n def collect(self):\n yield CustomItem.from_parent(name=\"foo\", parent=self)\n\n\ndef pytest_collect_file(path, parent):\n return CustomFile.from_parent(fspath=path, parent=parent)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/custom_item/foo/test_foo.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/custom_item/foo/test_foo.py__", "embedding": null, "metadata": {"file_path": 
"testing/example_scripts/fixtures/custom_item/foo/test_foo.py", "file_name": "test_foo.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test"], "tokens": 5}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 8, "span_ids": ["arg1", "imports"], "tokens": 31}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef arg1(request):\n with pytest.raises(pytest.FixtureLookupError):\n request.getfixturevalue(\"arg2\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py", "file_name": "test_in_sub1.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_1"], "tokens": 9}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_1(arg1):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py__": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["arg2", "imports"], "tokens": 25}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef arg2(request):\n pytest.raises(Exception, request.getfixturevalue, \"arg1\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py", "file_name": "test_in_sub2.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_2"], "tokens": 9}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_2(arg2):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py", "file_name": "test_detect_recursive_dependency_error.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 16, "span_ids": ["fix2", "test", "imports", "fix1"], "tokens": 41}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef fix1(fix2):\n return 1\n\n\n@pytest.fixture\ndef fix2(fix1):\n return 1\n\n\ndef test(fix1):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["imports", "spam"], "tokens": 14}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef spam():\n return \"spam\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["imports", "spam"], "tokens": 17}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef spam(spam):\n return spam * 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py", "file_name": "test_spam.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_spam"], "tokens": 15}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_spam(spam):\n assert spam == \"spamspam\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["imports", "spam"], "tokens": 14}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef spam():\n return \"spam\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py", "file_name": "test_extend_fixture_conftest_module.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 11, "span_ids": ["test_spam", "imports", "spam"], "tokens": 33}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef spam(spam):\n return spam * 2\n\n\ndef test_spam(spam):\n assert spam == \"spamspam\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py", "file_name": "test_extend_fixture_module_class.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 16, "span_ids": ["TestSpam", "TestSpam.test_spam", "TestSpam.spam", "imports", "spam"], "tokens": 55}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef spam():\n return \"spam\"\n\n\nclass TestSpam:\n @pytest.fixture\n def spam(self, spam):\n return spam * 2\n\n def test_spam(self, spam):\n assert spam == \"spamspam\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py", "file_name": "test_funcarg_basic.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 16, "span_ids": ["test_func", "some", "imports", "other"], "tokens": 40}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef some(request):\n return request.function.__name__\n\n\n@pytest.fixture\ndef other(request):\n return 42\n\n\ndef test_func(some, other):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py", "file_name": "test_funcarg_lookup_classlevel.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 11, "span_ids": ["TestClass.something", "TestClass", "imports", "TestClass.test_method"], "tokens": 37}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\nclass TestClass:\n @pytest.fixture\n def something(self, request):\n return request.instance\n\n def test_method(self, something):\n assert something is self", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py", "file_name": "test_funcarg_lookup_modulelevel.py", "file_type": "text/x-python", "category": "test", "start_line": 1, 
"end_line": 16, "span_ids": ["TestClass", "TestClass.test_method", "something", "test_func", "imports"], "tokens": 52}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef something(request):\n return request.function.__name__\n\n\nclass TestClass:\n def test_method(self, something):\n assert something == \"test_method\"\n\n\ndef test_func(something):\n assert something == \"test_func\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py", "file_name": "test_funcarg_lookupfails.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 11, "span_ids": ["test_func", "imports", "xyzsomething"], "tokens": 24}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef xyzsomething(request):\n return 42\n\n\ndef test_func(some):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py", "file_name": "test_getfixturevalue_dynamic.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 21, "span_ids": ["test", "a", "dynamic", "b", "imports"], "tokens": 61}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef dynamic():\n pass\n\n\n@pytest.fixture\ndef a(request):\n request.getfixturevalue(\"dynamic\")\n\n\n@pytest.fixture\ndef b(a):\n pass\n\n\ndef test(b, request):\n assert request.fixturenames == [\"b\", \"request\", \"a\", \"dynamic\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue88_initial_file_multinodes/conftest.py__": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue88_initial_file_multinodes/conftest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/issue88_initial_file_multinodes/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 15, "span_ids": ["MyItem", "MyFile", "MyFile.collect", "pytest_collect_file", "imports"], "tokens": 61}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\nclass MyFile(pytest.File):\n def collect(self):\n return [MyItem.from_parent(name=\"hello\", parent=self)]\n\n\ndef pytest_collect_file(path, parent):\n return MyFile.from_parent(fspath=path, parent=parent)\n\n\nclass MyItem(pytest.Item):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue88_initial_file_multinodes/test_hello.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue88_initial_file_multinodes/test_hello.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/issue88_initial_file_multinodes/test_hello.py", "file_name": "test_hello.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_hello"], "tokens": 6}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_hello():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue_519.py_pprint_checked_order.assert_order_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue_519.py_pprint_checked_order.assert_order_", "embedding": null, "metadata": {"file_path": "testing/example_scripts/issue_519.py", "file_name": "issue_519.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 33, "span_ids": ["checked_order", "imports", "pytest_generate_tests"], "tokens": 370}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pprint\nfrom typing import List\nfrom typing import Tuple\n\nimport pytest\n\n\ndef pytest_generate_tests(metafunc):\n if \"arg1\" in metafunc.fixturenames:\n metafunc.parametrize(\"arg1\", [\"arg1v1\", \"arg1v2\"], scope=\"module\")\n\n if \"arg2\" in metafunc.fixturenames:\n metafunc.parametrize(\"arg2\", [\"arg2v1\", \"arg2v2\"], 
scope=\"function\")\n\n\n@pytest.fixture(scope=\"session\")\ndef checked_order():\n order: List[Tuple[str, str, str]] = []\n\n yield order\n pprint.pprint(order)\n assert order == [\n (\"testing/example_scripts/issue_519.py\", \"fix1\", \"arg1v1\"),\n (\"test_one[arg1v1-arg2v1]\", \"fix2\", \"arg2v1\"),\n (\"test_two[arg1v1-arg2v1]\", \"fix2\", \"arg2v1\"),\n (\"test_one[arg1v1-arg2v2]\", \"fix2\", \"arg2v2\"),\n (\"test_two[arg1v1-arg2v2]\", \"fix2\", \"arg2v2\"),\n (\"testing/example_scripts/issue_519.py\", \"fix1\", \"arg1v2\"),\n (\"test_one[arg1v2-arg2v1]\", \"fix2\", \"arg2v1\"),\n (\"test_two[arg1v2-arg2v1]\", \"fix2\", \"arg2v1\"),\n (\"test_one[arg1v2-arg2v2]\", \"fix2\", \"arg2v2\"),\n (\"test_two[arg1v2-arg2v2]\", \"fix2\", \"arg2v2\"),\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue_519.py_fix1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/issue_519.py_fix1_", "embedding": null, "metadata": {"file_path": "testing/example_scripts/issue_519.py", "file_name": "issue_519.py", "file_type": "text/x-python", "category": "implementation", "start_line": 36, "end_line": 54, "span_ids": ["fix2", "test_two", "test_one", "fix1"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture(scope=\"module\")\ndef fix1(request, arg1, checked_order):\n checked_order.append((request.node.name, \"fix1\", arg1))\n yield \"fix1-\" + arg1\n\n\n@pytest.fixture(scope=\"function\")\ndef fix2(request, fix1, arg2, checked_order):\n checked_order.append((request.node.name, \"fix2\", arg2))\n yield \"fix2-\" + arg2 + fix1\n\n\ndef test_one(fix2):\n pass\n\n\ndef test_two(fix2):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py", "file_name": "test_marks_as_keywords.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["test_mark", "imports"], "tokens": 13}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.mark.foo\ndef test_mark():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/perf_examples/collect_stats/generate_folders.py_argparse_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/perf_examples/collect_stats/generate_folders.py_argparse_", "embedding": null, "metadata": {"file_path": "testing/example_scripts/perf_examples/collect_stats/generate_folders.py", "file_name": "generate_folders.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 28, "span_ids": ["impl", "impl:8", "generate_folders", "imports"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import argparse\nimport pathlib\n\nHERE = pathlib.Path(__file__).parent\nTEST_CONTENT = (HERE / \"template_test.py\").read_bytes()\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"numbers\", nargs=\"*\", type=int)\n\n\ndef generate_folders(root, elements, *more_numbers):\n fill_len = len(str(elements))\n if more_numbers:\n for i in range(elements):\n new_folder = root.joinpath(f\"foo_{i:0>{fill_len}}\")\n new_folder.mkdir()\n new_folder.joinpath(\"__init__.py\").write_bytes(TEST_CONTENT)\n generate_folders(new_folder, *more_numbers)\n else:\n for i in range(elements):\n new_test = root.joinpath(f\"test_{i:0<{fill_len}}.py\")\n new_test.write_bytes(TEST_CONTENT)\n\n\nif __name__ == \"__main__\":\n args = parser.parse_args()\n generate_folders(HERE, *(args.numbers or (10, 100)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/perf_examples/collect_stats/template_test.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/perf_examples/collect_stats/template_test.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/perf_examples/collect_stats/template_test.py", "file_name": "template_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 3, "span_ids": ["test_x"], "tokens": 6}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_x():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/tmpdir/tmpdir_fixture.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/tmpdir/tmpdir_fixture.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/tmpdir/tmpdir_fixture.py", "file_name": "tmpdir_fixture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 8, "span_ids": ["test_fixture", "imports"], "tokens": 41}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.mark.parametrize(\"a\", [r\"qwe/\\abc\"])\ndef test_fixture(tmpdir, a):\n tmpdir.check(dir=1)\n assert tmpdir.listdir() == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_parametrized_fixture_error_message.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_parametrized_fixture_error_message.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/unittest/test_parametrized_fixture_error_message.py", "file_name": "test_parametrized_fixture_error_message.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 15, "span_ids": ["TestSomethingElse.test_two", "two", "imports", "TestSomethingElse"], "tokens": 46}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport pytest\n\n\n@pytest.fixture(params=[1, 2])\ndef two(request):\n return request.param\n\n\n@pytest.mark.usefixtures(\"two\")\nclass TestSomethingElse(unittest.TestCase):\n def test_two(self):\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/unittest/test_setup_skip.py", "file_name": "test_setup_skip.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 14, "span_ids": ["Base.setUp", "Base", "Test.test_foo", "docstring", "Test", "imports"], "tokens": 61}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Skipping an entire subclass with unittest.skip() should *not* call setUp from a base class.\"\"\"\nimport unittest\n\n\nclass Base(unittest.TestCase):\n def setUp(self):\n assert 0\n\n\n@unittest.skip(\"skip all tests\")\nclass Test(Base):\n def test_foo(self):\n assert 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip_class.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip_class.py__", "embedding": null, "metadata": {"file_path": 
"testing/example_scripts/unittest/test_setup_skip_class.py", "file_name": "test_setup_skip_class.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 15, "span_ids": ["Base", "Test.test_foo", "docstring", "Test", "imports", "Base.setUpClass"], "tokens": 67}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Skipping an entire subclass with unittest.skip() should *not* call setUpClass from a base class.\"\"\"\nimport unittest\n\n\nclass Base(unittest.TestCase):\n @classmethod\n def setUpClass(cls):\n assert 0\n\n\n@unittest.skip(\"skip all tests\")\nclass Test(Base):\n def test_foo(self):\n assert 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip_module.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_setup_skip_module.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/unittest/test_setup_skip_module.py", "file_name": "test_setup_skip_module.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 13, "span_ids": ["Base", "Base.test", "docstring", "setUpModule", "imports"], "tokens": 51}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"setUpModule is always called, even if all tests in the module are skipped\"\"\"\nimport unittest\n\n\ndef setUpModule():\n assert 0\n\n\n@unittest.skip(\"skip all tests\")\nclass Base(unittest.TestCase):\n def test(self):\n assert 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/warnings/test_group_warnings_by_message.py", "file_name": "test_group_warnings_by_message.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 22, "span_ids": ["func", "test_bar", "imports", "test_foo_1", "test_foo"], "tokens": 70}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import warnings\n\nimport pytest\n\n\ndef func(msg):\n warnings.warn(UserWarning(msg))\n\n\n@pytest.mark.parametrize(\"i\", range(5))\ndef test_foo(i):\n func(\"foo\")\n\n\ndef test_foo_1():\n 
func(\"foo\")\n\n\n@pytest.mark.parametrize(\"i\", range(5))\ndef test_bar(i):\n func(\"bar\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/examples/test_issue519.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/examples/test_issue519.py__", "embedding": null, "metadata": {"file_path": "testing/examples/test_issue519.py", "file_name": "test_issue519.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["test_510", "imports"], "tokens": 44}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from _pytest.pytester import Pytester\n\n\ndef test_510(pytester: Pytester) -> None:\n pytester.copy_example(\"issue_519.py\")\n pytester.runpytest(\"issue_519.py\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/create_executable.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/create_executable.py__", "embedding": null, "metadata": {"file_path": "testing/freeze/create_executable.py", "file_name": "create_executable.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 12, "span_ids": ["impl:2", "impl", "docstring", "imports"], "tokens": 95}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Generate an executable with pytest runner embedded using PyInstaller.\"\"\"\nif __name__ == \"__main__\":\n import pytest\n import subprocess\n\n hidden = []\n for x in pytest.freeze_includes():\n hidden.extend([\"--hidden-import\", x])\n hidden.extend([\"--hidden-import\", \"distutils\"])\n args = [\"pyinstaller\", \"--noconfirm\"] + hidden + [\"runtests_script.py\"]\n subprocess.check_call(\" \".join(args), shell=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/runtests_script.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/runtests_script.py__", "embedding": null, "metadata": {"file_path": "testing/freeze/runtests_script.py", "file_name": "runtests_script.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 11, "span_ids": ["impl:2", "impl", "docstring", "imports"], "tokens": 43}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"\nThis is the script that is actually frozen into an executable: simply executes\npytest main().\n\"\"\"\n\nif __name__ == \"__main__\":\n import sys\n import pytest\n\n sys.exit(pytest.main())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/tests/test_trivial.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/tests/test_trivial.py__", "embedding": null, "metadata": {"file_path": "testing/freeze/tests/test_trivial.py", "file_name": "test_trivial.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["test_upper", "test_lower"], "tokens": 32}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_upper():\n assert \"foo\".upper() == \"FOO\"\n\n\ndef test_lower():\n assert \"FOO\".lower() == \"foo\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/tox_run.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/freeze/tox_run.py__", "embedding": null, "metadata": {"file_path": "testing/freeze/tox_run.py", "file_name": "tox_run.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 13, "span_ids": ["impl:2", "impl", "docstring", "imports"], "tokens": 85}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"\nCalled by tox.ini: uses the generated executable to run the tests in ./tests/\ndirectory.\n\"\"\"\nif __name__ == \"__main__\":\n import os\n import sys\n\n executable = os.path.join(os.getcwd(), \"dist\", \"runtests_script\", \"runtests_script\")\n if sys.platform.startswith(\"win\"):\n executable += \".exe\"\n sys.exit(os.system(\"%s tests\" % executable))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_with_statement_test_log_access.assert_boo_arg_in_caplo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_with_statement_test_log_access.assert_boo_arg_in_caplo", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 52, "end_line": 72, "span_ids": ["test_log_access", "test_with_statement"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_with_statement(caplog):\n with caplog.at_level(logging.INFO):\n logger.debug(\"handler DEBUG level\")\n logger.info(\"handler INFO level\")\n\n with caplog.at_level(logging.CRITICAL, logger=sublogger.name):\n sublogger.warning(\"logger WARNING level\")\n sublogger.critical(\"logger CRITICAL level\")\n\n assert \"DEBUG\" not in caplog.text\n assert \"INFO\" in caplog.text\n assert \"WARNING\" not in caplog.text\n assert \"CRITICAL\" in caplog.text\n\n\ndef test_log_access(caplog):\n caplog.set_level(logging.INFO)\n logger.info(\"boo %s\", \"arg\")\n assert caplog.records[0].levelname == \"INFO\"\n assert caplog.records[0].msg == \"boo %s\"\n assert \"boo arg\" in caplog.text", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_messages_test_messages.assert_Exception_not_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_messages_test_messages.assert_Exception_not_in", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 76, "end_line": 94, "span_ids": ["test_messages"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_messages(caplog):\n caplog.set_level(logging.INFO)\n logger.info(\"boo %s\", \"arg\")\n logger.info(\"bar %s\\nbaz %s\", \"arg1\", \"arg2\")\n assert \"boo arg\" == caplog.messages[0]\n assert \"bar arg1\\nbaz arg2\" == caplog.messages[1]\n assert caplog.text.count(\"\\n\") > len(caplog.messages)\n assert len(caplog.text.splitlines()) > len(caplog.messages)\n\n try:\n raise Exception(\"test\")\n except Exception:\n logger.exception(\"oops\")\n\n assert \"oops\" in caplog.text\n assert \"oops\" in caplog.messages[-1]\n # Tracebacks are stored in the record and not added until the formatter or handler.\n assert \"Exception\" in caplog.text\n assert \"Exception\" not in caplog.messages[-1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_record_tuples_logging_during_setup_and_teardown.assert_x_message_for_x_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_record_tuples_logging_during_setup_and_teardown.assert_x_message_for_x_i", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 96, "end_line": 127, "span_ids": ["test_record_tuples", "logging_during_setup_and_teardown", "test_unicode", "test_clear"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_record_tuples(caplog):\n caplog.set_level(logging.INFO)\n logger.info(\"boo %s\", \"arg\")\n\n assert caplog.record_tuples == [(__name__, logging.INFO, \"boo arg\")]\n\n\ndef test_unicode(caplog):\n caplog.set_level(logging.INFO)\n logger.info(\"b\u016b\")\n assert caplog.records[0].levelname == \"INFO\"\n assert caplog.records[0].msg == \"b\u016b\"\n assert \"b\u016b\" in caplog.text\n\n\ndef test_clear(caplog):\n caplog.set_level(logging.INFO)\n logger.info(\"b\u016b\")\n assert len(caplog.records)\n assert caplog.text\n caplog.clear()\n assert not len(caplog.records)\n assert not caplog.text\n\n\n@pytest.fixture\ndef logging_during_setup_and_teardown(caplog):\n caplog.set_level(\"INFO\")\n logger.info(\"a_setup_log\")\n yield\n logger.info(\"a_teardown_log\")\n assert [x.message for x in caplog.get_records(\"teardown\")] == [\"a_teardown_log\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_messages_logged_test_messages_logged.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_messages_logged_test_messages_logged.None_3", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 33, "end_line": 52, "span_ids": ["test_messages_logged"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_messages_logged(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n import logging\n\n logger = logging.getLogger(__name__)\n\n def test_foo():\n sys.stdout.write('text going to stdout')\n sys.stderr.write('text going to stderr')\n logger.info('text going to logger')\n assert False\n \"\"\"\n )\n result = pytester.runpytest(\"--log-level=INFO\")\n assert result.ret == 1\n result.stdout.fnmatch_lines([\"*- Captured *log call -*\", \"*text going to logger*\"])\n result.stdout.fnmatch_lines([\"*- Captured stdout call -*\", \"text going to stdout\"])\n result.stdout.fnmatch_lines([\"*- Captured stderr call -*\", \"text going to stderr\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_root_logger_affected_test_root_logger_affected.with_open_log_file_as_rf.assert_error_text_going_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_root_logger_affected_test_root_logger_affected.with_open_log_file_as_rf.assert_error_text_going_", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": 
"text/x-python", "category": "test", "start_line": 55, "end_line": 88, "span_ids": ["test_root_logger_affected"], "tokens": 304}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_root_logger_affected(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import logging\n logger = logging.getLogger()\n\n def test_foo():\n logger.info('info text ' + 'going to logger')\n logger.warning('warning text ' + 'going to logger')\n logger.error('error text ' + 'going to logger')\n\n assert 0\n \"\"\"\n )\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n result = pytester.runpytest(\"--log-level=ERROR\", \"--log-file=pytest.log\")\n assert result.ret == 1\n\n # The capture log calls in the stdout section only contain the\n # logger.error msg, because of --log-level=ERROR.\n result.stdout.fnmatch_lines([\"*error text going to logger*\"])\n stdout = result.stdout.str()\n assert \"warning text going to logger\" not in stdout\n assert \"info text going to logger\" not in stdout\n\n # The log file should contain the warning and the error log messages and\n # not the info one, because the default level of the root logger is\n # WARNING.\n assert os.path.isfile(log_file)\n with open(log_file) as rfh:\n contents = rfh.read()\n assert \"info text going to logger\" not in contents\n assert \"warning text going to logger\" in contents\n assert \"error text going to logger\" in contents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_setup_logging_test_setup_logging.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_setup_logging_test_setup_logging.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 121, "end_line": 145, "span_ids": ["test_setup_logging"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_logging(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import logging\n\n logger = logging.getLogger(__name__)\n\n def setup_function(function):\n logger.info('text going to logger from setup')\n\n def test_foo():\n logger.info('text going to logger from call')\n assert False\n \"\"\"\n )\n result = pytester.runpytest(\"--log-level=INFO\")\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\n \"*- Captured *log setup -*\",\n \"*text going to logger from setup*\",\n \"*- Captured *log call -*\",\n \"*text going to logger from call*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", 
"class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_teardown_logging_test_teardown_logging.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_teardown_logging_test_teardown_logging.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 148, "end_line": 172, "span_ids": ["test_teardown_logging"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_teardown_logging(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import logging\n\n logger = logging.getLogger(__name__)\n\n def test_foo():\n logger.info('text going to logger from call')\n\n def teardown_function(function):\n logger.info('text going to logger from teardown')\n assert False\n \"\"\"\n )\n result = pytester.runpytest(\"--log-level=INFO\")\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\n \"*- Captured *log call -*\",\n \"*text going to logger from call*\",\n \"*- Captured *log teardown -*\",\n \"*text going to logger from teardown*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_enabled_disabled_test_log_cli_enabled_disabled.None_1.else_.assert_msg_not_in_result_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_enabled_disabled_test_log_cli_enabled_disabled.None_1.else_.assert_msg_not_in_result_", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 175, "end_line": 205, "span_ids": ["test_log_cli_enabled_disabled"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"enabled\", [True, False])\ndef test_log_cli_enabled_disabled(pytester: Pytester, enabled: bool) -> None:\n msg = \"critical message logged by test\"\n pytester.makepyfile(\n \"\"\"\n import logging\n def test_log_cli():\n logging.critical(\"{}\")\n \"\"\".format(\n msg\n )\n )\n if enabled:\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n \"\"\"\n )\n result = pytester.runpytest()\n if enabled:\n result.stdout.fnmatch_lines(\n [\n \"test_log_cli_enabled_disabled.py::test_log_cli \",\n \"*-- live log call --*\",\n \"CRITICAL *test_log_cli_enabled_disabled.py* critical message logged by test\",\n \"PASSED*\",\n ]\n )\n else:\n assert msg not in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_test_log_cli_default_level.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_test_log_cli_default_level.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 208, "end_line": 239, "span_ids": ["test_log_cli_default_level"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_cli_default_level(pytester: Pytester) -> None:\n # Default log file level\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_cli(request):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_cli_handler.level == logging.NOTSET\n logging.getLogger('catchlog').info(\"INFO message won't be shown\")\n logging.getLogger('catchlog').warning(\"WARNING message will be shown\")\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n \"\"\"\n )\n\n result = pytester.runpytest()\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines(\n [\n \"test_log_cli_default_level.py::test_log_cli \",\n \"WARNING*test_log_cli_default_level.py* message will be shown*\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*INFO message won't be shown*\")\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_multiple_tests_test_log_cli_default_level_multiple_tests.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_multiple_tests_test_log_cli_default_level_multiple_tests.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 242, "end_line": 276, "span_ids": ["test_log_cli_default_level_multiple_tests"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_cli_default_level_multiple_tests(\n pytester: Pytester, request: FixtureRequest\n) -> None:\n \"\"\"Ensure we reset the first newline added by the live logger between tests\"\"\"\n filename = request.node.name + \".py\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n def test_log_1():\n logging.warning(\"log message from test_log_1\")\n\n def test_log_2():\n 
logging.warning(\"log message from test_log_2\")\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n \"\"\"\n )\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n f\"{filename}::test_log_1 \",\n \"*WARNING*log message from test_log_1*\",\n \"PASSED *50%*\",\n f\"{filename}::test_log_2 \",\n \"*WARNING*log message from test_log_2*\",\n \"PASSED *100%*\",\n \"=* 2 passed in *=\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_sections_test_log_cli_default_level_sections.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_default_level_sections_test_log_cli_default_level_sections.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 279, "end_line": 352, "span_ids": ["test_log_cli_default_level_sections"], "tokens": 536}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_cli_default_level_sections(\n pytester: Pytester, request: FixtureRequest\n) -> None:\n \"\"\"Check that with live logging enable we are printing the correct headers during\n start/setup/call/teardown/finish.\"\"\"\n filename = request.node.name + \".py\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n import logging\n\n def pytest_runtest_logstart():\n logging.warning('>>>>> START >>>>>')\n\n def pytest_runtest_logfinish():\n logging.warning('<<<<< END <<<<<<<')\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n\n @pytest.fixture\n def fix(request):\n logging.warning(\"log message from setup of {}\".format(request.node.name))\n yield\n logging.warning(\"log message from teardown of {}\".format(request.node.name))\n\n def test_log_1(fix):\n logging.warning(\"log message from test_log_1\")\n\n def test_log_2(fix):\n logging.warning(\"log message from test_log_2\")\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n \"\"\"\n )\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n f\"{filename}::test_log_1 \",\n \"*-- live log start --*\",\n \"*WARNING* >>>>> START >>>>>*\",\n \"*-- live log setup --*\",\n \"*WARNING*log message from setup of test_log_1*\",\n \"*-- live log call --*\",\n \"*WARNING*log message from test_log_1*\",\n \"PASSED *50%*\",\n \"*-- live log teardown --*\",\n \"*WARNING*log message from teardown of test_log_1*\",\n \"*-- live log finish --*\",\n \"*WARNING* <<<<< END <<<<<<<*\",\n f\"{filename}::test_log_2 \",\n \"*-- live log start --*\",\n \"*WARNING* >>>>> START >>>>>*\",\n \"*-- live log setup --*\",\n \"*WARNING*log message from setup of test_log_2*\",\n \"*-- live log call --*\",\n \"*WARNING*log message from test_log_2*\",\n \"PASSED *100%*\",\n \"*-- live log teardown --*\",\n \"*WARNING*log message from teardown of test_log_2*\",\n \"*-- live log finish --*\",\n \"*WARNING* <<<<< END <<<<<<<*\",\n 
\"=* 2 passed in *=\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_live_logs_unknown_sections_test_live_logs_unknown_sections.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_live_logs_unknown_sections_test_live_logs_unknown_sections.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 355, "end_line": 416, "span_ids": ["test_live_logs_unknown_sections"], "tokens": 393}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_live_logs_unknown_sections(\n pytester: Pytester, request: FixtureRequest\n) -> None:\n \"\"\"Check that with live logging enable we are printing the correct headers during\n start/setup/call/teardown/finish.\"\"\"\n filename = request.node.name + \".py\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n import logging\n\n def pytest_runtest_protocol(item, nextitem):\n logging.warning('Unknown Section!')\n\n def pytest_runtest_logstart():\n logging.warning('>>>>> START >>>>>')\n\n def pytest_runtest_logfinish():\n logging.warning('<<<<< END <<<<<<<')\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n\n @pytest.fixture\n def fix(request):\n logging.warning(\"log message from setup of {}\".format(request.node.name))\n yield\n logging.warning(\"log message from teardown of {}\".format(request.node.name))\n\n def test_log_1(fix):\n logging.warning(\"log message from test_log_1\")\n\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n \"\"\"\n )\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*WARNING*Unknown Section*\",\n f\"{filename}::test_log_1 \",\n \"*WARNING* >>>>> START >>>>>*\",\n \"*-- live log setup --*\",\n \"*WARNING*log message from setup of test_log_1*\",\n \"*-- live log call --*\",\n \"*WARNING*log message from test_log_1*\",\n \"PASSED *100%*\",\n \"*-- live log teardown --*\",\n \"*WARNING*log message from teardown of test_log_1*\",\n \"*WARNING* <<<<< END <<<<<<<*\",\n \"=* 1 passed in *=\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_sections_single_new_line_after_test_outcome_test_sections_single_new_line_after_test_outcome.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_sections_single_new_line_after_test_outcome_test_sections_single_new_line_after_test_outcome.None_1", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 419, "end_line": 496, "span_ids": 
["test_sections_single_new_line_after_test_outcome"], "tokens": 511}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_sections_single_new_line_after_test_outcome(\n pytester: Pytester, request: FixtureRequest\n) -> None:\n \"\"\"Check that only a single new line is written between log messages during\n teardown/finish.\"\"\"\n filename = request.node.name + \".py\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n import logging\n\n def pytest_runtest_logstart():\n logging.warning('>>>>> START >>>>>')\n\n def pytest_runtest_logfinish():\n logging.warning('<<<<< END <<<<<<<')\n logging.warning('<<<<< END <<<<<<<')\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n\n @pytest.fixture\n def fix(request):\n logging.warning(\"log message from setup of {}\".format(request.node.name))\n yield\n logging.warning(\"log message from teardown of {}\".format(request.node.name))\n logging.warning(\"log message from teardown of {}\".format(request.node.name))\n\n def test_log_1(fix):\n logging.warning(\"log message from test_log_1\")\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n \"\"\"\n )\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n f\"{filename}::test_log_1 \",\n \"*-- live log start --*\",\n \"*WARNING* >>>>> START >>>>>*\",\n \"*-- live log setup --*\",\n \"*WARNING*log message from setup of test_log_1*\",\n \"*-- live log call --*\",\n \"*WARNING*log message from test_log_1*\",\n \"PASSED *100%*\",\n \"*-- live log teardown --*\",\n \"*WARNING*log message from teardown of test_log_1*\",\n \"*-- live log finish --*\",\n \"*WARNING* <<<<< END <<<<<<<*\",\n \"*WARNING* <<<<< END <<<<<<<*\",\n \"=* 1 passed in *=\",\n ]\n )\n assert (\n re.search(\n r\"(.+)live log teardown(.+)\\nWARNING(.+)\\nWARNING(.+)\",\n result.stdout.str(),\n re.MULTILINE,\n )\n is not None\n )\n assert (\n re.search(\n r\"(.+)live log finish(.+)\\nWARNING(.+)\\nWARNING(.+)\",\n result.stdout.str(),\n re.MULTILINE,\n )\n is not None\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_ini_level_test_log_cli_ini_level.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_ini_level_test_log_cli_ini_level.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 549, "end_line": 582, "span_ids": ["test_log_cli_ini_level"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_cli_ini_level(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n log_cli_level = INFO\n \"\"\"\n 
)\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_cli(request):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_cli_handler.level == logging.INFO\n logging.getLogger('catchlog').debug(\"This log message won't be shown\")\n logging.getLogger('catchlog').info(\"This log message will be shown\")\n print('PASSED')\n \"\"\"\n )\n\n result = pytester.runpytest(\"-s\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines(\n [\n \"*test_log_cli_ini_level.py* This log message will be shown\",\n \"PASSED\", # 'PASSED' on its own line because the log message prints a new line\n ]\n )\n result.stdout.no_fnmatch_line(\"*This log message won't be shown*\")\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_auto_enable_test_log_cli_auto_enable.if_cli_args_log_cli.else_.assert_WARNING_not_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_auto_enable_test_log_cli_auto_enable.if_cli_args_log_cli.else_.assert_WARNING_not_in_s", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 585, "end_line": 629, "span_ids": ["test_log_cli_auto_enable"], "tokens": 328}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"cli_args\",\n [\"\", \"--log-level=WARNING\", \"--log-file-level=WARNING\", \"--log-cli-level=WARNING\"],\n)\ndef test_log_cli_auto_enable(pytester: Pytester, cli_args: str) -> None:\n \"\"\"Check that live logs are enabled if --log-level or --log-cli-level is passed on the CLI.\n It should not be auto enabled if the same configs are set on the INI file.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n def test_log_1():\n logging.info(\"log message from test_log_1 not to be shown\")\n logging.warning(\"log message from test_log_1\")\n\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_level=INFO\n log_cli_level=INFO\n \"\"\"\n )\n\n result = pytester.runpytest(cli_args)\n stdout = result.stdout.str()\n if cli_args == \"--log-cli-level=WARNING\":\n result.stdout.fnmatch_lines(\n [\n \"*::test_log_1 \",\n \"*-- live log call --*\",\n \"*WARNING*log message from test_log_1*\",\n \"PASSED *100%*\",\n \"=* 1 passed in *=\",\n ]\n )\n assert \"INFO\" not in stdout\n else:\n result.stdout.fnmatch_lines(\n [\"*test_log_cli_auto_enable*100%*\", \"=* 1 passed in *=\"]\n )\n assert \"INFO\" not in stdout\n assert \"WARNING\" not in stdout", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_cli_test_log_file_cli.with_open_log_file_as_rf.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_cli_test_log_file_cli.with_open_log_file_as_rf.None_1", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 632, "end_line": 662, "span_ids": ["test_log_file_cli"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_file_cli(pytester: Pytester) -> None:\n # Default log file level\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_file(request):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_file_handler.level == logging.WARNING\n logging.getLogger('catchlog').info(\"This log message won't be shown\")\n logging.getLogger('catchlog').warning(\"This log message will be shown\")\n print('PASSED')\n \"\"\"\n )\n\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n\n result = pytester.runpytest(\n \"-s\", f\"--log-file={log_file}\", \"--log-file-level=WARNING\"\n )\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines([\"test_log_file_cli.py PASSED\"])\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0\n assert os.path.isfile(log_file)\n with open(log_file) as rfh:\n contents = rfh.read()\n assert \"This log message will be shown\" in contents\n assert \"This log message won't be shown\" not in contents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_cli_level_test_log_level_not_changed_by_default.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_cli_level_test_log_level_not_changed_by_default.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 665, "end_line": 705, "span_ids": ["test_log_level_not_changed_by_default", "test_log_file_cli_level"], "tokens": 333}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_file_cli_level(pytester: Pytester) -> None:\n # Default log file level\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_file(request):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_file_handler.level == logging.INFO\n logging.getLogger('catchlog').debug(\"This log message won't be shown\")\n 
logging.getLogger('catchlog').info(\"This log message will be shown\")\n print('PASSED')\n \"\"\"\n )\n\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n\n result = pytester.runpytest(\"-s\", f\"--log-file={log_file}\", \"--log-file-level=INFO\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines([\"test_log_file_cli_level.py PASSED\"])\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0\n assert os.path.isfile(log_file)\n with open(log_file) as rfh:\n contents = rfh.read()\n assert \"This log message will be shown\" in contents\n assert \"This log message won't be shown\" not in contents\n\n\ndef test_log_level_not_changed_by_default(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import logging\n def test_log_file():\n assert logging.getLogger().level == logging.WARNING\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_ini_test_log_file_ini.with_open_log_file_as_rf.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_ini_test_log_file_ini.with_open_log_file_as_rf.None_1", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 708, "end_line": 744, "span_ids": ["test_log_file_ini"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_file_ini(pytester: Pytester) -> None:\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n\n pytester.makeini(\n \"\"\"\n [pytest]\n log_file={}\n log_file_level=WARNING\n \"\"\".format(\n log_file\n )\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_file(request):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_file_handler.level == logging.WARNING\n logging.getLogger('catchlog').info(\"This log message won't be shown\")\n logging.getLogger('catchlog').warning(\"This log message will be shown\")\n print('PASSED')\n \"\"\"\n )\n\n result = pytester.runpytest(\"-s\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines([\"test_log_file_ini.py PASSED\"])\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0\n assert os.path.isfile(log_file)\n with open(log_file) as rfh:\n contents = rfh.read()\n assert \"This log message will be shown\" in contents\n assert \"This log message won't be shown\" not in contents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_ini_level_test_log_file_ini_level.with_open_log_file_as_rf.None_1": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_ini_level_test_log_file_ini_level.with_open_log_file_as_rf.None_1", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 747, "end_line": 783, "span_ids": ["test_log_file_ini_level"], "tokens": 271}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_file_ini_level(pytester: Pytester) -> None:\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n\n pytester.makeini(\n \"\"\"\n [pytest]\n log_file={}\n log_file_level = INFO\n \"\"\".format(\n log_file\n )\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_file(request):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_file_handler.level == logging.INFO\n logging.getLogger('catchlog').debug(\"This log message won't be shown\")\n logging.getLogger('catchlog').info(\"This log message will be shown\")\n print('PASSED')\n \"\"\"\n )\n\n result = pytester.runpytest(\"-s\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines([\"test_log_file_ini_level.py PASSED\"])\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0\n assert os.path.isfile(log_file)\n with open(log_file) as rfh:\n contents = rfh.read()\n assert \"This log message will be shown\" in contents\n assert \"This log message won't be shown\" not in contents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_unicode_test_log_file_unicode.with_open_log_file_encod.assert_Another_normal_me": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_file_unicode_test_log_file_unicode.with_open_log_file_encod.assert_Another_normal_me", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 786, "end_line": 818, "span_ids": ["test_log_file_unicode"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_file_unicode(pytester: Pytester) -> None:\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n\n pytester.makeini(\n \"\"\"\n [pytest]\n log_file={}\n log_file_level = INFO\n \"\"\".format(\n log_file\n )\n )\n pytester.makepyfile(\n \"\"\"\\\n import logging\n\n def test_log_file():\n logging.getLogger('catchlog').info(\"Normal message\")\n logging.getLogger('catchlog').info(\"\u251c\")\n logging.getLogger('catchlog').info(\"Another normal message\")\n \"\"\"\n )\n\n result = pytester.runpytest()\n\n # make sure that that we get a '0' 
exit code for the testsuite\n assert result.ret == 0\n assert os.path.isfile(log_file)\n with open(log_file, encoding=\"utf-8\") as rfh:\n contents = rfh.read()\n assert \"Normal message\" in contents\n assert \"\u251c\" in contents\n assert \"Another normal message\" in contents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_in_hooks_test_log_in_hooks.with_open_log_file_as_rf.assert_sessionfinish_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_in_hooks_test_log_in_hooks.with_open_log_file_as_rf.assert_sessionfinish_in", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 959, "end_line": 992, "span_ids": ["test_log_in_hooks"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_in_hooks(pytester: Pytester) -> None:\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n\n pytester.makeini(\n \"\"\"\n [pytest]\n log_file={}\n log_file_level = INFO\n log_cli=true\n \"\"\".format(\n log_file\n )\n )\n pytester.makeconftest(\n \"\"\"\n import logging\n\n def pytest_runtestloop(session):\n logging.info('runtestloop')\n\n def pytest_sessionstart(session):\n logging.info('sessionstart')\n\n def pytest_sessionfinish(session, exitstatus):\n logging.info('sessionfinish')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*sessionstart*\", \"*runtestloop*\", \"*sessionfinish*\"])\n with open(log_file) as rfh:\n contents = rfh.read()\n assert \"sessionstart\" in contents\n assert \"runtestloop\" in contents\n assert \"sessionfinish\" in contents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_in_runtest_logreport_test_log_in_runtest_logreport.with_open_log_file_as_rf.assert_contents_count_lo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_in_runtest_logreport_test_log_in_runtest_logreport.with_open_log_file_as_rf.assert_contents_count_lo", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 995, "end_line": 1026, "span_ids": ["test_log_in_runtest_logreport"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_in_runtest_logreport(pytester: Pytester) -> None:\n log_file = 
str(pytester.path.joinpath(\"pytest.log\"))\n\n pytester.makeini(\n \"\"\"\n [pytest]\n log_file={}\n log_file_level = INFO\n log_cli=true\n \"\"\".format(\n log_file\n )\n )\n pytester.makeconftest(\n \"\"\"\n import logging\n logger = logging.getLogger(__name__)\n\n def pytest_runtest_logreport(report):\n logger.info(\"logreport\")\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_first():\n assert True\n \"\"\"\n )\n pytester.runpytest()\n with open(log_file) as rfh:\n contents = rfh.read()\n assert contents.count(\"logreport\") == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox_TestApprox.test_repr_string.assert_repr_approx_a_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox_TestApprox.test_repr_string.assert_repr_approx_a_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 43, "end_line": 56, "span_ids": ["TestApprox.test_repr_string", "TestApprox"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n def test_repr_string(self):\n assert repr(approx(1.0)) == \"1.0 \u00b1 1.0e-06\"\n assert repr(approx([1.0, 2.0])) == \"approx([1.0 \u00b1 1.0e-06, 2.0 \u00b1 2.0e-06])\"\n assert repr(approx((1.0, 2.0))) == \"approx((1.0 \u00b1 1.0e-06, 2.0 \u00b1 2.0e-06))\"\n assert repr(approx(inf)) == \"inf\"\n assert repr(approx(1.0, rel=nan)) == \"1.0 \u00b1 ???\"\n assert repr(approx(1.0, rel=inf)) == \"1.0 \u00b1 inf\"\n\n # Dictionaries aren't ordered, so we need to check both orders.\n assert repr(approx({\"a\": 1.0, \"b\": 2.0})) in (\n \"approx({'a': 1.0 \u00b1 1.0e-06, 'b': 2.0 \u00b1 2.0e-06})\",\n \"approx({'b': 2.0 \u00b1 2.0e-06, 'a': 1.0 \u00b1 1.0e-06})\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_repr_nd_array_TestApprox.test_repr_nd_array.assert_repr_approx_np_arr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_repr_nd_array_TestApprox.test_repr_nd_array.assert_repr_approx_np_arr", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 87, "span_ids": ["TestApprox.test_repr_nd_array"], "tokens": 229}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n @pytest.mark.parametrize(\n \"value, expected_repr_string\",\n [\n (5.0, \"approx(5.0 \u00b1 
5.0e-06)\"),\n ([5.0], \"approx([5.0 \u00b1 5.0e-06])\"),\n ([[5.0]], \"approx([[5.0 \u00b1 5.0e-06]])\"),\n ([[5.0, 6.0]], \"approx([[5.0 \u00b1 5.0e-06, 6.0 \u00b1 6.0e-06]])\"),\n ([[5.0], [6.0]], \"approx([[5.0 \u00b1 5.0e-06], [6.0 \u00b1 6.0e-06]])\"),\n ],\n )\n def test_repr_nd_array(self, value, expected_repr_string):\n \"\"\"Make sure that arrays of all different dimensions are repr'd correctly.\"\"\"\n np = pytest.importorskip(\"numpy\")\n np_array = np.array(value)\n assert repr(approx(np_array)) == expected_repr_string", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_operator_overloading_TestApprox.test_exactly_equal.for_a_x_in_examples_.assert_a_approx_x_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_operator_overloading_TestApprox.test_exactly_equal.for_a_x_in_examples_.assert_a_approx_x_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 89, "end_line": 107, "span_ids": ["TestApprox.test_operator_overloading", "TestApprox.test_exactly_equal"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_operator_overloading(self):\n assert 1 == approx(1, rel=1e-6, abs=1e-12)\n assert not (1 != approx(1, rel=1e-6, abs=1e-12))\n assert 10 != approx(1, rel=1e-6, abs=1e-12)\n assert not (10 == approx(1, rel=1e-6, abs=1e-12))\n\n def test_exactly_equal(self):\n examples = [\n (2.0, 2.0),\n (0.1e200, 0.1e200),\n (1.123e-300, 1.123e-300),\n (12345, 12345.0),\n (0.0, -0.0),\n (345678, 345678),\n (Decimal(\"1.0001\"), Decimal(\"1.0001\")),\n (Fraction(1, 3), Fraction(-1, -3)),\n ]\n for a, x in examples:\n assert a == approx(x)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_opposite_sign_TestApprox.test_zero_tolerance.for_a_x_in_within_1e10_.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_opposite_sign_TestApprox.test_zero_tolerance.for_a_x_in_within_1e10_.None_5", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 110, "span_ids": ["TestApprox.test_zero_tolerance", "TestApprox.test_opposite_sign"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_opposite_sign(self):\n examples = [(eq, 1e-100, -1e-100), (ne, 1e100, -1e100)]\n for op, a, x in 
examples:\n assert op(a, approx(x))\n\n def test_zero_tolerance(self):\n within_1e10 = [(1.1e-100, 1e-100), (-1.1e-100, -1e-100)]\n for a, x in within_1e10:\n assert x == approx(x, rel=0.0, abs=0.0)\n assert a != approx(x, rel=0.0, abs=0.0)\n assert a == approx(x, rel=0.0, abs=5e-101)\n assert a != approx(x, rel=0.0, abs=5e-102)\n assert a == approx(x, rel=5e-1, abs=0.0)\n assert a != approx(x, rel=5e-2, abs=0.0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_inf_tolerance_TestApprox.test_inf_tolerance.for_a_x_in_large_diffs_.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_inf_tolerance_TestApprox.test_inf_tolerance.for_a_x_in_large_diffs_.None_3", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 125, "end_line": 132, "span_ids": ["TestApprox.test_inf_tolerance"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_inf_tolerance(self):\n # Everything should be equal if the tolerance is infinite.\n large_diffs = [(1, 1000), (1e-50, 1e50), (-1.0, -1e300), (0.0, 10)]\n for a, x in large_diffs:\n assert a != approx(x, rel=0.0, abs=0.0)\n assert a == approx(x, rel=inf, abs=0.0)\n assert a == approx(x, rel=0.0, abs=inf)\n assert a == approx(x, rel=inf, abs=inf)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_inf_tolerance_expecting_zero_TestApprox.test_reasonable_defaults.assert_0_1_0_2_appro": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_inf_tolerance_expecting_zero_TestApprox.test_reasonable_defaults.assert_0_1_0_2_appro", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 151, "end_line": 170, "span_ids": ["TestApprox.test_nan_tolerance", "TestApprox.test_inf_tolerance_expecting_zero", "TestApprox.test_reasonable_defaults"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_inf_tolerance_expecting_zero(self) -> None:\n # If the relative tolerance is zero but the expected value is infinite,\n # the actual tolerance is a NaN, which should be an error.\n with pytest.raises(ValueError):\n 1 == approx(0, rel=inf, abs=0.0)\n with pytest.raises(ValueError):\n 1 == approx(0, rel=inf, abs=inf)\n\n def test_nan_tolerance(self) -> None:\n with pytest.raises(ValueError):\n 
1.1 == approx(1, rel=nan)\n with pytest.raises(ValueError):\n 1.1 == approx(1, abs=nan)\n with pytest.raises(ValueError):\n 1.1 == approx(1, rel=nan, abs=nan)\n\n def test_reasonable_defaults(self):\n # Whatever the defaults are, they should work for numbers close to 1\n # than have a small amount of floating-point error.\n assert 0.1 + 0.2 == approx(0.3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_default_tolerances_TestApprox.test_default_tolerances.for_op_a_x_in_examples_.assert_op_a_approx_x_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_default_tolerances_TestApprox.test_default_tolerances.for_op_a_x_in_examples_.assert_op_a_approx_x_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 153, "end_line": 170, "span_ids": ["TestApprox.test_default_tolerances"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_default_tolerances(self):\n # This tests the defaults as they are currently set. If you change the\n # defaults, this test will fail but you should feel free to change it.\n # None of the other tests (except the doctests) should be affected by\n # the choice of defaults.\n examples = [\n # Relative tolerance used.\n (eq, 1e100 + 1e94, 1e100),\n (ne, 1e100 + 2e94, 1e100),\n (eq, 1e0 + 1e-6, 1e0),\n (ne, 1e0 + 2e-6, 1e0),\n # Absolute tolerance used.\n (eq, 1e-100, +1e-106),\n (eq, 1e-100, +2e-106),\n (eq, 1e-100, 0),\n ]\n for op, a, x in examples:\n assert op(a, approx(x))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_custom_tolerances_TestApprox.test_custom_tolerances.None_11": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_custom_tolerances_TestApprox.test_custom_tolerances.None_11", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 172, "end_line": 186, "span_ids": ["TestApprox.test_custom_tolerances"], "tokens": 411}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_custom_tolerances(self):\n assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e0)\n assert 1e8 + 1e0 == approx(1e8, rel=5e-9, abs=5e0)\n assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e-1)\n assert 1e8 + 1e0 != approx(1e8, rel=5e-9, abs=5e-1)\n\n assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-8)\n 
assert 1e0 + 1e-8 == approx(1e0, rel=5e-9, abs=5e-8)\n assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-9)\n assert 1e0 + 1e-8 != approx(1e0, rel=5e-9, abs=5e-9)\n\n assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-16)\n assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-9, abs=5e-16)\n assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-17)\n assert 1e-8 + 1e-16 != approx(1e-8, rel=5e-9, abs=5e-17)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_relative_tolerance_TestApprox.test_relative_tolerance.for_a_x_in_within_1e8_re.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_relative_tolerance_TestApprox.test_relative_tolerance.for_a_x_in_within_1e8_re.None_1", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 188, "end_line": 192, "span_ids": ["TestApprox.test_relative_tolerance"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_relative_tolerance(self):\n within_1e8_rel = [(1e8 + 1e0, 1e8), (1e0 + 1e-8, 1e0), (1e-8 + 1e-16, 1e-8)]\n for a, x in within_1e8_rel:\n assert a == approx(x, rel=5e-8, abs=0.0)\n assert a != approx(x, rel=5e-9, abs=0.0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_absolute_tolerance_TestApprox.test_absolute_tolerance.for_a_x_in_within_1e8_ab.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_absolute_tolerance_TestApprox.test_absolute_tolerance.for_a_x_in_within_1e8_ab.None_1", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 194, "end_line": 198, "span_ids": ["TestApprox.test_absolute_tolerance"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_absolute_tolerance(self):\n within_1e8_abs = [(1e8 + 9e-9, 1e8), (1e0 + 9e-9, 1e0), (1e-8 + 9e-9, 1e-8)]\n for a, x in within_1e8_abs:\n assert a == approx(x, rel=0, abs=5e-8)\n assert a != approx(x, rel=0, abs=5e-9)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expecting_zero_TestApprox.test_expecting_zero.for_op_a_x_in_examples_.None_1": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expecting_zero_TestApprox.test_expecting_zero.for_op_a_x_in_examples_.None_1", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 200, "end_line": 213, "span_ids": ["TestApprox.test_expecting_zero"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_expecting_zero(self):\n examples = [\n (ne, 1e-6, 0.0),\n (ne, -1e-6, 0.0),\n (eq, 1e-12, 0.0),\n (eq, -1e-12, 0.0),\n (ne, 2e-12, 0.0),\n (ne, -2e-12, 0.0),\n (ne, inf, 0.0),\n (ne, nan, 0.0),\n ]\n for op, a, x in examples:\n assert op(a, approx(x, rel=0.0, abs=1e-12))\n assert op(a, approx(x, rel=1e-6, abs=1e-12))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expecting_inf_TestApprox.test_expecting_nan.for_op_a_x_in_examples_.assert_op_a_approx_x_na": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expecting_inf_TestApprox.test_expecting_nan.for_op_a_x_in_examples_.assert_op_a_approx_x_na", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 215, "end_line": 239, "span_ids": ["TestApprox.test_expecting_inf", "TestApprox.test_expecting_nan"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_expecting_inf(self):\n examples = [\n (eq, inf, inf),\n (eq, -inf, -inf),\n (ne, inf, -inf),\n (ne, 0.0, inf),\n (ne, nan, inf),\n ]\n for op, a, x in examples:\n assert op(a, approx(x))\n\n def test_expecting_nan(self):\n examples = [\n (eq, nan, nan),\n (eq, -nan, -nan),\n (eq, nan, -nan),\n (ne, 0.0, nan),\n (ne, inf, nan),\n ]\n for op, a, x in examples:\n # Nothing is equal to NaN by default.\n assert a != approx(x)\n\n # If ``nan_ok=True``, then NaN is equal to NaN.\n assert op(a, approx(x, nan_ok=True))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_int_TestApprox.test_int.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_int_TestApprox.test_int.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 241, "end_line": 247, "span_ids": 
["TestApprox.test_int"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_int(self):\n within_1e6 = [(1000001, 1000000), (-1000001, -1000000)]\n for a, x in within_1e6:\n assert a == approx(x, rel=5e-6, abs=0)\n assert a != approx(x, rel=5e-7, abs=0)\n assert approx(x, rel=5e-6, abs=0) == a\n assert approx(x, rel=5e-7, abs=0) != a", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_decimal_TestApprox.test_decimal.for_a_x_in_within_1e6_.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_decimal_TestApprox.test_decimal.for_a_x_in_within_1e6_.None_4", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 249, "end_line": 259, "span_ids": ["TestApprox.test_decimal"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_decimal(self):\n within_1e6 = [\n (Decimal(\"1.000001\"), Decimal(\"1.0\")),\n (Decimal(\"-1.000001\"), Decimal(\"-1.0\")),\n ]\n for a, x in within_1e6:\n assert a == approx(x)\n assert a == approx(x, rel=Decimal(\"5e-6\"), abs=0)\n assert a != approx(x, rel=Decimal(\"5e-7\"), abs=0)\n assert approx(x, rel=Decimal(\"5e-6\"), abs=0) == a\n assert approx(x, rel=Decimal(\"5e-7\"), abs=0) != a", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_fraction_TestApprox.test_fraction.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_fraction_TestApprox.test_fraction.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 261, "end_line": 270, "span_ids": ["TestApprox.test_fraction"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_fraction(self):\n within_1e6 = [\n (1 + Fraction(1, 1000000), Fraction(1)),\n (-1 - Fraction(-1, 1000000), Fraction(-1)),\n ]\n for a, x in within_1e6:\n assert a == approx(x, rel=5e-6, abs=0)\n assert a != approx(x, rel=5e-7, abs=0)\n 
assert approx(x, rel=5e-6, abs=0) == a\n assert approx(x, rel=5e-7, abs=0) != a", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_complex_TestApprox.test_complex.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_complex_TestApprox.test_complex.for_a_x_in_within_1e6_.assert_approx_x_rel_5e_7", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 272, "end_line": 283, "span_ids": ["TestApprox.test_complex"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_complex(self):\n within_1e6 = [\n (1.000001 + 1.0j, 1.0 + 1.0j),\n (1.0 + 1.000001j, 1.0 + 1.0j),\n (-1.000001 + 1.0j, -1.0 + 1.0j),\n (1.0 - 1.000001j, 1.0 - 1.0j),\n ]\n for a, x in within_1e6:\n assert a == approx(x, rel=5e-6, abs=0)\n assert a != approx(x, rel=5e-7, abs=0)\n assert approx(x, rel=5e-6, abs=0) == a\n assert approx(x, rel=5e-7, abs=0) != a", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_list_TestApprox.test_list.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_list_TestApprox.test_list.None_3", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 285, "end_line": 293, "span_ids": ["TestApprox.test_list"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_list(self):\n actual = [1 + 1e-7, 2 + 1e-8]\n expected = [1, 2]\n\n # Return false if any element is outside the tolerance.\n assert actual == approx(expected, rel=5e-7, abs=0)\n assert actual != approx(expected, rel=5e-8, abs=0)\n assert approx(expected, rel=5e-7, abs=0) == actual\n assert approx(expected, rel=5e-8, abs=0) != actual", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_TestApprox.test_numpy_array.None_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_TestApprox.test_numpy_array.None_7", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": 
"text/x-python", "category": "implementation", "start_line": 330, "end_line": 346, "span_ids": ["TestApprox.test_numpy_array"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_numpy_array(self):\n np = pytest.importorskip(\"numpy\")\n\n actual = np.array([1 + 1e-7, 2 + 1e-8])\n expected = np.array([1, 2])\n\n # Return false if any element is outside the tolerance.\n assert actual == approx(expected, rel=5e-7, abs=0)\n assert actual != approx(expected, rel=5e-8, abs=0)\n assert approx(expected, rel=5e-7, abs=0) == expected\n assert approx(expected, rel=5e-8, abs=0) != actual\n\n # Should be able to compare lists with numpy arrays.\n assert list(actual) == approx(expected, rel=5e-7, abs=0)\n assert list(actual) != approx(expected, rel=5e-8, abs=0)\n assert actual == approx(list(expected), rel=5e-7, abs=0)\n assert actual != approx(list(expected), rel=5e-8, abs=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_tolerance_args_TestApprox.test_numpy_tolerance_args.for_op__abs__rel_in_tes.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_tolerance_args_TestApprox.test_numpy_tolerance_args.for_op__abs__rel_in_tes.None_5", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 348, "end_line": 376, "span_ids": ["TestApprox.test_numpy_tolerance_args"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_numpy_tolerance_args(self):\n \"\"\"\n Check that numpy rel/abs args are handled correctly\n for comparison against an np.array\n Check both sides of the operator, hopefully it doesn't impact things.\n Test all permutations of where the approx and np.array() can show up\n \"\"\"\n np = pytest.importorskip(\"numpy\")\n expected = 100.0\n actual = 99.0\n abs_diff = expected - actual\n rel_diff = (expected - actual) / expected\n\n tests = [\n (eq, abs_diff, 0),\n (eq, 0, rel_diff),\n (ne, 0, rel_diff / 2.0), # rel diff fail\n (ne, abs_diff / 2.0, 0), # abs diff fail\n ]\n\n for op, _abs, _rel in tests:\n assert op(np.array(actual), approx(expected, abs=_abs, rel=_rel)) # a, b\n assert op(approx(expected, abs=_abs, rel=_rel), np.array(actual)) # b, a\n\n assert op(actual, approx(np.array(expected), abs=_abs, rel=_rel)) # a, b\n assert op(approx(np.array(expected), abs=_abs, rel=_rel), actual) # b, a\n\n assert op(np.array(actual), approx(np.array(expected), abs=_abs, rel=_rel))\n assert op(approx(np.array(expected), abs=_abs, rel=_rel), np.array(actual))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_expecting_nan_TestApprox.test_numpy_expecting_nan.for_op_a_x_in_examples_.assert_op_a_approx_np_ar": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_expecting_nan_TestApprox.test_numpy_expecting_nan.for_op_a_x_in_examples_.assert_op_a_approx_np_ar", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 378, "end_line": 394, "span_ids": ["TestApprox.test_numpy_expecting_nan"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_numpy_expecting_nan(self):\n np = pytest.importorskip(\"numpy\")\n examples = [\n (eq, nan, nan),\n (eq, -nan, -nan),\n (eq, nan, -nan),\n (ne, 0.0, nan),\n (ne, inf, nan),\n ]\n for op, a, x in examples:\n # Nothing is equal to NaN by default.\n assert np.array(a) != approx(x)\n assert a != approx(np.array(x))\n\n # If ``nan_ok=True``, then NaN is equal to NaN.\n assert op(np.array(a), approx(x, nan_ok=True))\n assert op(a, approx(np.array(x), nan_ok=True))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_with_scalar_TestApprox.test_numpy_array_with_scalar.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_with_scalar_TestApprox.test_numpy_array_with_scalar.None_3", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 476, "end_line": 485, "span_ids": ["TestApprox.test_numpy_array_with_scalar"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_numpy_array_with_scalar(self):\n np = pytest.importorskip(\"numpy\")\n\n actual = np.array([1 + 1e-7, 1 - 1e-8])\n expected = 1.0\n\n assert actual == approx(expected, rel=5e-7, abs=0)\n assert actual != approx(expected, rel=5e-8, abs=0)\n assert approx(expected, rel=5e-7, abs=0) == actual\n assert approx(expected, rel=5e-8, abs=0) != actual", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_scalar_with_array_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_scalar_with_array_", "embedding": null, "metadata": {"file_path": 
"testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 500, "end_line": 521, "span_ids": ["TestApprox.test_numpy_scalar_with_array", "TestApprox.test_generic_sized_iterable_object.MySizedIterable.__iter__", "TestApprox.test_generic_sized_iterable_object.MySizedIterable", "TestApprox.test_generic_sized_iterable_object"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_numpy_scalar_with_array(self):\n np = pytest.importorskip(\"numpy\")\n\n actual = 1.0\n expected = np.array([1 + 1e-7, 1 - 1e-8])\n\n assert actual == approx(expected, rel=5e-7, abs=0)\n assert actual != approx(expected, rel=5e-8, abs=0)\n assert approx(expected, rel=5e-7, abs=0) == actual\n assert approx(expected, rel=5e-8, abs=0) != actual\n\n def test_generic_sized_iterable_object(self):\n class MySizedIterable:\n def __iter__(self):\n return iter([1, 2, 3, 4])\n\n def __len__(self):\n return 4\n\n expected = MySizedIterable()\n assert [1, 2, 3, 4] == approx(expected)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_syntax_error_in_module_TestModule.test_invalid_test_module_name.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_syntax_error_in_module_TestModule.test_invalid_test_module_name.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 71, "end_line": 89, "span_ids": ["TestModule.test_invalid_test_module_name", "TestModule.test_module_considers_pluginmanager_at_import", "TestModule.test_syntax_error_in_module"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestModule:\n\n def test_syntax_error_in_module(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\"this is a syntax error\")\n pytest.raises(modcol.CollectError, modcol.collect)\n pytest.raises(modcol.CollectError, modcol.collect)\n\n def test_module_considers_pluginmanager_at_import(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\"pytest_plugins='xasdlkj',\")\n pytest.raises(ImportError, lambda: modcol.obj)\n\n def test_invalid_test_module_name(self, pytester: Pytester) -> None:\n a = pytester.mkdir(\"a\")\n a.joinpath(\"test_one.part1.py\").touch()\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"ImportError while importing test module*test_one.part1*\",\n \"Hint: make sure your test modules/packages have valid Python names.\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_show_traceback_import_error_unicode_TestModule.test_show_traceback_import_error_unicode.assert_result_ret_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_show_traceback_import_error_unicode_TestModule.test_show_traceback_import_error_unicode.assert_result_ret_2", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 128, "end_line": 141, "span_ids": ["TestModule.test_show_traceback_import_error_unicode"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestModule:\n\n def test_show_traceback_import_error_unicode(self, pytester: Pytester) -> None:\n \"\"\"Check test modules collected which raise ImportError with unicode messages\n are handled properly (#2336).\n \"\"\"\n pytester.makepyfile(\"raise ImportError('Something bad happened \u263a')\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"ImportError while importing test module*\",\n \"Traceback:\",\n \"*raise ImportError*Something bad happened*\",\n ]\n )\n assert result.ret == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass_TestClass.test_class_subclassobject.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass_TestClass.test_class_subclassobject.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 144, "end_line": 185, "span_ids": ["TestClass.test_class_with_init_warning", "TestClass.test_class_with_new_warning", "TestClass", "TestClass.test_class_subclassobject"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestClass:\n def test_class_with_init_warning(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class TestClass1(object):\n def __init__(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*cannot collect test class 'TestClass1' because it has \"\n \"a __init__ constructor (from: test_class_with_init_warning.py)\"\n ]\n )\n\n def test_class_with_new_warning(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class TestClass1(object):\n def __new__(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*cannot collect test class 'TestClass1' because it has \"\n \"a __new__ constructor 
(from: test_class_with_new_warning.py)\"\n ]\n )\n\n def test_class_subclassobject(self, pytester: Pytester) -> None:\n pytester.getmodulecol(\n \"\"\"\n class test(object):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*collected 0*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass.test_static_method_TestClass.test_static_method.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass.test_static_method_TestClass.test_static_method.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 187, "end_line": 207, "span_ids": ["TestClass.test_static_method"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestClass:\n\n def test_static_method(self, pytester: Pytester) -> None:\n \"\"\"Support for collecting staticmethod tests (#2528, #2699)\"\"\"\n pytester.getmodulecol(\n \"\"\"\n import pytest\n class Test(object):\n @staticmethod\n def test_something():\n pass\n\n @pytest.fixture\n def fix(self):\n return 1\n\n @staticmethod\n def test_fix(fix):\n assert fix == 1\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*collected 2 items*\", \"*2 passed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction_TestFunction.test_function_as_object_instance_ignored.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction_TestFunction.test_function_as_object_instance_ignored.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 265, "end_line": 290, "span_ids": ["TestFunction.test_getmodulecollector", "TestFunction.test_function_as_object_instance_ignored", "TestFunction"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n def test_getmodulecollector(self, pytester: Pytester) -> None:\n item = pytester.getitem(\"def test_func(): pass\")\n modcol = item.getparent(pytest.Module)\n assert isinstance(modcol, pytest.Module)\n assert hasattr(modcol.obj, \"test_func\")\n\n @pytest.mark.filterwarnings(\"default\")\n def test_function_as_object_instance_ignored(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n 
class A(object):\n def __call__(self, tmpdir):\n 0/0\n\n test_a = A()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"collected 0 items\",\n \"*test_function_as_object_instance_ignored.py:2: \"\n \"*cannot collect 'test_a' because it is not a function.\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_non_hashable_values_TestFunction.test_parametrize_with_non_hashable_values.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_non_hashable_values_TestFunction.test_parametrize_with_non_hashable_values.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 361, "end_line": 379, "span_ids": ["TestFunction.test_parametrize_with_non_hashable_values"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_with_non_hashable_values(self, pytester: Pytester) -> None:\n \"\"\"Test parametrization with non-hashable values.\"\"\"\n pytester.makepyfile(\n \"\"\"\n archival_mapping = {\n '1.0': {'tag': '1.0'},\n '1.2.2a1': {'tag': 'release-1.2.2a1'},\n }\n\n import pytest\n @pytest.mark.parametrize('key value'.split(),\n archival_mapping.items())\n def test_archival_to_version(key, value):\n assert key in archival_mapping\n assert value == archival_mapping[key]\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_non_hashable_values_indirect_TestFunction.test_parametrize_with_non_hashable_values_indirect.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_non_hashable_values_indirect_TestFunction.test_parametrize_with_non_hashable_values_indirect.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 381, "end_line": 410, "span_ids": ["TestFunction.test_parametrize_with_non_hashable_values_indirect"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_with_non_hashable_values_indirect(\n self, pytester: Pytester\n ) -> None:\n 
\"\"\"Test parametrization with non-hashable values with indirect parametrization.\"\"\"\n pytester.makepyfile(\n \"\"\"\n archival_mapping = {\n '1.0': {'tag': '1.0'},\n '1.2.2a1': {'tag': 'release-1.2.2a1'},\n }\n\n import pytest\n\n @pytest.fixture\n def key(request):\n return request.param\n\n @pytest.fixture\n def value(request):\n return request.param\n\n @pytest.mark.parametrize('key value'.split(),\n archival_mapping.items(), indirect=True)\n def test_archival_to_version(key, value):\n assert key in archival_mapping\n assert value == archival_mapping[key]\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_fixture_TestFunction.test_parametrize_overrides_fixture.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_fixture_TestFunction.test_parametrize_overrides_fixture.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 412, "end_line": 439, "span_ids": ["TestFunction.test_parametrize_overrides_fixture"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_overrides_fixture(self, pytester: Pytester) -> None:\n \"\"\"Test parametrization when parameter overrides existing fixture with same name.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def value():\n return 'value'\n\n @pytest.mark.parametrize('value',\n ['overridden'])\n def test_overridden_via_param(value):\n assert value == 'overridden'\n\n @pytest.mark.parametrize('somevalue', ['overridden'])\n def test_not_overridden(value, somevalue):\n assert value == 'value'\n assert somevalue == 'overridden'\n\n @pytest.mark.parametrize('other,value', [('foo', 'overridden')])\n def test_overridden_via_multiparam(other, value):\n assert other == 'foo'\n assert value == 'overridden'\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_parametrized_fixture_TestFunction.test_parametrize_overrides_parametrized_fixture.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_parametrized_fixture_TestFunction.test_parametrize_overrides_parametrized_fixture.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 441, "end_line": 460, 
"span_ids": ["TestFunction.test_parametrize_overrides_parametrized_fixture"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_overrides_parametrized_fixture(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test parametrization when parameter overrides existing parametrized fixture with same name.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2])\n def value(request):\n return request.param\n\n @pytest.mark.parametrize('value',\n ['overridden'])\n def test_overridden_via_param(value):\n assert value == 'overridden'\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_indirect_dependency_fixture_TestFunction.test_parametrize_overrides_indirect_dependency_fixture.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_overrides_indirect_dependency_fixture_TestFunction.test_parametrize_overrides_indirect_dependency_fixture.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 462, "end_line": 493, "span_ids": ["TestFunction.test_parametrize_overrides_indirect_dependency_fixture"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_overrides_indirect_dependency_fixture(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test parametrization when parameter overrides a fixture that a test indirectly depends on\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n fix3_instantiated = False\n\n @pytest.fixture\n def fix1(fix2):\n return fix2 + '1'\n\n @pytest.fixture\n def fix2(fix3):\n return fix3 + '2'\n\n @pytest.fixture\n def fix3():\n global fix3_instantiated\n fix3_instantiated = True\n return '3'\n\n @pytest.mark.parametrize('fix2', ['2'])\n def test_it(fix1):\n assert fix1 == '21'\n assert not fix3_instantiated\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_mark_TestFunction.test_parametrize_with_mark.assert_foo_in_keywords_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_mark_TestFunction.test_parametrize_with_mark.assert_foo_in_keywords_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 495, "end_line": 514, "span_ids": ["TestFunction.test_parametrize_with_mark"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_with_mark(self, pytester: Pytester) -> None:\n items = pytester.getitems(\n \"\"\"\n import pytest\n @pytest.mark.foo\n @pytest.mark.parametrize('arg', [\n 1,\n pytest.param(2, marks=[pytest.mark.baz, pytest.mark.bar])\n ])\n def test_function(arg):\n pass\n \"\"\"\n )\n keywords = [item.keywords for item in items]\n assert (\n \"foo\" in keywords[0]\n and \"bar\" not in keywords[0]\n and \"baz\" not in keywords[0]\n )\n assert \"foo\" in keywords[1] and \"bar\" in keywords[1] and \"baz\" in keywords[1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_multiple_parametrize_TestFunction.test_multiple_parametrize.assert_colitems_3_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_multiple_parametrize_TestFunction.test_multiple_parametrize.assert_colitems_3_name_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 558, "end_line": 572, "span_ids": ["TestFunction.test_multiple_parametrize"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_multiple_parametrize(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('x', [0, 1])\n @pytest.mark.parametrize('y', [2, 3])\n def test1(x, y):\n pass\n \"\"\"\n )\n colitems = modcol.collect()\n assert colitems[0].name == \"test1[2-0]\"\n assert colitems[1].name == \"test1[2-1]\"\n assert colitems[2].name == \"test1[3-0]\"\n assert colitems[3].name == \"test1[3-1]\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_issue751_multiple_parametrize_with_ids_TestFunction.test_issue751_multiple_parametrize_with_ids.assert_colitems_3_name_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_issue751_multiple_parametrize_with_ids_TestFunction.test_issue751_multiple_parametrize_with_ids.assert_colitems_3_name_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 574, "end_line": 591, "span_ids": ["TestFunction.test_issue751_multiple_parametrize_with_ids"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_issue751_multiple_parametrize_with_ids(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('x', [0], ids=['c'])\n @pytest.mark.parametrize('y', [0, 1], ids=['a', 'b'])\n class Test(object):\n def test1(self, x, y):\n pass\n def test2(self, x, y):\n pass\n \"\"\"\n )\n colitems = modcol.collect()[0].collect()[0].collect()\n assert colitems[0].name == \"test1[a-c]\"\n assert colitems[1].name == \"test1[b-c]\"\n assert colitems[2].name == \"test2[a-c]\"\n assert colitems[3].name == \"test2[b-c]\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestSorting_TestSorting.test_check_equality.for_fn_in_fn1_fn2_fn3_.assert_modcol_fn": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestSorting_TestSorting.test_check_equality.for_fn_in_fn1_fn2_fn3_.assert_modcol_fn", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 722, "end_line": 749, "span_ids": ["TestSorting.test_check_equality", "TestSorting"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSorting:\n def test_check_equality(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n def test_pass(): pass\n def test_fail(): assert 0\n \"\"\"\n )\n fn1 = pytester.collect_by_name(modcol, \"test_pass\")\n assert isinstance(fn1, pytest.Function)\n fn2 = pytester.collect_by_name(modcol, \"test_pass\")\n assert isinstance(fn2, pytest.Function)\n\n assert fn1 == fn2\n assert fn1 != modcol\n assert hash(fn1) == hash(fn2)\n\n fn3 = pytester.collect_by_name(modcol, \"test_fail\")\n assert isinstance(fn3, pytest.Function)\n assert not (fn1 == fn3)\n assert fn1 != fn3\n\n for fn in fn1, fn2, fn3:\n assert fn != 3 # type: ignore[comparison-overlap]\n assert fn != modcol\n assert fn != [1, 2, 3] # type: ignore[comparison-overlap]\n assert [1, 2, 3] != fn # type: ignore[comparison-overlap]\n assert modcol != fn", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestSorting.test_allow_sane_sorting_for_decorators_TestSorting.test_allow_sane_sorting_for_decorators.assert_item_name_for_ite": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestSorting.test_allow_sane_sorting_for_decorators_TestSorting.test_allow_sane_sorting_for_decorators.assert_item_name_for_ite", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 751, "end_line": 771, "span_ids": ["TestSorting.test_allow_sane_sorting_for_decorators"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSorting:\n\n def test_allow_sane_sorting_for_decorators(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n def dec(f):\n g = lambda: f(2)\n g.place_as = f\n return g\n\n\n def test_b(y):\n pass\n test_b = dec(test_b)\n\n def test_a(y):\n pass\n test_a = dec(test_a)\n \"\"\"\n )\n colitems = modcol.collect()\n assert len(colitems) == 2\n assert [item.name for item in colitems] == [\"test_b\", \"test_a\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization_TestConftestCustomization.test_pytest_pycollect_module.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization_TestConftestCustomization.test_pytest_pycollect_module.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 774, "end_line": 789, "span_ids": ["TestConftestCustomization", "TestConftestCustomization.test_pytest_pycollect_module"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConftestCustomization:\n def test_pytest_pycollect_module(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n class MyModule(pytest.Module):\n pass\n def pytest_pycollect_makemodule(path, parent):\n if path.basename == \"test_xyz.py\":\n return MyModule.from_parent(fspath=path, parent=parent)\n \"\"\"\n )\n pytester.makepyfile(\"def test_some(): pass\")\n pytester.makepyfile(test_xyz=\"def test_func(): pass\")\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"* None:\n b = pytester.path.joinpath(\"a\", \"b\")\n b.mkdir(parents=True)\n b.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.hookimpl(hookwrapper=True)\n def 
pytest_pycollect_makemodule():\n outcome = yield\n mod = outcome.get_result()\n mod.obj.hello = \"world\"\n \"\"\"\n )\n )\n b.joinpath(\"test_module.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_hello():\n assert hello == \"world\"\n \"\"\"\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_customized_pymakeitem_TestConftestCustomization.test_customized_pymakeitem.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_customized_pymakeitem_TestConftestCustomization.test_customized_pymakeitem.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 817, "end_line": 849, "span_ids": ["TestConftestCustomization.test_customized_pymakeitem"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConftestCustomization:\n\n def test_customized_pymakeitem(self, pytester: Pytester) -> None:\n b = pytester.path.joinpath(\"a\", \"b\")\n b.mkdir(parents=True)\n b.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.hookimpl(hookwrapper=True)\n def pytest_pycollect_makeitem():\n outcome = yield\n if outcome.excinfo is None:\n result = outcome.get_result()\n if result:\n for func in result:\n func._some123 = \"world\"\n \"\"\"\n )\n )\n b.joinpath(\"test_module.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.fixture()\n def obj(request):\n return request.node._some123\n def test_hello(obj):\n assert obj == \"world\"\n \"\"\"\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_issue2369_collect_module_fileext_TestConftestCustomization.test_issue2369_collect_module_fileext.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_issue2369_collect_module_fileext_TestConftestCustomization.test_issue2369_collect_module_fileext.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 866, "end_line": 897, "span_ids": ["TestConftestCustomization.test_issue2369_collect_module_fileext"], "tokens": 276}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConftestCustomization:\n\n def test_issue2369_collect_module_fileext(self, pytester: Pytester) -> None:\n \"\"\"Ensure we can collect files with weird file extensions as Python\n modules (#2369)\"\"\"\n # We'll implement a little finder and loader to import files containing\n # Python source code whose file extension is \".narf\".\n pytester.makeconftest(\n \"\"\"\n import sys, os, imp\n from _pytest.python import Module\n\n class Loader(object):\n def load_module(self, name):\n return imp.load_source(name, name + \".narf\")\n class Finder(object):\n def find_module(self, name, path=None):\n if os.path.exists(name + \".narf\"):\n return Loader()\n sys.meta_path.append(Finder())\n\n def pytest_collect_file(path, parent):\n if path.ext == \".narf\":\n return Module.from_parent(fspath=path, parent=parent)\"\"\"\n )\n pytester.makefile(\n \".narf\",\n \"\"\"\\\n def test_something():\n assert 1 + 1 == 2\"\"\",\n )\n # Use runpytest_subprocess, since we're futzing with sys.meta_path.\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_setup_only_available_in_subdir_test_setup_only_available_in_subdir.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_setup_only_available_in_subdir_test_setup_only_available_in_subdir.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 928, "end_line": 960, "span_ids": ["test_setup_only_available_in_subdir"], "tokens": 318}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_only_available_in_subdir(pytester: Pytester) -> None:\n sub1 = pytester.mkpydir(\"sub1\")\n sub2 = pytester.mkpydir(\"sub2\")\n sub1.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n def pytest_runtest_setup(item):\n assert item.fspath.purebasename == \"test_in_sub1\"\n def pytest_runtest_call(item):\n assert item.fspath.purebasename == \"test_in_sub1\"\n def pytest_runtest_teardown(item):\n assert item.fspath.purebasename == \"test_in_sub1\"\n \"\"\"\n )\n )\n sub2.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n def pytest_runtest_setup(item):\n assert item.fspath.purebasename == \"test_in_sub2\"\n def pytest_runtest_call(item):\n assert item.fspath.purebasename == \"test_in_sub2\"\n def pytest_runtest_teardown(item):\n assert item.fspath.purebasename == \"test_in_sub2\"\n \"\"\"\n )\n )\n sub1.joinpath(\"test_in_sub1.py\").write_text(\"def test_1(): pass\")\n sub2.joinpath(\"test_in_sub2.py\").write_text(\"def test_2(): pass\")\n result = pytester.runpytest(\"-v\", \"-s\")\n result.assert_outcomes(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_modulecol_roundtrip_TestTracebackCutting.test_traceback_argsetup.assert_numentries_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_modulecol_roundtrip_TestTracebackCutting.test_traceback_argsetup.assert_numentries_3", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 963, "end_line": 1002, "span_ids": ["TestTracebackCutting.test_skip_simple", "TestTracebackCutting", "TestTracebackCutting.test_traceback_argsetup", "test_modulecol_roundtrip"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_modulecol_roundtrip(pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\"pass\", withinit=False)\n trail = modcol.nodeid\n newcol = modcol.session.perform_collect([trail], genitems=0)[0]\n assert modcol.name == newcol.name\n\n\nclass TestTracebackCutting:\n def test_skip_simple(self):\n with pytest.raises(pytest.skip.Exception) as excinfo:\n pytest.skip(\"xxx\")\n assert excinfo.traceback[-1].frame.code.name == \"skip\"\n assert excinfo.traceback[-1].ishidden()\n assert excinfo.traceback[-2].frame.code.name == \"test_skip_simple\"\n assert not excinfo.traceback[-2].ishidden()\n\n def test_traceback_argsetup(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def hello(request):\n raise ValueError(\"xyz\")\n \"\"\"\n )\n p = pytester.makepyfile(\"def test(hello): pass\")\n result = pytester.runpytest(p)\n assert result.ret != 0\n out = result.stdout.str()\n assert \"xyz\" in out\n assert \"conftest.py:5: ValueError\" in out\n numentries = out.count(\"_ _ _\") # separator for traceback entries\n assert numentries == 0\n\n result = pytester.runpytest(\"--fulltrace\", p)\n out = result.stdout.str()\n assert \"conftest.py:5: ValueError\" in out\n numentries = out.count(\"_ _ _ _\") # separator for traceback entries\n assert numentries > 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_traceback_error_during_import_TestTracebackCutting.test_traceback_error_during_import.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_traceback_error_during_import_TestTracebackCutting.test_traceback_error_during_import.None_2", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1004, "end_line": 1023, "span_ids": ["TestTracebackCutting.test_traceback_error_during_import"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", 
"end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTracebackCutting:\n\n def test_traceback_error_during_import(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n x = 1\n x = 2\n x = 17\n asd\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n out = result.stdout.str()\n assert \"x = 1\" not in out\n assert \"x = 2\" not in out\n result.stdout.fnmatch_lines([\" *asd*\", \"E*NameError*\"])\n result = pytester.runpytest(\"--fulltrace\")\n out = result.stdout.str()\n assert \"x = 1\" in out\n assert \"x = 2\" in out\n result.stdout.fnmatch_lines([\">*asd*\", \"E*NameError*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_traceback_filter_error_during_fixture_collection_TestTracebackCutting.test_traceback_filter_error_during_fixture_collection.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_traceback_filter_error_during_fixture_collection_TestTracebackCutting.test_traceback_filter_error_during_fixture_collection.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1025, "end_line": 1051, "span_ids": ["TestTracebackCutting.test_traceback_filter_error_during_fixture_collection"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTracebackCutting:\n\n def test_traceback_filter_error_during_fixture_collection(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Integration test for issue #995.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def fail_me(func):\n ns = {}\n exec('def w(): raise ValueError(\"fail me\")', ns)\n return ns['w']\n\n @pytest.fixture(scope='class')\n @fail_me\n def fail_fixture():\n pass\n\n def test_failing_fixture(fail_fixture):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n out = result.stdout.str()\n assert \"INTERNALERROR>\" not in out\n result.stdout.fnmatch_lines([\"*ValueError: fail me*\", \"* 1 error in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_filter_traceback_generated_code_TestTracebackCutting.test_filter_traceback_generated_code.assert_not_filter_traceba": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_filter_traceback_generated_code_TestTracebackCutting.test_filter_traceback_generated_code.assert_not_filter_traceba", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", 
"file_type": "text/x-python", "category": "implementation", "start_line": 1053, "end_line": 1075, "span_ids": ["TestTracebackCutting.test_filter_traceback_generated_code"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTracebackCutting:\n\n def test_filter_traceback_generated_code(self) -> None:\n \"\"\"Test that filter_traceback() works with the fact that\n _pytest._code.code.Code.path attribute might return an str object.\n\n In this case, one of the entries on the traceback was produced by\n dynamically generated code.\n See: https://bitbucket.org/pytest-dev/py/issues/71\n This fixes #995.\n \"\"\"\n from _pytest._code import filter_traceback\n\n tb = None\n try:\n ns: Dict[str, Any] = {}\n exec(\"def foo(): raise ValueError\", ns)\n ns[\"foo\"]()\n except ValueError:\n _, _, tb = sys.exc_info()\n\n assert tb is not None\n traceback = _pytest._code.Traceback(tb)\n assert isinstance(traceback[-1].path, str)\n assert not filter_traceback(traceback[-1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_filter_traceback_path_no_longer_valid_TestTracebackCutting.test_filter_traceback_path_no_longer_valid.assert_filter_traceback_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestTracebackCutting.test_filter_traceback_path_no_longer_valid_TestTracebackCutting.test_filter_traceback_path_no_longer_valid.assert_filter_traceback_t", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1077, "end_line": 1105, "span_ids": ["TestTracebackCutting.test_filter_traceback_path_no_longer_valid"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTracebackCutting:\n\n def test_filter_traceback_path_no_longer_valid(self, pytester: Pytester) -> None:\n \"\"\"Test that filter_traceback() works with the fact that\n _pytest._code.code.Code.path attribute might return an str object.\n\n In this case, one of the files in the traceback no longer exists.\n This fixes #1133.\n \"\"\"\n from _pytest._code import filter_traceback\n\n pytester.syspathinsert()\n pytester.makepyfile(\n filter_traceback_entry_as_str=\"\"\"\n def foo():\n raise ValueError\n \"\"\"\n )\n tb = None\n try:\n import filter_traceback_entry_as_str\n\n filter_traceback_entry_as_str.foo()\n except ValueError:\n _, _, tb = sys.exc_info()\n\n assert tb is not None\n pytester.path.joinpath(\"filter_traceback_entry_as_str.py\").unlink()\n traceback = _pytest._code.Traceback(tb)\n assert isinstance(traceback[-1].path, str)\n assert filter_traceback(traceback[-1])", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo_TestReportInfo.test_itemreport_reportinfo.assert_item_location_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo_TestReportInfo.test_itemreport_reportinfo.assert_item_location_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1108, "end_line": 1123, "span_ids": ["TestReportInfo", "TestReportInfo.test_itemreport_reportinfo"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportInfo:\n def test_itemreport_reportinfo(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n class MyFunction(pytest.Function):\n def reportinfo(self):\n return \"ABCDE\", 42, \"custom\"\n def pytest_pycollect_makeitem(collector, name, obj):\n if name == \"test_func\":\n return MyFunction.from_parent(name=name, parent=collector)\n \"\"\"\n )\n item = pytester.getitem(\"def test_func(): pass\")\n item.config.pluginmanager.getplugin(\"runner\")\n assert item.location == (\"ABCDE\", 42, \"custom\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo.test_func_reportinfo_TestReportInfo.test_class_reportinfo.assert_msg_TestClass_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo.test_func_reportinfo_TestReportInfo.test_class_reportinfo.assert_msg_TestClass_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1125, "end_line": 1145, "span_ids": ["TestReportInfo.test_func_reportinfo", "TestReportInfo.test_class_reportinfo"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportInfo:\n\n def test_func_reportinfo(self, pytester: Pytester) -> None:\n item = pytester.getitem(\"def test_func(): pass\")\n fspath, lineno, modpath = item.reportinfo()\n assert fspath == item.fspath\n assert lineno == 0\n assert modpath == \"test_func\"\n\n def test_class_reportinfo(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n # lineno 0\n class TestClass(object):\n def test_hello(self): pass\n \"\"\"\n )\n classcol = pytester.collect_by_name(modcol, \"TestClass\")\n assert isinstance(classcol, Class)\n fspath, lineno, msg = classcol.reportinfo()\n assert fspath == modcol.fspath\n assert lineno == 1\n assert msg == \"TestClass\"", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo.test_reportinfo_with_nasty_getattr_TestReportInfo.test_reportinfo_with_nasty_getattr.fspath_lineno_msg_ins": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestReportInfo.test_reportinfo_with_nasty_getattr_TestReportInfo.test_reportinfo_with_nasty_getattr.fspath_lineno_msg_ins", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1147, "end_line": 1167, "span_ids": ["TestReportInfo.test_reportinfo_with_nasty_getattr"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportInfo:\n\n @pytest.mark.filterwarnings(\n \"ignore:usage of Generator.Function is deprecated, please use pytest.Function instead\"\n )\n def test_reportinfo_with_nasty_getattr(self, pytester: Pytester) -> None:\n # https://github.com/pytest-dev/pytest/issues/1204\n modcol = pytester.getmodulecol(\n \"\"\"\n # lineno 0\n class TestClass(object):\n def __getattr__(self, name):\n return \"this is not an int\"\n\n def intest_foo(self):\n pass\n \"\"\"\n )\n classcol = pytester.collect_by_name(modcol, \"TestClass\")\n assert isinstance(classcol, Class)\n instance = list(classcol.collect())[0]\n assert isinstance(instance, Instance)\n fspath, lineno, msg = instance.reportinfo()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_customized_python_discovery_test_customized_python_discovery.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_customized_python_discovery_test_customized_python_discovery.None_3", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1170, "end_line": 1197, "span_ids": ["test_customized_python_discovery"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_customized_python_discovery(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n python_files=check_*.py\n python_classes=Check\n python_functions=check\n \"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n def check_simple():\n pass\n class CheckMyApp(object):\n def check_meth(self):\n pass\n \"\"\"\n )\n p2 = p.with_name(p.name.replace(\"test\", \"check\"))\n p.rename(p2)\n result = pytester.runpytest(\"--collect-only\", \"-s\")\n result.stdout.fnmatch_lines(\n [\"*check_customized*\", \"*check_simple*\", 
\"*CheckMyApp*\", \"*check_meth*\"]\n )\n\n result = pytester.runpytest()\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_dont_collect_non_function_callable_test_dont_collect_non_function_callable.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_dont_collect_non_function_callable_test_dont_collect_non_function_callable.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1240, "end_line": 1266, "span_ids": ["test_dont_collect_non_function_callable"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_dont_collect_non_function_callable(pytester: Pytester) -> None:\n \"\"\"Test for issue https://github.com/pytest-dev/pytest/issues/331\n\n In this case an INTERNALERROR occurred trying to report the failure of\n a test like this one because pytest failed to get the source lines.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n class Oh(object):\n def __call__(self):\n pass\n\n test_a = Oh()\n\n def test_real():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*collected 1 item*\",\n \"*test_dont_collect_non_function_callable.py:2: *cannot collect 'test_a' because it is not a function*\",\n \"*1 passed, 1 warning in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_class_injection_does_not_break_collection_test_class_injection_does_not_break_collection.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_class_injection_does_not_break_collection_test_class_injection_does_not_break_collection.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1269, "end_line": 1296, "span_ids": ["test_class_injection_does_not_break_collection"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_class_injection_does_not_break_collection(pytester: Pytester) -> None:\n \"\"\"Tests whether injection during collection time will terminate testing.\n\n In this case the error should not occur if the TestClass itself\n is modified during collection time, and the original 
method list\n is still used for collection.\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n from test_inject import TestClass\n def pytest_generate_tests(metafunc):\n TestClass.changed_var = {}\n \"\"\"\n )\n pytester.makepyfile(\n test_inject='''\n class TestClass(object):\n def test_injection(self):\n \"\"\"Test being parametrized.\"\"\"\n pass\n '''\n )\n result = pytester.runpytest()\n assert (\n \"RuntimeError: dictionary changed size during iteration\"\n not in result.stdout.str()\n )\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_keep_duplicates_test_keep_duplicates.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_keep_duplicates_test_keep_duplicates.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1345, "end_line": 1362, "span_ids": ["test_keep_duplicates"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_keep_duplicates(pytester: Pytester) -> None:\n \"\"\"Test for issue https://github.com/pytest-dev/pytest/issues/1609 (#1609)\n\n Use --keep-duplicates to collect tests from duplicate directories.\n \"\"\"\n a = pytester.mkdir(\"a\")\n fh = a.joinpath(\"test_a.py\")\n fh.write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n def test_real():\n pass\n \"\"\"\n )\n )\n result = pytester.runpytest(\"--keep-duplicates\", str(a), str(a))\n result.stdout.fnmatch_lines([\"*collected 2 item*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_collection_infinite_recursion_test_package_collection_init_given_as_argument.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_collection_infinite_recursion_test_package_collection_init_given_as_argument.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1365, "end_line": 1375, "span_ids": ["test_package_collection_infinite_recursion", "test_package_collection_init_given_as_argument"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_package_collection_infinite_recursion(pytester: Pytester) -> None:\n pytester.copy_example(\"collect/package_infinite_recursion\")\n result = 
pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n\ndef test_package_collection_init_given_as_argument(pytester: Pytester) -> None:\n \"\"\"Regression test for #3749\"\"\"\n p = pytester.copy_example(\"collect/package_init_given_as_arg\")\n result = pytester.runpytest(p / \"pkg\" / \"__init__.py\")\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_with_modules_test_package_with_modules.None_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_with_modules_test_package_with_modules.None_7", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1378, "end_line": 1418, "span_ids": ["test_package_with_modules"], "tokens": 356}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_package_with_modules(pytester: Pytester) -> None:\n \"\"\"\n .\n \u2514\u2500\u2500 root\n \u251c\u2500\u2500 __init__.py\n \u251c\u2500\u2500 sub1\n \u2502 \u251c\u2500\u2500 __init__.py\n \u2502 \u2514\u2500\u2500 sub1_1\n \u2502 \u251c\u2500\u2500 __init__.py\n \u2502 \u2514\u2500\u2500 test_in_sub1.py\n \u2514\u2500\u2500 sub2\n \u2514\u2500\u2500 test\n \u2514\u2500\u2500 test_in_sub2.py\n\n \"\"\"\n root = pytester.mkpydir(\"root\")\n sub1 = root.joinpath(\"sub1\")\n sub1_test = sub1.joinpath(\"sub1_1\")\n sub1_test.mkdir(parents=True)\n for d in (sub1, sub1_test):\n d.joinpath(\"__init__.py\").touch()\n\n sub2 = root.joinpath(\"sub2\")\n sub2_test = sub2.joinpath(\"test\")\n sub2_test.mkdir(parents=True)\n\n sub1_test.joinpath(\"test_in_sub1.py\").write_text(\"def test_1(): pass\")\n sub2_test.joinpath(\"test_in_sub2.py\").write_text(\"def test_2(): pass\")\n\n # Execute from .\n result = pytester.runpytest(\"-v\", \"-s\")\n result.assert_outcomes(passed=2)\n\n # Execute from . 
with one argument \"root\"\n result = pytester.runpytest(\"-v\", \"-s\", \"root\")\n result.assert_outcomes(passed=2)\n\n # Chdir into package's root and execute with no args\n os.chdir(root)\n result = pytester.runpytest(\"-v\", \"-s\")\n result.assert_outcomes(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_ordering_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_package_ordering_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1421, "end_line": 1456, "span_ids": ["test_package_ordering"], "tokens": 270}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_package_ordering(pytester: Pytester) -> None:\n \"\"\"\n .\n \u2514\u2500\u2500 root\n \u251c\u2500\u2500 Test_root.py\n \u251c\u2500\u2500 __init__.py\n \u251c\u2500\u2500 sub1\n \u2502 \u251c\u2500\u2500 Test_sub1.py\n \u2502 \u2514\u2500\u2500 __init__.py\n \u2514\u2500\u2500 sub2\n \u2514\u2500\u2500 test\n \u2514\u2500\u2500 test_sub2.py\n\n \"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n python_files=*.py\n \"\"\"\n )\n root = pytester.mkpydir(\"root\")\n sub1 = root.joinpath(\"sub1\")\n sub1.mkdir()\n sub1.joinpath(\"__init__.py\").touch()\n sub2 = root.joinpath(\"sub2\")\n sub2_test = sub2.joinpath(\"test\")\n sub2_test.mkdir(parents=True)\n\n root.joinpath(\"Test_root.py\").write_text(\"def test_1(): pass\")\n sub1.joinpath(\"Test_sub1.py\").write_text(\"def test_2(): pass\")\n sub2_test.joinpath(\"test_sub2.py\").write_text(\"def test_3(): pass\")\n\n # Execute from .\n result = pytester.runpytest(\"-v\", \"-s\")\n result.assert_outcomes(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_extend_fixture_conftest_plugin_TestFillFixtures.test_extend_fixture_conftest_plugin.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_extend_fixture_conftest_plugin_TestFillFixtures.test_extend_fixture_conftest_plugin.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 178, "end_line": 207, "span_ids": ["TestFillFixtures.test_extend_fixture_conftest_plugin"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_extend_fixture_conftest_plugin(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n testplugin=\"\"\"\n import pytest\n\n @pytest.fixture\n def foo():\n return 7\n \"\"\"\n )\n pytester.syspathinsert()\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n pytest_plugins = 'testplugin'\n\n @pytest.fixture\n def foo(foo):\n return foo + 7\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_foo(foo):\n assert foo == 14\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_extend_fixture_plugin_plugin_TestFillFixtures.test_extend_fixture_plugin_plugin.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_extend_fixture_plugin_plugin_TestFillFixtures.test_extend_fixture_plugin_plugin.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 209, "end_line": 239, "span_ids": ["TestFillFixtures.test_extend_fixture_plugin_plugin"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_extend_fixture_plugin_plugin(self, pytester: Pytester) -> None:\n # Two plugins should extend each order in loading order\n pytester.makepyfile(\n testplugin0=\"\"\"\n import pytest\n\n @pytest.fixture\n def foo():\n return 7\n \"\"\"\n )\n pytester.makepyfile(\n testplugin1=\"\"\"\n import pytest\n\n @pytest.fixture\n def foo(foo):\n return foo + 7\n \"\"\"\n )\n pytester.syspathinsert()\n pytester.makepyfile(\n \"\"\"\n pytest_plugins = ['testplugin0', 'testplugin1']\n\n def test_foo(foo):\n assert foo == 14\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_conftest_module_TestFillFixtures.test_override_parametrized_fixture_conftest_module.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_conftest_module_TestFillFixtures.test_override_parametrized_fixture_conftest_module.None_2", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 241, "end_line": 269, "span_ids": ["TestFillFixtures.test_override_parametrized_fixture_conftest_module"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_parametrized_fixture_conftest_module(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test override of the parametrized fixture with non-parametrized one on the test module level.\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2, 3])\n def spam(request):\n return request.param\n \"\"\"\n )\n testfile = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def spam():\n return 'spam'\n\n def test_spam(spam):\n assert spam == 'spam'\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result = pytester.runpytest(testfile)\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_parametrized_fixture_conftest_conftest.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_parametrized_fixture_conftest_conftest.None_4", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 271, "end_line": 308, "span_ids": ["TestFillFixtures.test_override_parametrized_fixture_conftest_conftest"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_parametrized_fixture_conftest_conftest(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test override of the parametrized fixture with non-parametrized one on the conftest level.\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2, 3])\n def spam(request):\n return request.param\n \"\"\"\n )\n subdir = pytester.mkpydir(\"subdir\")\n subdir.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.fixture\n def spam():\n return 'spam'\n \"\"\"\n )\n )\n testfile = subdir.joinpath(\"test_spam.py\")\n testfile.write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_spam(spam):\n assert spam == \"spam\"\n \"\"\"\n )\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result = pytester.runpytest(testfile)\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_non_parametrized_fixture_conftest_module_TestFillFixtures.test_override_non_parametrized_fixture_conftest_module.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_non_parametrized_fixture_conftest_module_TestFillFixtures.test_override_non_parametrized_fixture_conftest_module.None_2", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 310, "end_line": 341, "span_ids": ["TestFillFixtures.test_override_non_parametrized_fixture_conftest_module"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_non_parametrized_fixture_conftest_module(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test override of the non-parametrized fixture with parametrized one on the test module level.\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def spam():\n return 'spam'\n \"\"\"\n )\n testfile = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2, 3])\n def spam(request):\n return request.param\n\n params = {'spam': 1}\n\n def test_spam(spam):\n assert spam == params['spam']\n params['spam'] += 1\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*3 passed*\"])\n result = pytester.runpytest(testfile)\n result.stdout.fnmatch_lines([\"*3 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_non_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_non_parametrized_fixture_conftest_conftest.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_non_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_non_parametrized_fixture_conftest_conftest.None_4", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 343, "end_line": 383, "span_ids": ["TestFillFixtures.test_override_non_parametrized_fixture_conftest_conftest"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_non_parametrized_fixture_conftest_conftest(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test override of the non-parametrized fixture with parametrized one on the conftest 
level.\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def spam():\n return 'spam'\n \"\"\"\n )\n subdir = pytester.mkpydir(\"subdir\")\n subdir.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.fixture(params=[1, 2, 3])\n def spam(request):\n return request.param\n \"\"\"\n )\n )\n testfile = subdir.joinpath(\"test_spam.py\")\n testfile.write_text(\n textwrap.dedent(\n \"\"\"\\\n params = {'spam': 1}\n\n def test_spam(spam):\n assert spam == params['spam']\n params['spam'] += 1\n \"\"\"\n )\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*3 passed*\"])\n result = pytester.runpytest(testfile)\n result.stdout.fnmatch_lines([\"*3 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest_TestFillFixtures.test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest.None_4", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 385, "end_line": 427, "span_ids": ["TestFillFixtures.test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest"], "tokens": 304}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test override of the autouse fixture with parametrized one on the conftest level.\n This test covers the issue explained in issue 1601\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(autouse=True)\n def spam():\n return 'spam'\n \"\"\"\n )\n subdir = pytester.mkpydir(\"subdir\")\n subdir.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.fixture(params=[1, 2, 3])\n def spam(request):\n return request.param\n \"\"\"\n )\n )\n testfile = subdir.joinpath(\"test_spam.py\")\n testfile.write_text(\n textwrap.dedent(\n \"\"\"\\\n params = {'spam': 1}\n\n def test_spam(spam):\n assert spam == params['spam']\n params['spam'] += 1\n \"\"\"\n )\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*3 passed*\"])\n result = pytester.runpytest(testfile)\n result.stdout.fnmatch_lines([\"*3 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_autouse_fixture_plugin_TestFillFixtures.test_autouse_fixture_plugin.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_autouse_fixture_plugin_TestFillFixtures.test_autouse_fixture_plugin.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 561, "end_line": 583, "span_ids": ["TestFillFixtures.test_autouse_fixture_plugin"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_autouse_fixture_plugin(self, pytester: Pytester) -> None:\n # A fixture from a plugin has no baseid set, which screwed up\n # the autouse fixture handling.\n pytester.makepyfile(\n testplugin=\"\"\"\n import pytest\n\n @pytest.fixture(autouse=True)\n def foo(request):\n request.function.foo = 7\n \"\"\"\n )\n pytester.syspathinsert()\n pytester.makepyfile(\n \"\"\"\n pytest_plugins = 'testplugin'\n\n def test_foo(request):\n assert request.function.foo == 7\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_fixture_excinfo_leak_TestFillFixtures.test_fixture_excinfo_leak.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_fixture_excinfo_leak_TestFillFixtures.test_fixture_excinfo_leak.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 623, "end_line": 645, "span_ids": ["TestFillFixtures.test_fixture_excinfo_leak"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_fixture_excinfo_leak(self, pytester: Pytester) -> None:\n # on python2 sys.excinfo would leak into fixture executions\n pytester.makepyfile(\n \"\"\"\n import sys\n import traceback\n import pytest\n\n @pytest.fixture\n def leak():\n if sys.exc_info()[0]: # python3 bug :)\n traceback.print_exc()\n #fails\n assert sys.exc_info() == (None, None, None)\n\n def test_leak(leak):\n if sys.exc_info()[0]: # python3 bug :)\n traceback.print_exc()\n assert sys.exc_info() == (None, None, None)\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic_TestRequestBasic.test_request_attributes_method.assert_req_instance___cla": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic_TestRequestBasic.test_request_attributes_method.assert_req_instance___cla", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 648, "end_line": 685, "span_ids": ["TestRequestBasic.test_request_attributes", "TestRequestBasic", "TestRequestBasic.test_request_attributes_method"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n def test_request_attributes(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def something(request): pass\n def test_func(something): pass\n \"\"\"\n )\n assert isinstance(item, Function)\n req = fixtures.FixtureRequest(item, _ispytest=True)\n assert req.function == item.obj\n assert req.keywords == item.keywords\n assert hasattr(req.module, \"test_func\")\n assert req.cls is None\n assert req.function.__name__ == \"test_func\"\n assert req.config == item.config\n assert repr(req).find(req.function.__name__) != -1\n\n def test_request_attributes_method(self, pytester: Pytester) -> None:\n (item,) = pytester.getitems(\n \"\"\"\n import pytest\n class TestB(object):\n\n @pytest.fixture\n def something(self, request):\n return 1\n def test_func(self, something):\n pass\n \"\"\"\n )\n assert isinstance(item, Function)\n req = item._request\n assert req.cls.__name__ == \"TestB\"\n assert req.instance.__class__ == req.cls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_contains_funcarg_arg2fixturedefs_TestRequestBasic.test_request_contains_funcarg_arg2fixturedefs.assert_arg2fixturedefs_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_contains_funcarg_arg2fixturedefs_TestRequestBasic.test_request_contains_funcarg_arg2fixturedefs.assert_arg2fixturedefs_s", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 687, "end_line": 705, "span_ids": ["TestRequestBasic.test_request_contains_funcarg_arg2fixturedefs"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def 
test_request_contains_funcarg_arg2fixturedefs(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n import pytest\n @pytest.fixture\n def something(request):\n pass\n class TestClass(object):\n def test_method(self, something):\n pass\n \"\"\"\n )\n (item1,) = pytester.genitems([modcol])\n assert item1.name == \"test_method\"\n arg2fixturedefs = fixtures.FixtureRequest(\n item1, _ispytest=True\n )._arg2fixturedefs\n assert len(arg2fixturedefs) == 1\n assert arg2fixturedefs[\"something\"][0].argname == \"something\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_garbage_TestRequestBasic.test_request_garbage.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_garbage_TestRequestBasic.test_request_garbage.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 707, "end_line": 745, "span_ids": ["TestRequestBasic.test_request_garbage"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n @pytest.mark.skipif(\n hasattr(sys, \"pypy_version_info\"),\n reason=\"this method of test doesn't work on pypy\",\n )\n def test_request_garbage(self, pytester: Pytester) -> None:\n try:\n import xdist # noqa\n except ImportError:\n pass\n else:\n pytest.xfail(\"this test is flaky when executed with xdist\")\n pytester.makepyfile(\n \"\"\"\n import sys\n import pytest\n from _pytest.fixtures import PseudoFixtureDef\n import gc\n\n @pytest.fixture(autouse=True)\n def something(request):\n original = gc.get_debug()\n gc.set_debug(gc.DEBUG_SAVEALL)\n gc.collect()\n\n yield\n\n try:\n gc.collect()\n leaked = [x for _ in gc.garbage if isinstance(_, PseudoFixtureDef)]\n assert leaked == []\n finally:\n gc.set_debug(original)\n\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_recursive_TestRequestBasic.test_getfixturevalue_recursive.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_recursive_TestRequestBasic.test_getfixturevalue_recursive.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 747, "end_line": 769, "span_ids": ["TestRequestBasic.test_getfixturevalue_recursive"], "tokens": 129}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_getfixturevalue_recursive(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def something(request):\n return 1\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def something(request):\n return request.getfixturevalue(\"something\") + 1\n def test_func(something):\n assert something == 2\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_teardown_TestRequestBasic.test_getfixturevalue_teardown.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_teardown_TestRequestBasic.test_getfixturevalue_teardown.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 771, "end_line": 807, "span_ids": ["TestRequestBasic.test_getfixturevalue_teardown"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_getfixturevalue_teardown(self, pytester: Pytester) -> None:\n \"\"\"\n Issue #1895\n\n `test_inner` requests `inner` fixture, which in turn requests `resource`\n using `getfixturevalue`. 
`test_func` then requests `resource`.\n\n `resource` is teardown before `inner` because the fixture mechanism won't consider\n `inner` dependent on `resource` when it is used via `getfixturevalue`: `test_func`\n will then cause the `resource`'s finalizer to be called first because of this.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='session')\n def resource():\n r = ['value']\n yield r\n r.pop()\n\n @pytest.fixture(scope='session')\n def inner(request):\n resource = request.getfixturevalue('resource')\n assert resource == ['value']\n yield\n assert resource == ['value']\n\n def test_inner(inner):\n pass\n\n def test_func(resource):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 2 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_TestRequestBasic.test_request_addfinalizer.assert_teardownlist_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_TestRequestBasic.test_request_addfinalizer.assert_teardownlist_1", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 846, "end_line": 868, "span_ids": ["TestRequestBasic.test_request_addfinalizer"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_request_addfinalizer(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n teardownlist = []\n @pytest.fixture\n def something(request):\n request.addfinalizer(lambda: teardownlist.append(1))\n def test_func(something): pass\n \"\"\"\n )\n assert isinstance(item, Function)\n item.session._setupstate.setup(item)\n item._request._fillfixtures()\n # successively check finalization calls\n parent = item.getparent(pytest.Module)\n assert parent is not None\n teardownlist = parent.obj.teardownlist\n ss = item.session._setupstate\n assert not teardownlist\n ss.teardown_exact(None)\n print(ss.stack)\n assert teardownlist == [1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_failing_setup_TestRequestBasic.test_request_addfinalizer_failing_setup.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_failing_setup_TestRequestBasic.test_request_addfinalizer_failing_setup.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 870, "end_line": 886, "span_ids": 
["TestRequestBasic.test_request_addfinalizer_failing_setup"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_request_addfinalizer_failing_setup(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = [1]\n @pytest.fixture\n def myfix(request):\n request.addfinalizer(values.pop)\n assert 0\n def test_fix(myfix):\n pass\n def test_finalizer_ran():\n assert not values\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(failed=1, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_failing_setup_module_TestRequestBasic.test_request_addfinalizer_failing_setup_module.assert_not_mod_values": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_failing_setup_module_TestRequestBasic.test_request_addfinalizer_failing_setup_module.assert_not_mod_values", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 888, "end_line": 906, "span_ids": ["TestRequestBasic.test_request_addfinalizer_failing_setup_module"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_request_addfinalizer_failing_setup_module(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = [1, 2]\n @pytest.fixture(scope=\"module\")\n def myfix(request):\n request.addfinalizer(values.pop)\n request.addfinalizer(values.pop)\n assert 0\n def test_fix(myfix):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n mod = reprec.getcalls(\"pytest_runtest_setup\")[0].item.module\n assert not mod.values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_partial_setup_failure_TestRequestBasic.test_request_addfinalizer_partial_setup_failure.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_addfinalizer_partial_setup_failure_TestRequestBasic.test_request_addfinalizer_partial_setup_failure.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 908, "end_line": 927, "span_ids": 
["TestRequestBasic.test_request_addfinalizer_partial_setup_failure"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_request_addfinalizer_partial_setup_failure(\n self, pytester: Pytester\n ) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture\n def something(request):\n request.addfinalizer(lambda: values.append(None))\n def test_func(something, missingarg):\n pass\n def test_second():\n assert len(values) == 1\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\"*1 error*\"] # XXX the whole module collection fails\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_subrequest_addfinalizer_exceptions_TestRequestBasic.test_request_subrequest_addfinalizer_exceptions.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_subrequest_addfinalizer_exceptions_TestRequestBasic.test_request_subrequest_addfinalizer_exceptions.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 929, "end_line": 963, "span_ids": ["TestRequestBasic.test_request_subrequest_addfinalizer_exceptions"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_request_subrequest_addfinalizer_exceptions(\n self, pytester: Pytester\n ) -> None:\n \"\"\"\n Ensure exceptions raised during teardown by a finalizer are suppressed\n until all finalizers are called, re-raising the first exception (#2440)\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n def _excepts(where):\n raise Exception('Error in %s fixture' % where)\n @pytest.fixture\n def subrequest(request):\n return request\n @pytest.fixture\n def something(subrequest):\n subrequest.addfinalizer(lambda: values.append(1))\n subrequest.addfinalizer(lambda: values.append(2))\n subrequest.addfinalizer(lambda: _excepts('something'))\n @pytest.fixture\n def excepts(subrequest):\n subrequest.addfinalizer(lambda: _excepts('excepts'))\n subrequest.addfinalizer(lambda: values.append(3))\n def test_first(something, excepts):\n pass\n def test_second():\n assert values == [3, 2, 1]\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"*Exception: Error in excepts fixture\", \"* 2 passed, 1 error in *\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_getmodulepath_TestRequestBasic.test_request_fixturenames.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_getmodulepath_TestRequestBasic.test_request_fixturenames.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 965, "end_line": 992, "span_ids": ["TestRequestBasic.test_request_fixturenames", "TestRequestBasic.test_request_getmodulepath"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_request_getmodulepath(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\"def test_somefunc(): pass\")\n (item,) = pytester.genitems([modcol])\n req = fixtures.FixtureRequest(item, _ispytest=True)\n assert req.fspath == modcol.fspath\n\n def test_request_fixturenames(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n from _pytest.pytester import get_public_names\n @pytest.fixture()\n def arg1():\n pass\n @pytest.fixture()\n def farg(arg1):\n pass\n @pytest.fixture(autouse=True)\n def sarg(tmpdir):\n pass\n def test_function(request, farg):\n assert set(get_public_names(request.fixturenames)) == \\\n set([\"tmpdir\", \"sarg\", \"arg1\", \"request\", \"farg\",\n \"tmp_path\", \"tmp_path_factory\"])\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_fixtures_sub_subdir_normalize_sep_TestRequestBasic.test_fixtures_sub_subdir_normalize_sep.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_fixtures_sub_subdir_normalize_sep_TestRequestBasic.test_fixtures_sub_subdir_normalize_sep.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1032, "end_line": 1055, "span_ids": ["TestRequestBasic.test_fixtures_sub_subdir_normalize_sep"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_fixtures_sub_subdir_normalize_sep(self, pytester: Pytester) -> None:\n # this tests that normalization of nodeids takes place\n b = pytester.path.joinpath(\"tests\", \"unit\")\n b.mkdir(parents=True)\n b.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def arg1():\n pass\n 
\"\"\"\n )\n )\n p = b.joinpath(\"test_module.py\")\n p.write_text(\"def test_func(arg1): pass\")\n result = pytester.runpytest(p, \"--fixtures\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n \"\"\"\n *fixtures defined*conftest*\n *arg1*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_show_fixtures_color_yes_TestRequestBasic.test_newstyle_with_request.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_show_fixtures_color_yes_TestRequestBasic.test_newstyle_with_request.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1057, "end_line": 1074, "span_ids": ["TestRequestBasic.test_show_fixtures_color_yes", "TestRequestBasic.test_newstyle_with_request"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_show_fixtures_color_yes(self, pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_this(): assert 1\")\n result = pytester.runpytest(\"--color=yes\", \"--fixtures\")\n assert \"\\x1b[32mtmpdir\" in result.stdout.str()\n\n def test_newstyle_with_request(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture()\n def arg(request):\n pass\n def test_1(arg):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_setupcontext_no_param_TestRequestBasic.test_setupcontext_no_param.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_setupcontext_no_param_TestRequestBasic.test_setupcontext_no_param.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1076, "end_line": 1092, "span_ids": ["TestRequestBasic.test_setupcontext_no_param"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_setupcontext_no_param(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(params=[1,2])\n def arg(request):\n return request.param\n\n @pytest.fixture(autouse=True)\n def mysetup(request, arg):\n assert 
not hasattr(request, \"param\")\n def test_1(arg):\n assert arg in (1,2)\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages_TestFixtureUsages.test_receives_funcargs.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages_TestFixtureUsages.test_receives_funcargs.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1165, "end_line": 1201, "span_ids": ["TestFixtureUsages.test_receives_funcargs", "TestFixtureUsages", "TestFixtureUsages.test_noargfixturedec"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n def test_noargfixturedec(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg1():\n return 1\n\n def test_func(arg1):\n assert arg1 == 1\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n def test_receives_funcargs(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture()\n def arg1():\n return 1\n\n @pytest.fixture()\n def arg2(arg1):\n return arg1 + 1\n\n def test_add(arg2):\n assert arg2 == 2\n def test_all(arg1, arg2):\n assert arg1 == 1\n assert arg2 == 2\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_receives_funcargs_scope_mismatch_TestFixtureUsages.test_receives_funcargs_scope_mismatch.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_receives_funcargs_scope_mismatch_TestFixtureUsages.test_receives_funcargs_scope_mismatch.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1203, "end_line": 1227, "span_ids": ["TestFixtureUsages.test_receives_funcargs_scope_mismatch"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_receives_funcargs_scope_mismatch(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"function\")\n def arg1():\n return 1\n\n 
@pytest.fixture(scope=\"module\")\n def arg2(arg1):\n return arg1 + 1\n\n def test_add(arg2):\n assert arg2 == 2\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*ScopeMismatch*involved factories*\",\n \"test_receives_funcargs_scope_mismatch.py:6: def arg2(arg1)\",\n \"test_receives_funcargs_scope_mismatch.py:2: def arg1()\",\n \"*1 error*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_receives_funcargs_scope_mismatch_issue660_TestFixtureUsages.test_invalid_scope.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_receives_funcargs_scope_mismatch_issue660_TestFixtureUsages.test_invalid_scope.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1229, "end_line": 1267, "span_ids": ["TestFixtureUsages.test_receives_funcargs_scope_mismatch_issue660", "TestFixtureUsages.test_invalid_scope"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_receives_funcargs_scope_mismatch_issue660(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"function\")\n def arg1():\n return 1\n\n @pytest.fixture(scope=\"module\")\n def arg2(arg1):\n return arg1 + 1\n\n def test_add(arg1, arg2):\n assert arg2 == 2\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"*ScopeMismatch*involved factories*\", \"* def arg2*\", \"*1 error*\"]\n )\n\n def test_invalid_scope(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"functions\")\n def badscope():\n pass\n\n def test_nothing(badscope):\n pass\n \"\"\"\n )\n result = pytester.runpytest_inprocess()\n result.stdout.fnmatch_lines(\n \"*Fixture 'badscope' from test_invalid_scope.py got an unexpected scope value 'functions'\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_funcarg_parametrized_and_used_twice_TestFixtureUsages.test_funcarg_parametrized_and_used_twice.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_funcarg_parametrized_and_used_twice_TestFixtureUsages.test_funcarg_parametrized_and_used_twice.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1301, "end_line": 1321, "span_ids": ["TestFixtureUsages.test_funcarg_parametrized_and_used_twice"], "tokens": 147}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_funcarg_parametrized_and_used_twice(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(params=[1,2])\n def arg1(request):\n values.append(1)\n return request.param\n\n @pytest.fixture()\n def arg2(arg1):\n return arg1 + 1\n\n def test_add(arg1, arg2):\n assert arg2 == arg1 + 1\n assert len(values) == arg1\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_marker_TestFixtureUsages.test_usefixtures_marker.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_marker_TestFixtureUsages.test_usefixtures_marker.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1368, "end_line": 1391, "span_ids": ["TestFixtureUsages.test_usefixtures_marker"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_usefixtures_marker(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n values = []\n\n @pytest.fixture(scope=\"class\")\n def myfix(request):\n request.cls.hello = \"world\"\n values.append(1)\n\n class TestClass(object):\n def test_one(self):\n assert self.hello == \"world\"\n assert len(values) == 1\n def test_two(self):\n assert self.hello == \"world\"\n assert len(values) == 1\n pytest.mark.usefixtures(\"myfix\")(TestClass)\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_ini_TestFixtureUsages.test_usefixtures_ini.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_ini_TestFixtureUsages.test_usefixtures_ini.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1393, "end_line": 1420, "span_ids": ["TestFixtureUsages.test_usefixtures_ini"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_usefixtures_ini(self, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n usefixtures = myfix\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"class\")\n def myfix(request):\n request.cls.hello = \"world\"\n\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n class TestClass(object):\n def test_one(self):\n assert self.hello == \"world\"\n def test_two(self):\n assert self.hello == \"world\"\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_seen_in_showmarkers_TestFixtureUsages.test_request_instance_issue203.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_usefixtures_seen_in_showmarkers_TestFixtureUsages.test_request_instance_issue203.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1422, "end_line": 1445, "span_ids": ["TestFixtureUsages.test_usefixtures_seen_in_showmarkers", "TestFixtureUsages.test_request_instance_issue203"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_usefixtures_seen_in_showmarkers(self, pytester: Pytester) -> None:\n result = pytester.runpytest(\"--markers\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *usefixtures(fixturename1*mark tests*fixtures*\n \"\"\"\n )\n\n def test_request_instance_issue203(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n class TestClass(object):\n @pytest.fixture\n def setup1(self, request):\n assert self == request.instance\n self.arg1 = 1\n def test_hello(self, setup1):\n assert self.arg1 == 1\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_fixture_parametrized_with_iterator_TestFixtureUsages.test_fixture_parametrized_with_iterator.assert_values_1_2_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_fixture_parametrized_with_iterator_TestFixtureUsages.test_fixture_parametrized_with_iterator.assert_values_1_2_1", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", 
"start_line": 1447, "end_line": 1474, "span_ids": ["TestFixtureUsages.test_fixture_parametrized_with_iterator"], "tokens": 186}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_fixture_parametrized_with_iterator(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n values = []\n def f():\n yield 1\n yield 2\n dec = pytest.fixture(scope=\"module\", params=f())\n\n @dec\n def arg(request):\n return request.param\n @dec\n def arg2(request):\n return request.param\n\n def test_1(arg):\n values.append(arg)\n def test_2(arg2):\n values.append(arg2*10)\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\")\n reprec.assertoutcome(passed=4)\n values = reprec.getcalls(\"pytest_runtest_call\")[0].item.module.values\n assert values == [1, 2, 10, 20]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_setup_functions_as_fixtures_TestFixtureUsages.test_setup_functions_as_fixtures.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_setup_functions_as_fixtures_TestFixtureUsages.test_setup_functions_as_fixtures.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1476, "end_line": 1513, "span_ids": ["TestFixtureUsages.test_setup_functions_as_fixtures"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_setup_functions_as_fixtures(self, pytester: Pytester) -> None:\n \"\"\"Ensure setup_* methods obey fixture scope rules (#517, #3094).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n DB_INITIALIZED = None\n\n @pytest.fixture(scope=\"session\", autouse=True)\n def db():\n global DB_INITIALIZED\n DB_INITIALIZED = True\n yield\n DB_INITIALIZED = False\n\n def setup_module():\n assert DB_INITIALIZED\n\n def teardown_module():\n assert DB_INITIALIZED\n\n class TestClass(object):\n\n def setup_method(self, method):\n assert DB_INITIALIZED\n\n def teardown_method(self, method):\n assert DB_INITIALIZED\n\n def test_printer_1(self):\n pass\n\n def test_printer_2(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 2 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories_TestFixtureManagerParseFactories.test_parsefactories_evil_objects_issue214.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories_TestFixtureManagerParseFactories.test_parsefactories_evil_objects_issue214.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1516, "end_line": 1552, "span_ids": ["TestFixtureManagerParseFactories.pytester", "TestFixtureManagerParseFactories.test_parsefactories_evil_objects_issue214", "TestFixtureManagerParseFactories"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureManagerParseFactories:\n @pytest.fixture\n def pytester(self, pytester: Pytester) -> Pytester:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def hello(request):\n return \"conftest\"\n\n @pytest.fixture\n def fm(request):\n return request._fixturemanager\n\n @pytest.fixture\n def item(request):\n return request._pyfuncitem\n \"\"\"\n )\n return pytester\n\n def test_parsefactories_evil_objects_issue214(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class A(object):\n def __call__(self):\n pass\n def __getattr__(self, name):\n raise RuntimeError()\n a = A()\n def test_hello():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1, failed=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_conftest_TestFixtureManagerParseFactories.test_parsefactories_conftest.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_conftest_TestFixtureManagerParseFactories.test_parsefactories_conftest.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1554, "end_line": 1566, "span_ids": ["TestFixtureManagerParseFactories.test_parsefactories_conftest"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureManagerParseFactories:\n\n def test_parsefactories_conftest(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_hello(item, fm):\n for name in (\"fm\", \"hello\", \"item\"):\n faclist = fm.getfixturedefs(name, item.nodeid)\n assert len(faclist) == 1\n fac = faclist[0]\n assert fac.func.__name__ == name\n \"\"\"\n )\n 
reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_conftest_and_module_and_class_TestFixtureManagerParseFactories.test_parsefactories_conftest_and_module_and_class.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_conftest_and_module_and_class_TestFixtureManagerParseFactories.test_parsefactories_conftest_and_module_and_class.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1568, "end_line": 1593, "span_ids": ["TestFixtureManagerParseFactories.test_parsefactories_conftest_and_module_and_class"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureManagerParseFactories:\n\n def test_parsefactories_conftest_and_module_and_class(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\\\n import pytest\n\n @pytest.fixture\n def hello(request):\n return \"module\"\n class TestClass(object):\n @pytest.fixture\n def hello(self, request):\n return \"class\"\n def test_hello(self, item, fm):\n faclist = fm.getfixturedefs(\"hello\", item.nodeid)\n print(faclist)\n assert len(faclist) == 3\n\n assert faclist[0].func(item._request) == \"conftest\"\n assert faclist[1].func(item._request) == \"module\"\n assert faclist[2].func(item._request) == \"class\"\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_package_xunit_fixture_TestFixtureManagerParseFactories.test_package_xunit_fixture.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_package_xunit_fixture_TestFixtureManagerParseFactories.test_package_xunit_fixture.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1648, "end_line": 1697, "span_ids": ["TestFixtureManagerParseFactories.test_package_xunit_fixture"], "tokens": 289}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureManagerParseFactories:\n\n def 
test_package_xunit_fixture(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n __init__=\"\"\"\\\n values = []\n \"\"\"\n )\n package = pytester.mkdir(\"package\")\n package.joinpath(\"__init__.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n from .. import values\n def setup_module():\n values.append(\"package\")\n def teardown_module():\n values[:] = []\n \"\"\"\n )\n )\n package.joinpath(\"test_x.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n from .. import values\n def test_x():\n assert values == [\"package\"]\n \"\"\"\n )\n )\n package = pytester.mkdir(\"package2\")\n package.joinpath(\"__init__.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n from .. import values\n def setup_module():\n values.append(\"package2\")\n def teardown_module():\n values[:] = []\n \"\"\"\n )\n )\n package.joinpath(\"test_x.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n from .. import values\n def test_x():\n assert values == [\"package2\"]\n \"\"\"\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_package_fixture_complex_TestFixtureManagerParseFactories.test_collect_custom_items.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_package_fixture_complex_TestFixtureManagerParseFactories.test_collect_custom_items.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1699, "end_line": 1743, "span_ids": ["TestFixtureManagerParseFactories.test_collect_custom_items", "TestFixtureManagerParseFactories.test_package_fixture_complex"], "tokens": 307}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureManagerParseFactories:\n\n def test_package_fixture_complex(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n __init__=\"\"\"\\\n values = []\n \"\"\"\n )\n pytester.syspathinsert(pytester.path.name)\n package = pytester.mkdir(\"package\")\n package.joinpath(\"__init__.py\").write_text(\"\")\n package.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n from .. import values\n @pytest.fixture(scope=\"package\")\n def one():\n values.append(\"package\")\n yield values\n values.pop()\n @pytest.fixture(scope=\"package\", autouse=True)\n def two():\n values.append(\"package-auto\")\n yield values\n values.pop()\n \"\"\"\n )\n )\n package.joinpath(\"test_x.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n from .. 
import values\n def test_package_autouse():\n assert values == [\"package-auto\"]\n def test_package(one):\n assert values == [\"package-auto\", \"package\"]\n \"\"\"\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)\n\n def test_collect_custom_items(self, pytester: Pytester) -> None:\n pytester.copy_example(\"fixtures/custom_item\")\n result = pytester.runpytest(\"foo\")\n result.stdout.fnmatch_lines([\"*passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_parsefactories_conftest_TestAutouseDiscovery.test_parsefactories_conftest.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_parsefactories_conftest_TestAutouseDiscovery.test_parsefactories_conftest.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1774, "end_line": 1786, "span_ids": ["TestAutouseDiscovery.test_parsefactories_conftest"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n\n def test_parsefactories_conftest(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from _pytest.pytester import get_public_names\n def test_check_setup(item, fm):\n autousenames = list(fm._getautousenames(item.nodeid))\n assert len(get_public_names(autousenames)) == 2\n assert \"perfunction2\" in autousenames\n assert \"perfunction\" in autousenames\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_two_classes_separated_autouse_TestAutouseDiscovery.test_two_classes_separated_autouse.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_two_classes_separated_autouse_TestAutouseDiscovery.test_two_classes_separated_autouse.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1788, "end_line": 1809, "span_ids": ["TestAutouseDiscovery.test_two_classes_separated_autouse"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n\n def test_two_classes_separated_autouse(self, pytester: Pytester) -> None:\n 
pytester.makepyfile(\n \"\"\"\n import pytest\n class TestA(object):\n values = []\n @pytest.fixture(autouse=True)\n def setup1(self):\n self.values.append(1)\n def test_setup1(self):\n assert self.values == [1]\n class TestB(object):\n values = []\n @pytest.fixture(autouse=True)\n def setup2(self):\n self.values.append(1)\n def test_setup2(self):\n assert self.values == [1]\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_setup_at_classlevel_TestAutouseDiscovery.test_setup_at_classlevel.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_setup_at_classlevel_TestAutouseDiscovery.test_setup_at_classlevel.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1811, "end_line": 1826, "span_ids": ["TestAutouseDiscovery.test_setup_at_classlevel"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n\n def test_setup_at_classlevel(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n class TestClass(object):\n @pytest.fixture(autouse=True)\n def permethod(self, request):\n request.instance.funcname = request.function.__name__\n def test_method1(self):\n assert self.funcname == \"test_method1\"\n def test_method2(self):\n assert self.funcname == \"test_method2\"\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_setup_enabled_functionnode_TestAutouseDiscovery.test_setup_enabled_functionnode.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_setup_enabled_functionnode_TestAutouseDiscovery.test_setup_enabled_functionnode.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1828, "end_line": 1854, "span_ids": ["TestAutouseDiscovery.test_setup_enabled_functionnode"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n\n @pytest.mark.xfail(reason=\"'enabled' feature not implemented\")\n def 
test_setup_enabled_functionnode(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def enabled(parentnode, markers):\n return \"needsdb\" in markers\n\n @pytest.fixture(params=[1,2])\n def db(request):\n return request.param\n\n @pytest.fixture(enabled=enabled, autouse=True)\n def createdb(db):\n pass\n\n def test_func1(request):\n assert \"db\" not in request.fixturenames\n\n @pytest.mark.needsdb\n def test_func2(request):\n assert \"db\" in request.fixturenames\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_callables_nocode_TestAutouseDiscovery.test_callables_nocode.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_callables_nocode_TestAutouseDiscovery.test_callables_nocode.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1856, "end_line": 1871, "span_ids": ["TestAutouseDiscovery.test_callables_nocode"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n\n def test_callables_nocode(self, pytester: Pytester) -> None:\n \"\"\"An imported mock.call would break setup/factory discovery due to\n it being callable and __code__ not being a code object.\"\"\"\n pytester.makepyfile(\n \"\"\"\n class _call(tuple):\n def __call__(self, *k, **kw):\n pass\n def __getattr__(self, k):\n return self\n\n call = _call()\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(failed=0, passed=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_autouse_in_conftests_TestAutouseDiscovery.test_autouse_in_conftests.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_autouse_in_conftests_TestAutouseDiscovery.test_autouse_in_conftests.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1873, "end_line": 1892, "span_ids": ["TestAutouseDiscovery.test_autouse_in_conftests"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n\n def 
test_autouse_in_conftests(self, pytester: Pytester) -> None:\n a = pytester.mkdir(\"a\")\n b = pytester.mkdir(\"a1\")\n conftest = pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture(autouse=True)\n def hello():\n xxx\n \"\"\"\n )\n conftest.rename(a.joinpath(conftest.name))\n a.joinpath(\"test_something.py\").write_text(\"def test_func(): pass\")\n b.joinpath(\"test_otherthing.py\").write_text(\"def test_func(): pass\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *1 passed*1 error*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_autouse_in_module_and_two_classes_TestAutouseDiscovery.test_autouse_in_module_and_two_classes.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery.test_autouse_in_module_and_two_classes_TestAutouseDiscovery.test_autouse_in_module_and_two_classes.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1894, "end_line": 1917, "span_ids": ["TestAutouseDiscovery.test_autouse_in_module_and_two_classes"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n\n def test_autouse_in_module_and_two_classes(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(autouse=True)\n def append1():\n values.append(\"module\")\n def test_x():\n assert values == [\"module\"]\n\n class TestA(object):\n @pytest.fixture(autouse=True)\n def append2(self):\n values.append(\"A\")\n def test_hello(self):\n assert values == [\"module\", \"module\", \"A\"], values\n class TestA2(object):\n def test_world(self):\n assert values == [\"module\", \"module\", \"A\", \"module\"], values\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement_TestAutouseManagement.test_autouse_conftest_mid_directory.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement_TestAutouseManagement.test_autouse_conftest_mid_directory.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1920, "end_line": 1948, "span_ids": ["TestAutouseManagement", "TestAutouseManagement.test_autouse_conftest_mid_directory"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n def test_autouse_conftest_mid_directory(self, pytester: Pytester) -> None:\n pkgdir = pytester.mkpydir(\"xyz123\")\n pkgdir.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture(autouse=True)\n def app():\n import sys\n sys._myapp = \"hello\"\n \"\"\"\n )\n )\n sub = pkgdir.joinpath(\"tests\")\n sub.mkdir()\n t = sub.joinpath(\"test_app.py\")\n t.touch()\n t.write_text(\n textwrap.dedent(\n \"\"\"\\\n import sys\n def test_app():\n assert sys._myapp == \"hello\"\n \"\"\"\n )\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_funcarg_and_setup_TestAutouseManagement.test_funcarg_and_setup.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_funcarg_and_setup_TestAutouseManagement.test_funcarg_and_setup.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1950, "end_line": 1975, "span_ids": ["TestAutouseManagement.test_funcarg_and_setup"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n\n def test_funcarg_and_setup(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(scope=\"module\")\n def arg():\n values.append(1)\n return 0\n @pytest.fixture(scope=\"module\", autouse=True)\n def something(arg):\n values.append(2)\n\n def test_hello(arg):\n assert len(values) == 2\n assert values == [1,2]\n assert arg == 0\n\n def test_hello2(arg):\n assert len(values) == 2\n assert values == [1,2]\n assert arg == 0\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_uses_parametrized_resource_TestAutouseManagement.test_uses_parametrized_resource.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_uses_parametrized_resource_TestAutouseManagement.test_uses_parametrized_resource.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1977, "end_line": 2001, "span_ids": ["TestAutouseManagement.test_uses_parametrized_resource"], 
"tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n\n def test_uses_parametrized_resource(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(params=[1,2])\n def arg(request):\n return request.param\n\n @pytest.fixture(autouse=True)\n def something(arg):\n values.append(arg)\n\n def test_hello():\n if len(values) == 1:\n assert values == [1]\n elif len(values) == 2:\n assert values == [1, 2]\n else:\n 0/0\n\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_session_parametrized_function_TestAutouseManagement.test_session_parametrized_function.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_session_parametrized_function_TestAutouseManagement.test_session_parametrized_function.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2003, "end_line": 2028, "span_ids": ["TestAutouseManagement.test_session_parametrized_function"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n\n def test_session_parametrized_function(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n values = []\n\n @pytest.fixture(scope=\"session\", params=[1,2])\n def arg(request):\n return request.param\n\n @pytest.fixture(scope=\"function\", autouse=True)\n def append(request, arg):\n if request.function.__name__ == \"test_some\":\n values.append(arg)\n\n def test_some():\n pass\n\n def test_result(arg):\n assert len(values) == arg\n assert values[:arg] == [1,2][:arg]\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\", \"-s\")\n reprec.assertoutcome(passed=4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_class_function_parametrization_finalization_TestAutouseManagement.test_class_function_parametrization_finalization.assert_values_fin_a1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_class_function_parametrization_finalization_TestAutouseManagement.test_class_function_parametrization_finalization.assert_values_fin_a1", "embedding": null, "metadata": {"file_path": 
"testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2030, "end_line": 2073, "span_ids": ["TestAutouseManagement.test_class_function_parametrization_finalization"], "tokens": 311}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n\n def test_class_function_parametrization_finalization(\n self, pytester: Pytester\n ) -> None:\n p = pytester.makeconftest(\n \"\"\"\n import pytest\n import pprint\n\n values = []\n\n @pytest.fixture(scope=\"function\", params=[1,2])\n def farg(request):\n return request.param\n\n @pytest.fixture(scope=\"class\", params=list(\"ab\"))\n def carg(request):\n return request.param\n\n @pytest.fixture(scope=\"function\", autouse=True)\n def append(request, farg, carg):\n def fin():\n values.append(\"fin_%s%s\" % (carg, farg))\n request.addfinalizer(fin)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n class TestClass(object):\n def test_1(self):\n pass\n class TestClass2(object):\n def test_2(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\", \"-s\", \"--confcutdir\", pytester.path)\n reprec.assertoutcome(passed=8)\n config = reprec.getcalls(\"pytest_unconfigure\")[0].config\n values = config.pluginmanager._getconftestmodules(p, importmode=\"prepend\")[\n 0\n ].values\n assert values == [\"fin_a1\", \"fin_a2\", \"fin_b1\", \"fin_b2\"] * 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_scope_ordering_TestAutouseManagement.test_scope_ordering.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_scope_ordering_TestAutouseManagement.test_scope_ordering.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2075, "end_line": 2096, "span_ids": ["TestAutouseManagement.test_scope_ordering"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n\n def test_scope_ordering(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(scope=\"function\", autouse=True)\n def fappend2():\n values.append(2)\n @pytest.fixture(scope=\"class\", autouse=True)\n def classappend3():\n values.append(3)\n @pytest.fixture(scope=\"module\", autouse=True)\n def mappend():\n values.append(1)\n\n class TestHallo(object):\n def test_method(self):\n assert values == [1,3,2]\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_parametrization_setup_teardown_ordering_TestAutouseManagement.test_parametrization_setup_teardown_ordering.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_parametrization_setup_teardown_ordering_TestAutouseManagement.test_parametrization_setup_teardown_ordering.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2098, "end_line": 2125, "span_ids": ["TestAutouseManagement.test_parametrization_setup_teardown_ordering"], "tokens": 281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n\n def test_parametrization_setup_teardown_ordering(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n def pytest_generate_tests(metafunc):\n if metafunc.cls is None:\n assert metafunc.function is test_finish\n if metafunc.cls is not None:\n metafunc.parametrize(\"item\", [1,2], scope=\"class\")\n class TestClass(object):\n @pytest.fixture(scope=\"class\", autouse=True)\n def addteardown(self, item, request):\n values.append(\"setup-%d\" % item)\n request.addfinalizer(lambda: values.append(\"teardown-%d\" % item))\n def test_step1(self, item):\n values.append(\"step1-%d\" % item)\n def test_step2(self, item):\n values.append(\"step2-%d\" % item)\n\n def test_finish():\n print(values)\n assert values == [\"setup-1\", \"step1-1\", \"step2-1\", \"teardown-1\",\n \"setup-2\", \"step1-2\", \"step2-2\", \"teardown-2\",]\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_ordering_autouse_before_explicit_TestAutouseManagement.test_ordering_autouse_before_explicit.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_ordering_autouse_before_explicit_TestAutouseManagement.test_ordering_autouse_before_explicit.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2127, "end_line": 2144, "span_ids": ["TestAutouseManagement.test_ordering_autouse_before_explicit"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestAutouseManagement:\n\n def test_ordering_autouse_before_explicit(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n values = []\n @pytest.fixture(autouse=True)\n def fix1():\n values.append(1)\n @pytest.fixture()\n def arg1():\n values.append(2)\n def test_hello(arg1):\n assert values == [1,2]\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_ordering_dependencies_torndown_first_TestAutouseManagement.test_ordering_dependencies_torndown_first.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseManagement.test_ordering_dependencies_torndown_first_TestAutouseManagement.test_ordering_dependencies_torndown_first.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2146, "end_line": 2173, "span_ids": ["TestAutouseManagement.test_ordering_dependencies_torndown_first"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseManagement:\n\n @pytest.mark.parametrize(\"param1\", [\"\", \"params=[1]\"], ids=[\"p00\", \"p01\"])\n @pytest.mark.parametrize(\"param2\", [\"\", \"params=[1]\"], ids=[\"p10\", \"p11\"])\n def test_ordering_dependencies_torndown_first(\n self, pytester: Pytester, param1, param2\n ) -> None:\n \"\"\"#226\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(%(param1)s)\n def arg1(request):\n request.addfinalizer(lambda: values.append(\"fin1\"))\n values.append(\"new1\")\n @pytest.fixture(%(param2)s)\n def arg2(request, arg1):\n request.addfinalizer(lambda: values.append(\"fin2\"))\n values.append(\"new2\")\n\n def test_arg(arg2):\n pass\n def test_check():\n assert values == [\"new1\", \"new2\", \"fin2\", \"fin1\"]\n \"\"\"\n % locals()\n )\n reprec = pytester.inline_run(\"-s\")\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_override_parametrized_fixture_issue_979_TestFixtureMarker.test_override_parametrized_fixture_issue_979.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_override_parametrized_fixture_issue_979_TestFixtureMarker.test_override_parametrized_fixture_issue_979.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2212, "end_line": 2238, "span_ids": ["TestFixtureMarker.test_override_parametrized_fixture_issue_979"], 
"tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n @pytest.mark.parametrize(\n \"param_args\",\n [\"'fixt, val'\", \"'fixt,val'\", \"['fixt', 'val']\", \"('fixt', 'val')\"],\n )\n def test_override_parametrized_fixture_issue_979(\n self, pytester: Pytester, param_args\n ) -> None:\n \"\"\"Make sure a parametrized argument can override a parametrized fixture.\n\n This was a regression introduced in the fix for #736.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2])\n def fixt(request):\n return request.param\n\n @pytest.mark.parametrize(%s, [(3, 'x'), (4, 'x')])\n def test_foo(fixt, val):\n pass\n \"\"\"\n % param_args\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_TestFixtureMarker.test_scope_session.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_TestFixtureMarker.test_scope_session.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2240, "end_line": 2262, "span_ids": ["TestFixtureMarker.test_scope_session"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_session(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(scope=\"module\")\n def arg():\n values.append(1)\n return 1\n\n def test_1(arg):\n assert arg == 1\n def test_2(arg):\n assert arg == 1\n assert len(values) == 1\n class TestClass(object):\n def test3(self, arg):\n assert arg == 1\n assert len(values) == 1\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_exc_TestFixtureMarker.test_scope_session_exc.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_exc_TestFixtureMarker.test_scope_session_exc.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2264, "end_line": 2283, "span_ids": 
["TestFixtureMarker.test_scope_session_exc"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_session_exc(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(scope=\"session\")\n def fix():\n values.append(1)\n pytest.skip('skipping')\n\n def test_1(fix):\n pass\n def test_2(fix):\n pass\n def test_last():\n assert values == [1]\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(skipped=2, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_exc_two_fix_TestFixtureMarker.test_scope_session_exc_two_fix.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_session_exc_two_fix_TestFixtureMarker.test_scope_session_exc_two_fix.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2285, "end_line": 2309, "span_ids": ["TestFixtureMarker.test_scope_session_exc_two_fix"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_session_exc_two_fix(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n m = []\n @pytest.fixture(scope=\"session\")\n def a():\n values.append(1)\n pytest.skip('skipping')\n @pytest.fixture(scope=\"session\")\n def b(a):\n m.append(1)\n\n def test_1(b):\n pass\n def test_2(b):\n pass\n def test_last():\n assert values == [1]\n assert m == []\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(skipped=2, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_exc_TestFixtureMarker.test_scope_exc.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_exc_TestFixtureMarker.test_scope_exc.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2311, "end_line": 2338, "span_ids": ["TestFixtureMarker.test_scope_exc"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", 
"end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_exc(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_foo=\"\"\"\n def test_foo(fix):\n pass\n \"\"\",\n test_bar=\"\"\"\n def test_bar(fix):\n pass\n \"\"\",\n conftest=\"\"\"\n import pytest\n reqs = []\n @pytest.fixture(scope=\"session\")\n def fix(request):\n reqs.append(1)\n pytest.skip()\n @pytest.fixture\n def req_list():\n return reqs\n \"\"\",\n test_real=\"\"\"\n def test_last(req_list):\n assert req_list == [1]\n \"\"\",\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(skipped=2, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_module_uses_session_TestFixtureMarker.test_scope_module_uses_session.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_module_uses_session_TestFixtureMarker.test_scope_module_uses_session.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2340, "end_line": 2362, "span_ids": ["TestFixtureMarker.test_scope_module_uses_session"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_module_uses_session(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(scope=\"module\")\n def arg():\n values.append(1)\n return 1\n\n def test_1(arg):\n assert arg == 1\n def test_2(arg):\n assert arg == 1\n assert len(values) == 1\n class TestClass(object):\n def test3(self, arg):\n assert arg == 1\n assert len(values) == 1\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_module_and_finalizer_TestFixtureMarker.test_scope_module_and_finalizer.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_module_and_finalizer_TestFixtureMarker.test_scope_module_and_finalizer.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2364, "end_line": 2402, "span_ids": ["TestFixtureMarker.test_scope_module_and_finalizer"], "tokens": 275}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_module_and_finalizer(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n finalized_list = []\n created_list = []\n @pytest.fixture(scope=\"module\")\n def arg(request):\n created_list.append(1)\n assert request.scope == \"module\"\n request.addfinalizer(lambda: finalized_list.append(1))\n @pytest.fixture\n def created(request):\n return len(created_list)\n @pytest.fixture\n def finalized(request):\n return len(finalized_list)\n \"\"\"\n )\n pytester.makepyfile(\n test_mod1=\"\"\"\n def test_1(arg, created, finalized):\n assert created == 1\n assert finalized == 0\n def test_2(arg, created, finalized):\n assert created == 1\n assert finalized == 0\"\"\",\n test_mod2=\"\"\"\n def test_3(arg, created, finalized):\n assert created == 2\n assert finalized == 1\"\"\",\n test_mode3=\"\"\"\n def test_4(arg, created, finalized):\n assert created == 3\n assert finalized == 2\n \"\"\",\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_mismatch_various_TestFixtureMarker.test_scope_mismatch_various.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_mismatch_various_TestFixtureMarker.test_scope_mismatch_various.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2404, "end_line": 2429, "span_ids": ["TestFixtureMarker.test_scope_mismatch_various"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_mismatch_various(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n finalized = []\n created = []\n @pytest.fixture(scope=\"function\")\n def arg(request):\n pass\n \"\"\"\n )\n pytester.makepyfile(\n test_mod1=\"\"\"\n import pytest\n @pytest.fixture(scope=\"session\")\n def arg(request):\n request.getfixturevalue(\"arg\")\n def test_1(arg):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n result.stdout.fnmatch_lines(\n [\"*ScopeMismatch*You tried*function*session*request*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_and_scope_TestFixtureMarker.test_parametrize_and_scope.assert_c_in_values": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_and_scope_TestFixtureMarker.test_parametrize_and_scope.assert_c_in_values", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2515, "end_line": 2533, "span_ids": ["TestFixtureMarker.test_parametrize_and_scope"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_parametrize_and_scope(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"module\", params=[\"a\", \"b\", \"c\"])\n def arg(request):\n return request.param\n values = []\n def test_param(arg):\n values.append(arg)\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\")\n reprec.assertoutcome(passed=3)\n values = reprec.getcalls(\"pytest_runtest_call\")[0].item.module.values\n assert len(values) == 3\n assert \"a\" in values\n assert \"b\" in values\n assert \"c\" in values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_mismatch_TestFixtureMarker.test_scope_mismatch.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_scope_mismatch_TestFixtureMarker.test_scope_mismatch.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2535, "end_line": 2555, "span_ids": ["TestFixtureMarker.test_scope_mismatch"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_scope_mismatch(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"function\")\n def arg(request):\n pass\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"session\")\n def arg(arg):\n pass\n def test_mismatch(arg):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*ScopeMismatch*\", \"*1 error*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_order_TestFixtureMarker.test_parametrize_separated_order.assert_values_1_1_2": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_order_TestFixtureMarker.test_parametrize_separated_order.assert_values_1_1_2", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2557, "end_line": 2576, "span_ids": ["TestFixtureMarker.test_parametrize_separated_order"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_parametrize_separated_order(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"module\", params=[1, 2])\n def arg(request):\n return request.param\n\n values = []\n def test_1(arg):\n values.append(arg)\n def test_2(arg):\n values.append(arg)\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\")\n reprec.assertoutcome(passed=4)\n values = reprec.getcalls(\"pytest_runtest_call\")[0].item.module.values\n assert values == [1, 1, 2, 2]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_module_parametrized_ordering_TestFixtureMarker.test_module_parametrized_ordering.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_module_parametrized_ordering_TestFixtureMarker.test_module_parametrized_ordering.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2578, "end_line": 2635, "span_ids": ["TestFixtureMarker.test_module_parametrized_ordering"], "tokens": 493}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_module_parametrized_ordering(self, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n console_output_style=classic\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"session\", params=\"s1 s2\".split())\n def sarg():\n pass\n @pytest.fixture(scope=\"module\", params=\"m1 m2\".split())\n def marg():\n pass\n \"\"\"\n )\n pytester.makepyfile(\n test_mod1=\"\"\"\n def test_func(sarg):\n pass\n def test_func1(marg):\n pass\n \"\"\",\n test_mod2=\"\"\"\n def test_func2(sarg):\n pass\n def test_func3(sarg, marg):\n pass\n def test_func3b(sarg, marg):\n pass\n def test_func4(marg):\n pass\n \"\"\",\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n \"\"\"\n test_mod1.py::test_func[s1] PASSED\n test_mod2.py::test_func2[s1] PASSED\n test_mod2.py::test_func3[s1-m1] PASSED\n test_mod2.py::test_func3b[s1-m1] PASSED\n 
test_mod2.py::test_func3[s1-m2] PASSED\n test_mod2.py::test_func3b[s1-m2] PASSED\n test_mod1.py::test_func[s2] PASSED\n test_mod2.py::test_func2[s2] PASSED\n test_mod2.py::test_func3[s2-m1] PASSED\n test_mod2.py::test_func3b[s2-m1] PASSED\n test_mod2.py::test_func4[m1] PASSED\n test_mod2.py::test_func3[s2-m2] PASSED\n test_mod2.py::test_func3b[s2-m2] PASSED\n test_mod2.py::test_func4[m2] PASSED\n test_mod1.py::test_func1[m1] PASSED\n test_mod1.py::test_func1[m2] PASSED\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_parametrized_ordering_TestFixtureMarker.test_dynamic_parametrized_ordering.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_parametrized_ordering_TestFixtureMarker.test_dynamic_parametrized_ordering.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2637, "end_line": 2684, "span_ids": ["TestFixtureMarker.test_dynamic_parametrized_ordering"], "tokens": 413}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_dynamic_parametrized_ordering(self, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n console_output_style=classic\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_configure(config):\n class DynamicFixturePlugin(object):\n @pytest.fixture(scope='session', params=['flavor1', 'flavor2'])\n def flavor(self, request):\n return request.param\n config.pluginmanager.register(DynamicFixturePlugin(), 'flavor-fixture')\n\n @pytest.fixture(scope='session', params=['vxlan', 'vlan'])\n def encap(request):\n return request.param\n\n @pytest.fixture(scope='session', autouse='True')\n def reprovision(request, flavor, encap):\n pass\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test(reprovision):\n pass\n def test2(reprovision):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n \"\"\"\n test_dynamic_parametrized_ordering.py::test[flavor1-vxlan] PASSED\n test_dynamic_parametrized_ordering.py::test2[flavor1-vxlan] PASSED\n test_dynamic_parametrized_ordering.py::test[flavor2-vxlan] PASSED\n test_dynamic_parametrized_ordering.py::test2[flavor2-vxlan] PASSED\n test_dynamic_parametrized_ordering.py::test[flavor2-vlan] PASSED\n test_dynamic_parametrized_ordering.py::test2[flavor2-vlan] PASSED\n test_dynamic_parametrized_ordering.py::test[flavor1-vlan] PASSED\n test_dynamic_parametrized_ordering.py::test2[flavor1-vlan] PASSED\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_ordering_TestFixtureMarker.test_class_ordering.result_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_ordering_TestFixtureMarker.test_class_ordering.result_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2686, "end_line": 2744, "span_ids": ["TestFixtureMarker.test_class_ordering"], "tokens": 509}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_class_ordering(self, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n console_output_style=classic\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n values = []\n\n @pytest.fixture(scope=\"function\", params=[1,2])\n def farg(request):\n return request.param\n\n @pytest.fixture(scope=\"class\", params=list(\"ab\"))\n def carg(request):\n return request.param\n\n @pytest.fixture(scope=\"function\", autouse=True)\n def append(request, farg, carg):\n def fin():\n values.append(\"fin_%s%s\" % (carg, farg))\n request.addfinalizer(fin)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n class TestClass2(object):\n def test_1(self):\n pass\n def test_2(self):\n pass\n class TestClass(object):\n def test_3(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-vs\")\n result.stdout.re_match_lines(\n r\"\"\"\n test_class_ordering.py::TestClass2::test_1\\[a-1\\] PASSED\n test_class_ordering.py::TestClass2::test_1\\[a-2\\] PASSED\n test_class_ordering.py::TestClass2::test_2\\[a-1\\] PASSED\n test_class_ordering.py::TestClass2::test_2\\[a-2\\] PASSED\n test_class_ordering.py::TestClass2::test_1\\[b-1\\] PASSED\n test_class_ordering.py::TestClass2::test_1\\[b-2\\] PASSED\n test_class_ordering.py::TestClass2::test_2\\[b-1\\] PASSED\n test_class_ordering.py::TestClass2::test_2\\[b-2\\] PASSED\n test_class_ordering.py::TestClass::test_3\\[a-1\\] PASSED\n test_class_ordering.py::TestClass::test_3\\[a-2\\] PASSED\n test_class_ordering.py::TestClass::test_3\\[b-1\\] PASSED\n test_class_ordering.py::TestClass::test_3\\[b-2\\] PASSED\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_order_higher_scope_first_TestFixtureMarker.test_parametrize_separated_order_higher_scope_first.assert_values_expected": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_order_higher_scope_first_TestFixtureMarker.test_parametrize_separated_order_higher_scope_first.assert_values_expected", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2746, "end_line": 2822, "span_ids": 
["TestFixtureMarker.test_parametrize_separated_order_higher_scope_first"], "tokens": 510}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_parametrize_separated_order_higher_scope_first(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"function\", params=[1, 2])\n def arg(request):\n param = request.param\n request.addfinalizer(lambda: values.append(\"fin:%s\" % param))\n values.append(\"create:%s\" % param)\n return request.param\n\n @pytest.fixture(scope=\"module\", params=[\"mod1\", \"mod2\"])\n def modarg(request):\n param = request.param\n request.addfinalizer(lambda: values.append(\"fin:%s\" % param))\n values.append(\"create:%s\" % param)\n return request.param\n\n values = []\n def test_1(arg):\n values.append(\"test1\")\n def test_2(modarg):\n values.append(\"test2\")\n def test_3(arg, modarg):\n values.append(\"test3\")\n def test_4(modarg, arg):\n values.append(\"test4\")\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\")\n reprec.assertoutcome(passed=12)\n values = reprec.getcalls(\"pytest_runtest_call\")[0].item.module.values\n expected = [\n \"create:1\",\n \"test1\",\n \"fin:1\",\n \"create:2\",\n \"test1\",\n \"fin:2\",\n \"create:mod1\",\n \"test2\",\n \"create:1\",\n \"test3\",\n \"fin:1\",\n \"create:2\",\n \"test3\",\n \"fin:2\",\n \"create:1\",\n \"test4\",\n \"fin:1\",\n \"create:2\",\n \"test4\",\n \"fin:2\",\n \"fin:mod1\",\n \"create:mod2\",\n \"test2\",\n \"create:1\",\n \"test3\",\n \"fin:1\",\n \"create:2\",\n \"test3\",\n \"fin:2\",\n \"create:1\",\n \"test4\",\n \"fin:1\",\n \"create:2\",\n \"test4\",\n \"fin:2\",\n \"fin:mod2\",\n ]\n import pprint\n\n pprint.pprint(list(zip(values, expected)))\n assert values == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_fixture_finalizer_TestFixtureMarker.test_fixture_finalizer.for_test_in_test_browse.reprec_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_fixture_finalizer_TestFixtureMarker.test_fixture_finalizer.for_test_in_test_browse.reprec_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2865, "end_line": 2897, "span_ids": ["TestFixtureMarker.test_fixture_finalizer"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_fixture_finalizer(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n import sys\n\n @pytest.fixture\n def browser(request):\n\n def finalize():\n 
sys.stdout.write_text('Finalized')\n request.addfinalizer(finalize)\n return {}\n \"\"\"\n )\n b = pytester.mkdir(\"subdir\")\n b.joinpath(\"test_overridden_fixture_finalizer.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def browser(browser):\n browser['visited'] = True\n return browser\n\n def test_browser(browser):\n assert browser['visited'] is True\n \"\"\"\n )\n )\n reprec = pytester.runpytest(\"-s\")\n for test in [\"test_browser\"]:\n reprec.stdout.fnmatch_lines([\"*Finalized*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_scope_with_normal_tests_TestFixtureMarker.test_class_scope_with_normal_tests.for_test_in_test_a_t.assert_reprec_matchreport": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_scope_with_normal_tests_TestFixtureMarker.test_class_scope_with_normal_tests.for_test_in_test_a_t.assert_reprec_matchreport", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2899, "end_line": 2925, "span_ids": ["TestFixtureMarker.test_class_scope_with_normal_tests"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_class_scope_with_normal_tests(self, pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import pytest\n\n class Box(object):\n value = 0\n\n @pytest.fixture(scope='class')\n def a(request):\n Box.value += 1\n return Box.value\n\n def test_a(a):\n assert a == 1\n\n class Test1(object):\n def test_b(self, a):\n assert a == 2\n\n class Test2(object):\n def test_c(self, a):\n assert a == 3\"\"\"\n )\n reprec = pytester.inline_run(testpath)\n for test in [\"test_a\", \"test_b\", \"test_c\"]:\n assert reprec.matchreport(test).passed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_request_is_clean_TestFixtureMarker.test_request_is_clean.assert_values_1_2_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_request_is_clean_TestFixtureMarker.test_request_is_clean.assert_values_1_2_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2927, "end_line": 2941, "span_ids": ["TestFixtureMarker.test_request_is_clean"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_request_is_clean(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(params=[1, 2])\n def fix(request):\n request.addfinalizer(lambda: values.append(request.param))\n def test_fix(fix):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\")\n values = reprec.getcalls(\"pytest_runtest_call\")[0].item.module.values\n assert values == [1, 2]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_lifecycle_TestFixtureMarker.test_parametrize_separated_lifecycle.assert_values_5_fin2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_separated_lifecycle_TestFixtureMarker.test_parametrize_separated_lifecycle.assert_values_5_fin2", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2943, "end_line": 2970, "span_ids": ["TestFixtureMarker.test_parametrize_separated_lifecycle"], "tokens": 229}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_parametrize_separated_lifecycle(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n values = []\n @pytest.fixture(scope=\"module\", params=[1, 2])\n def arg(request):\n x = request.param\n request.addfinalizer(lambda: values.append(\"fin%s\" % x))\n return request.param\n def test_1(arg):\n values.append(arg)\n def test_2(arg):\n values.append(arg)\n \"\"\"\n )\n reprec = pytester.inline_run(\"-vs\")\n reprec.assertoutcome(passed=4)\n values = reprec.getcalls(\"pytest_runtest_call\")[0].item.module.values\n import pprint\n\n pprint.pprint(values)\n # assert len(values) == 6\n assert values[0] == values[1] == 1\n assert values[2] == \"fin1\"\n assert values[3] == values[4] == 2\n assert values[5] == \"fin2\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_function_scoped_finalizers_called_TestFixtureMarker.test_parametrize_function_scoped_finalizers_called.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_function_scoped_finalizers_called_TestFixtureMarker.test_parametrize_function_scoped_finalizers_called.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2972, "end_line": 2996, "span_ids": ["TestFixtureMarker.test_parametrize_function_scoped_finalizers_called"], "tokens": 191}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_parametrize_function_scoped_finalizers_called(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"function\", params=[1, 2])\n def arg(request):\n x = request.param\n request.addfinalizer(lambda: values.append(\"fin%s\" % x))\n return request.param\n\n values = []\n def test_1(arg):\n values.append(arg)\n def test_2(arg):\n values.append(arg)\n def test_3():\n assert len(values) == 8\n assert values == [1, \"fin1\", 2, \"fin2\", 1, \"fin1\", 2, \"fin2\"]\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\")\n reprec.assertoutcome(passed=5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_finalizer_order_on_parametrization_TestFixtureMarker.test_finalizer_order_on_parametrization.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_finalizer_order_on_parametrization_TestFixtureMarker.test_finalizer_order_on_parametrization.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2998, "end_line": 3035, "span_ids": ["TestFixtureMarker.test_finalizer_order_on_parametrization"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n @pytest.mark.parametrize(\"scope\", [\"session\", \"function\", \"module\"])\n def test_finalizer_order_on_parametrization(\n self, scope, pytester: Pytester\n ) -> None:\n \"\"\"#246\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n\n @pytest.fixture(scope=%(scope)r, params=[\"1\"])\n def fix1(request):\n return request.param\n\n @pytest.fixture(scope=%(scope)r)\n def fix2(request, base):\n def cleanup_fix2():\n assert not values, \"base should not have been finalized\"\n request.addfinalizer(cleanup_fix2)\n\n @pytest.fixture(scope=%(scope)r)\n def base(request, fix1):\n def cleanup_base():\n values.append(\"fin_base\")\n print(\"finalizing base\")\n request.addfinalizer(cleanup_base)\n\n def test_begin():\n pass\n def test_baz(base, fix2):\n pass\n def test_other():\n pass\n \"\"\"\n % {\"scope\": scope}\n )\n reprec = pytester.inline_run(\"-lvs\")\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_scope_parametrization_ordering_TestFixtureMarker.test_class_scope_parametrization_ordering.assert_values_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_class_scope_parametrization_ordering_TestFixtureMarker.test_class_scope_parametrization_ordering.assert_values_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3037, "end_line": 3074, "span_ids": ["TestFixtureMarker.test_class_scope_parametrization_ordering"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_class_scope_parametrization_ordering(self, pytester: Pytester) -> None:\n \"\"\"#396\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(params=[\"John\", \"Doe\"], scope=\"class\")\n def human(request):\n request.addfinalizer(lambda: values.append(\"fin %s\" % request.param))\n return request.param\n\n class TestGreetings(object):\n def test_hello(self, human):\n values.append(\"test_hello\")\n\n class TestMetrics(object):\n def test_name(self, human):\n values.append(\"test_name\")\n\n def test_population(self, human):\n values.append(\"test_population\")\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=6)\n values = reprec.getcalls(\"pytest_runtest_call\")[0].item.module.values\n assert values == [\n \"test_hello\",\n \"fin John\",\n \"test_hello\",\n \"fin Doe\",\n \"test_name\",\n \"test_population\",\n \"fin John\",\n \"test_name\",\n \"test_population\",\n \"fin Doe\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_setup_function_TestFixtureMarker.test_parametrize_setup_function.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrize_setup_function_TestFixtureMarker.test_parametrize_setup_function.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3076, "end_line": 3107, "span_ids": ["TestFixtureMarker.test_parametrize_setup_function"], "tokens": 235}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_parametrize_setup_function(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"module\", params=[1, 2])\n def arg(request):\n return request.param\n\n 
@pytest.fixture(scope=\"module\", autouse=True)\n def mysetup(request, arg):\n request.addfinalizer(lambda: values.append(\"fin%s\" % arg))\n values.append(\"setup%s\" % arg)\n\n values = []\n def test_1(arg):\n values.append(arg)\n def test_2(arg):\n values.append(arg)\n def test_3():\n import pprint\n pprint.pprint(values)\n if arg == 1:\n assert values == [\"setup1\", 1, 1, ]\n elif arg == 2:\n assert values == [\"setup1\", 1, 1, \"fin1\",\n \"setup2\", 2, 2, ]\n\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\")\n reprec.assertoutcome(passed=6)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestScopeAccess_TestRequestScopeAccess.test_setup.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestScopeAccess_TestRequestScopeAccess.test_setup.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3205, "end_line": 3235, "span_ids": ["TestRequestScopeAccess.test_setup", "TestRequestScopeAccess"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestScopeAccess:\n pytestmark = pytest.mark.parametrize(\n (\"scope\", \"ok\", \"error\"),\n [\n [\"session\", \"\", \"fspath class function module\"],\n [\"module\", \"module fspath\", \"cls function\"],\n [\"class\", \"module fspath cls\", \"function\"],\n [\"function\", \"module fspath cls function\", \"\"],\n ],\n )\n\n def test_setup(self, pytester: Pytester, scope, ok, error) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=%r, autouse=True)\n def myscoped(request):\n for x in %r:\n assert hasattr(request, x)\n for x in %r:\n pytest.raises(AttributeError, lambda:\n getattr(request, x))\n assert request.session\n assert request.config\n def test_func():\n pass\n \"\"\"\n % (scope, ok.split(), error.split())\n )\n reprec = pytester.inline_run(\"-l\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestScopeAccess.test_funcarg_TestRequestScopeAccess.test_funcarg.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestScopeAccess.test_funcarg_TestRequestScopeAccess.test_funcarg.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3237, "end_line": 3256, "span_ids": ["TestRequestScopeAccess.test_funcarg"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", 
"tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestScopeAccess:\n\n def test_funcarg(self, pytester: Pytester, scope, ok, error) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=%r)\n def arg(request):\n for x in %r:\n assert hasattr(request, x)\n for x in %r:\n pytest.raises(AttributeError, lambda:\n getattr(request, x))\n assert request.session\n assert request.config\n def test_func(arg):\n pass\n \"\"\"\n % (scope, ok.split(), error.split())\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors_TestErrors.test_subfactory_missing_funcarg.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors_TestErrors.test_subfactory_missing_funcarg.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3259, "end_line": 3275, "span_ids": ["TestErrors", "TestErrors.test_subfactory_missing_funcarg"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestErrors:\n def test_subfactory_missing_funcarg(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture()\n def gen(qwe123):\n return 1\n def test_something(gen):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n result.stdout.fnmatch_lines(\n [\"*def gen(qwe123):*\", \"*fixture*qwe123*not found*\", \"*1 error*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors.test_issue498_fixture_finalizer_failing_TestErrors.test_issue498_fixture_finalizer_failing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors.test_issue498_fixture_finalizer_failing_TestErrors.test_issue498_fixture_finalizer_failing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3277, "end_line": 3306, "span_ids": ["TestErrors.test_issue498_fixture_finalizer_failing"], "tokens": 186}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestErrors:\n\n def 
test_issue498_fixture_finalizer_failing(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def fix1(request):\n def f():\n raise KeyError\n request.addfinalizer(f)\n return object()\n\n values = []\n def test_1(fix1):\n values.append(fix1)\n def test_2(fix1):\n values.append(fix1)\n def test_3():\n assert values[0] != values[1]\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *ERROR*teardown*test_1*\n *KeyError*\n *ERROR*teardown*test_2*\n *KeyError*\n *3 pass*2 errors*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors.test_setupfunc_missing_funcarg_TestErrors.test_setupfunc_missing_funcarg.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestErrors.test_setupfunc_missing_funcarg_TestErrors.test_setupfunc_missing_funcarg.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3308, "end_line": 3323, "span_ids": ["TestErrors.test_setupfunc_missing_funcarg"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestErrors:\n\n def test_setupfunc_missing_funcarg(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(autouse=True)\n def gen(qwe123):\n return 1\n def test_something():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n result.stdout.fnmatch_lines(\n [\"*def gen(qwe123):*\", \"*fixture*qwe123*not found*\", \"*1 error*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures_TestShowFixtures.test_show_fixtures_verbose.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures_TestShowFixtures.test_show_fixtures_verbose.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3326, "end_line": 3351, "span_ids": ["TestShowFixtures.test_show_fixtures_verbose", "TestShowFixtures.test_funcarg_compat", "TestShowFixtures.test_show_fixtures", "TestShowFixtures"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n def test_funcarg_compat(self, pytester: Pytester) -> None:\n config = 
pytester.parseconfigure(\"--funcargs\")\n assert config.option.showfixtures\n\n def test_show_fixtures(self, pytester: Pytester) -> None:\n result = pytester.runpytest(\"--fixtures\")\n result.stdout.fnmatch_lines(\n [\n \"tmpdir_factory [[]session scope[]]\",\n \"*for the test session*\",\n \"tmpdir\",\n \"*temporary directory*\",\n ]\n )\n\n def test_show_fixtures_verbose(self, pytester: Pytester) -> None:\n result = pytester.runpytest(\"--fixtures\", \"-v\")\n result.stdout.fnmatch_lines(\n [\n \"tmpdir_factory [[]session scope[]] -- *tmpdir.py*\",\n \"*for the test session*\",\n \"tmpdir -- *tmpdir.py*\",\n \"*temporary directory*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_conftest_TestShowFixtures.test_show_fixtures_conftest.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_conftest_TestShowFixtures.test_show_fixtures_conftest.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3376, "end_line": 3401, "span_ids": ["TestShowFixtures.test_show_fixtures_conftest"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n @pytest.mark.parametrize(\"testmod\", [True, False])\n def test_show_fixtures_conftest(self, pytester: Pytester, testmod) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture\n def arg1():\n \"\"\" hello world \"\"\"\n '''\n )\n if testmod:\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--fixtures\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *tmpdir*\n *fixtures defined from*conftest*\n *arg1*\n *hello world*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_trimmed_doc_TestShowFixtures.test_show_fixtures_trimmed_doc.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_trimmed_doc_TestShowFixtures.test_show_fixtures_trimmed_doc.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3403, "end_line": 3438, "span_ids": ["TestShowFixtures.test_show_fixtures_trimmed_doc"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n def test_show_fixtures_trimmed_doc(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n textwrap.dedent(\n '''\\\n import pytest\n @pytest.fixture\n def arg1():\n \"\"\"\n line1\n line2\n\n \"\"\"\n @pytest.fixture\n def arg2():\n \"\"\"\n line1\n line2\n\n \"\"\"\n '''\n )\n )\n result = pytester.runpytest(\"--fixtures\", p)\n result.stdout.fnmatch_lines(\n textwrap.dedent(\n \"\"\"\\\n * fixtures defined from test_show_fixtures_trimmed_doc *\n arg2\n line1\n line2\n arg1\n line1\n line2\n \"\"\"\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_doc_TestShowFixtures.test_show_fixtures_indented_doc.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_doc_TestShowFixtures.test_show_fixtures_indented_doc.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3440, "end_line": 3464, "span_ids": ["TestShowFixtures.test_show_fixtures_indented_doc"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n def test_show_fixtures_indented_doc(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n textwrap.dedent(\n '''\\\n import pytest\n @pytest.fixture\n def fixture1():\n \"\"\"\n line1\n indented line\n \"\"\"\n '''\n )\n )\n result = pytester.runpytest(\"--fixtures\", p)\n result.stdout.fnmatch_lines(\n textwrap.dedent(\n \"\"\"\\\n * fixtures defined from test_show_fixtures_indented_doc *\n fixture1\n line1\n indented line\n \"\"\"\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_doc_first_line_unindented_TestShowFixtures.test_show_fixtures_indented_doc_first_line_unindented.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_doc_first_line_unindented_TestShowFixtures.test_show_fixtures_indented_doc_first_line_unindented.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3466, "end_line": 3493, "span_ids": ["TestShowFixtures.test_show_fixtures_indented_doc_first_line_unindented"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n def test_show_fixtures_indented_doc_first_line_unindented(\n self, pytester: Pytester\n ) -> None:\n p = pytester.makepyfile(\n textwrap.dedent(\n '''\\\n import pytest\n @pytest.fixture\n def fixture1():\n \"\"\"line1\n line2\n indented line\n \"\"\"\n '''\n )\n )\n result = pytester.runpytest(\"--fixtures\", p)\n result.stdout.fnmatch_lines(\n textwrap.dedent(\n \"\"\"\\\n * fixtures defined from test_show_fixtures_indented_doc_first_line_unindented *\n fixture1\n line1\n line2\n indented line\n \"\"\"\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_in_class_TestShowFixtures.test_show_fixtures_indented_in_class.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_indented_in_class_TestShowFixtures.test_show_fixtures_indented_in_class.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3495, "end_line": 3521, "span_ids": ["TestShowFixtures.test_show_fixtures_indented_in_class"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n def test_show_fixtures_indented_in_class(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n textwrap.dedent(\n '''\\\n import pytest\n class TestClass(object):\n @pytest.fixture\n def fixture1(self):\n \"\"\"line1\n line2\n indented line\n \"\"\"\n '''\n )\n )\n result = pytester.runpytest(\"--fixtures\", p)\n result.stdout.fnmatch_lines(\n textwrap.dedent(\n \"\"\"\\\n * fixtures defined from test_show_fixtures_indented_in_class *\n fixture1\n line1\n line2\n indented line\n \"\"\"\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_different_files_TestShowFixtures.test_show_fixtures_different_files.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_different_files_TestShowFixtures.test_show_fixtures_different_files.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3523, "end_line": 3562, "span_ids": ["TestShowFixtures.test_show_fixtures_different_files"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", 
"tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n def test_show_fixtures_different_files(self, pytester: Pytester) -> None:\n \"\"\"`--fixtures` only shows fixtures from first file (#833).\"\"\"\n pytester.makepyfile(\n test_a='''\n import pytest\n\n @pytest.fixture\n def fix_a():\n \"\"\"Fixture A\"\"\"\n pass\n\n def test_a(fix_a):\n pass\n '''\n )\n pytester.makepyfile(\n test_b='''\n import pytest\n\n @pytest.fixture\n def fix_b():\n \"\"\"Fixture B\"\"\"\n pass\n\n def test_b(fix_b):\n pass\n '''\n )\n result = pytester.runpytest(\"--fixtures\")\n result.stdout.fnmatch_lines(\n \"\"\"\n * fixtures defined from test_a *\n fix_a\n Fixture A\n\n * fixtures defined from test_b *\n fix_b\n Fixture B\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_scoped_TestContextManagerFixtureFuncs.test_scoped.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_scoped_TestContextManagerFixtureFuncs.test_scoped.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3643, "end_line": 3666, "span_ids": ["TestContextManagerFixtureFuncs.test_scoped"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestContextManagerFixtureFuncs:\n\n def test_scoped(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"module\")\n def arg1():\n print(\"setup\")\n yield 1\n print(\"teardown\")\n def test_1(arg1):\n print(\"test1\", arg1)\n def test_2(arg1):\n print(\"test2\", arg1)\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *setup*\n *test1 1*\n *test2 1*\n *teardown*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_setup_exception_TestContextManagerFixtureFuncs.test_teardown_exception.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_setup_exception_TestContextManagerFixtureFuncs.test_teardown_exception.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3668, "end_line": 3706, "span_ids": ["TestContextManagerFixtureFuncs.test_setup_exception", "TestContextManagerFixtureFuncs.test_teardown_exception"], "tokens": 220}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestContextManagerFixtureFuncs:\n\n def test_setup_exception(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"module\")\n def arg1():\n pytest.fail(\"setup\")\n yield 1\n def test_1(arg1):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *pytest.fail*setup*\n *1 error*\n \"\"\"\n )\n\n def test_teardown_exception(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"module\")\n def arg1():\n yield 1\n pytest.fail(\"teardown\")\n def test_1(arg1):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *pytest.fail*teardown*\n *1 passed*1 error*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_yields_more_than_one_TestContextManagerFixtureFuncs.test_custom_name.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs.test_yields_more_than_one_TestContextManagerFixtureFuncs.test_custom_name.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3708, "end_line": 3740, "span_ids": ["TestContextManagerFixtureFuncs.test_yields_more_than_one", "TestContextManagerFixtureFuncs.test_custom_name"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestContextManagerFixtureFuncs:\n\n def test_yields_more_than_one(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"module\")\n def arg1():\n yield 1\n yield 2\n def test_1(arg1):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *fixture function*\n *test_yields*:2*\n \"\"\"\n )\n\n def test_custom_name(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(name='meow')\n def arg1():\n return 'mew'\n def test_1(meow):\n print(meow)\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines([\"*mew*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest_TestParameterizedSubRequest.test_call_from_fixture.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest_TestParameterizedSubRequest.test_call_from_fixture.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3743, "end_line": 3772, "span_ids": ["TestParameterizedSubRequest", "TestParameterizedSubRequest.test_call_from_fixture"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParameterizedSubRequest:\n def test_call_from_fixture(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_call_from_fixture=\"\"\"\n import pytest\n\n @pytest.fixture(params=[0, 1, 2])\n def fix_with_param(request):\n return request.param\n\n @pytest.fixture\n def get_named_fixture(request):\n return request.getfixturevalue('fix_with_param')\n\n def test_foo(request, get_named_fixture):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"The requested fixture has no parameter defined for test:\",\n \" test_call_from_fixture.py::test_foo\",\n \"Requested fixture 'fix_with_param' defined in:\",\n \"test_call_from_fixture.py:4\",\n \"Requested here:\",\n \"test_call_from_fixture.py:9\",\n \"*1 error in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_call_from_test_TestParameterizedSubRequest.test_call_from_test.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_call_from_test_TestParameterizedSubRequest.test_call_from_test.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3774, "end_line": 3798, "span_ids": ["TestParameterizedSubRequest.test_call_from_test"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParameterizedSubRequest:\n\n def test_call_from_test(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_call_from_test=\"\"\"\n import pytest\n\n @pytest.fixture(params=[0, 1, 2])\n def fix_with_param(request):\n return request.param\n\n def test_foo(request):\n request.getfixturevalue('fix_with_param')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"The requested fixture has no parameter defined for test:\",\n \" test_call_from_test.py::test_foo\",\n \"Requested fixture 'fix_with_param' defined in:\",\n \"test_call_from_test.py:4\",\n \"Requested here:\",\n \"test_call_from_test.py:8\",\n \"*1 failed*\",\n ]\n )", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_external_fixture_TestParameterizedSubRequest.test_external_fixture.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_external_fixture_TestParameterizedSubRequest.test_external_fixture.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3800, "end_line": 3829, "span_ids": ["TestParameterizedSubRequest.test_external_fixture"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParameterizedSubRequest:\n\n def test_external_fixture(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[0, 1, 2])\n def fix_with_param(request):\n return request.param\n \"\"\"\n )\n\n pytester.makepyfile(\n test_external_fixture=\"\"\"\n def test_foo(request):\n request.getfixturevalue('fix_with_param')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"The requested fixture has no parameter defined for test:\",\n \" test_external_fixture.py::test_foo\",\n \"\",\n \"Requested fixture 'fix_with_param' defined in:\",\n \"conftest.py:4\",\n \"Requested here:\",\n \"test_external_fixture.py:2\",\n \"*1 failed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_pytest_fixture_setup_and_post_finalizer_hook_test_pytest_fixture_setup_and_post_finalizer_hook.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_pytest_fixture_setup_and_post_finalizer_hook_test_pytest_fixture_setup_and_post_finalizer_hook.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3893, "end_line": 3933, "span_ids": ["test_pytest_fixture_setup_and_post_finalizer_hook"], "tokens": 358}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_fixture_setup_and_post_finalizer_hook(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_fixture_setup(fixturedef, request):\n print('ROOT setup hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))\n def pytest_fixture_post_finalizer(fixturedef, request):\n print('ROOT finalizer hook called for {0} from 
{1}'.format(fixturedef.argname, request.node.name))\n \"\"\"\n )\n pytester.makepyfile(\n **{\n \"tests/conftest.py\": \"\"\"\n def pytest_fixture_setup(fixturedef, request):\n print('TESTS setup hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))\n def pytest_fixture_post_finalizer(fixturedef, request):\n print('TESTS finalizer hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))\n \"\"\",\n \"tests/test_hooks.py\": \"\"\"\n import pytest\n\n @pytest.fixture()\n def my_fixture():\n return 'some'\n\n def test_func(my_fixture):\n print('TEST test_func')\n assert my_fixture == 'some'\n \"\"\",\n }\n )\n result = pytester.runpytest(\"-s\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"*TESTS setup hook called for my_fixture from test_func*\",\n \"*ROOT setup hook called for my_fixture from test_func*\",\n \"*TEST test_func*\",\n \"*TESTS finalizer hook called for my_fixture from test_func*\",\n \"*ROOT finalizer hook called for my_fixture from test_func*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering_TestScopeOrdering.test_func_closure_module_auto.assert_request_fixturenam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering_TestScopeOrdering.test_func_closure_module_auto.assert_request_fixturenam", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3936, "end_line": 3973, "span_ids": ["TestScopeOrdering", "TestScopeOrdering.test_func_closure_module_auto"], "tokens": 311}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScopeOrdering:\n \"\"\"Class of tests that ensure fixtures are ordered based on their scopes (#2405)\"\"\"\n\n @pytest.mark.parametrize(\"variant\", [\"mark\", \"autouse\"])\n def test_func_closure_module_auto(\n self, pytester: Pytester, variant, monkeypatch\n ) -> None:\n \"\"\"Semantically identical to the example posted in #2405 when ``use_mark=True``\"\"\"\n monkeypatch.setenv(\"FIXTURE_ACTIVATION_VARIANT\", variant)\n pytester.makepyfile(\n \"\"\"\n import warnings\n import os\n import pytest\n VAR = 'FIXTURE_ACTIVATION_VARIANT'\n VALID_VARS = ('autouse', 'mark')\n\n VARIANT = os.environ.get(VAR)\n if VARIANT is None or VARIANT not in VALID_VARS:\n warnings.warn(\"{!r} is not in {}, assuming autouse\".format(VARIANT, VALID_VARS) )\n variant = 'mark'\n\n @pytest.fixture(scope='module', autouse=VARIANT == 'autouse')\n def m1(): pass\n\n if VARIANT=='mark':\n pytestmark = pytest.mark.usefixtures('m1')\n\n @pytest.fixture(scope='function', autouse=True)\n def f1(): pass\n\n def test_func(m1):\n pass\n \"\"\"\n )\n items, _ = pytester.inline_genitems()\n request = FixtureRequest(items[0], _ispytest=True)\n assert request.fixturenames == \"m1 f1\".split()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_module_TestScopeOrdering.test_func_closure_module.assert_request_fixturenam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_module_TestScopeOrdering.test_func_closure_module.assert_request_fixturenam", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4030, "end_line": 4047, "span_ids": ["TestScopeOrdering.test_func_closure_module"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScopeOrdering:\n\n def test_func_closure_module(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='module')\n def m1(): pass\n\n @pytest.fixture(scope='function')\n def f1(): pass\n\n def test_func(f1, m1):\n pass\n \"\"\"\n )\n items, _ = pytester.inline_genitems()\n request = FixtureRequest(items[0], _ispytest=True)\n assert request.fixturenames == \"m1 f1\".split()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_scopes_reordered_TestScopeOrdering.test_func_closure_scopes_reordered.assert_request_fixturenam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_scopes_reordered_TestScopeOrdering.test_func_closure_scopes_reordered.assert_request_fixturenam", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4049, "end_line": 4080, "span_ids": ["TestScopeOrdering.test_func_closure_scopes_reordered"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScopeOrdering:\n\n def test_func_closure_scopes_reordered(self, pytester: Pytester) -> None:\n \"\"\"Test ensures that fixtures are ordered by scope regardless of the order of the parameters, although\n fixtures of same scope keep the declared order\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='session')\n def s1(): pass\n\n @pytest.fixture(scope='module')\n def m1(): pass\n\n @pytest.fixture(scope='function')\n def f1(): pass\n\n @pytest.fixture(scope='function')\n def f2(): pass\n\n class Test:\n\n @pytest.fixture(scope='class')\n def c1(cls): pass\n\n def test_func(self, f2, f1, c1, m1, s1):\n pass\n \"\"\"\n )\n items, _ = pytester.inline_genitems()\n request = FixtureRequest(items[0], _ispytest=True)\n assert request.fixturenames == 
\"s1 m1 c1 f2 f1\".split()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_same_scope_closer_root_first_TestScopeOrdering.test_func_closure_same_scope_closer_root_first.assert_request_fixturenam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_same_scope_closer_root_first_TestScopeOrdering.test_func_closure_same_scope_closer_root_first.assert_request_fixturenam", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4082, "end_line": 4122, "span_ids": ["TestScopeOrdering.test_func_closure_same_scope_closer_root_first"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScopeOrdering:\n\n def test_func_closure_same_scope_closer_root_first(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Auto-use fixtures of same scope are ordered by closer-to-root first\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='module', autouse=True)\n def m_conf(): pass\n \"\"\"\n )\n pytester.makepyfile(\n **{\n \"sub/conftest.py\": \"\"\"\n import pytest\n\n @pytest.fixture(scope='package', autouse=True)\n def p_sub(): pass\n\n @pytest.fixture(scope='module', autouse=True)\n def m_sub(): pass\n \"\"\",\n \"sub/__init__.py\": \"\",\n \"sub/test_func.py\": \"\"\"\n import pytest\n\n @pytest.fixture(scope='module', autouse=True)\n def m_test(): pass\n\n @pytest.fixture(scope='function')\n def f1(): pass\n\n def test_func(m_test, f1):\n pass\n \"\"\",\n }\n )\n items, _ = pytester.inline_genitems()\n request = FixtureRequest(items[0], _ispytest=True)\n assert request.fixturenames == \"p_sub m_conf m_sub m_test f1\".split()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_all_scopes_complex_TestScopeOrdering.test_func_closure_all_scopes_complex.assert_request_fixturenam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_all_scopes_complex_TestScopeOrdering.test_func_closure_all_scopes_complex.assert_request_fixturenam", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4124, "end_line": 4166, "span_ids": ["TestScopeOrdering.test_func_closure_all_scopes_complex"], "tokens": 283}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class TestScopeOrdering:\n\n def test_func_closure_all_scopes_complex(self, pytester: Pytester) -> None:\n \"\"\"Complex test involving all scopes and mixing autouse with normal fixtures\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='session')\n def s1(): pass\n\n @pytest.fixture(scope='package', autouse=True)\n def p1(): pass\n \"\"\"\n )\n pytester.makepyfile(**{\"__init__.py\": \"\"})\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='module', autouse=True)\n def m1(): pass\n\n @pytest.fixture(scope='module')\n def m2(s1): pass\n\n @pytest.fixture(scope='function')\n def f1(): pass\n\n @pytest.fixture(scope='function')\n def f2(): pass\n\n class Test:\n\n @pytest.fixture(scope='class', autouse=True)\n def c1(self):\n pass\n\n def test_func(self, f2, f1, m2):\n pass\n \"\"\"\n )\n items, _ = pytester.inline_genitems()\n request = FixtureRequest(items[0], _ispytest=True)\n assert request.fixturenames == \"s1 p1 m1 m2 c1 f2 f1\".split()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration_TestMockDecoration.test_unittest_mock.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration_TestMockDecoration.test_unittest_mock.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 99, "end_line": 145, "span_ids": ["TestMockDecoration.test_getfuncargnames_patching", "TestMockDecoration.test_getfuncargnames_patching.T.original", "TestMockDecoration.test_wrapped_getfuncargnames", "TestMockDecoration.test_unittest_mock", "TestMockDecoration", "TestMockDecoration.test_getfuncargnames_patching.T"], "tokens": 274}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMockDecoration:\n def test_wrapped_getfuncargnames(self) -> None:\n from _pytest.compat import getfuncargnames\n\n def wrap(f):\n def func():\n pass\n\n func.__wrapped__ = f # type: ignore\n return func\n\n @wrap\n def f(x):\n pass\n\n values = getfuncargnames(f)\n assert values == (\"x\",)\n\n def test_getfuncargnames_patching(self):\n from _pytest.compat import getfuncargnames\n from unittest.mock import patch\n\n class T:\n def original(self, x, y, z):\n pass\n\n @patch.object(T, \"original\")\n def f(x, y, z):\n pass\n\n values = getfuncargnames(f)\n assert values == (\"y\", \"z\")\n\n def test_unittest_mock(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest.mock\n class T(unittest.TestCase):\n @unittest.mock.patch(\"os.path.abspath\")\n def test_hello(self, abspath):\n import os\n os.path.abspath(\"hello\")\n abspath.assert_any_call(\"hello\")\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_unittest_mock_and_fixture_TestMockDecoration.test_unittest_mock_and_fixture.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_unittest_mock_and_fixture_TestMockDecoration.test_unittest_mock_and_fixture.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 147, "end_line": 166, "span_ids": ["TestMockDecoration.test_unittest_mock_and_fixture"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMockDecoration:\n\n def test_unittest_mock_and_fixture(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import os.path\n import unittest.mock\n import pytest\n\n @pytest.fixture\n def inject_me():\n pass\n\n @unittest.mock.patch.object(os.path, \"abspath\",\n new=unittest.mock.MagicMock)\n def test_hello(inject_me):\n import os\n os.path.abspath(\"hello\")\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_unittest_mock_and_pypi_mock_TestMockDecoration.test_unittest_mock_and_pypi_mock.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_unittest_mock_and_pypi_mock_TestMockDecoration.test_unittest_mock_and_pypi_mock.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 168, "end_line": 189, "span_ids": ["TestMockDecoration.test_unittest_mock_and_pypi_mock"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMockDecoration:\n\n def test_unittest_mock_and_pypi_mock(self, pytester: Pytester) -> None:\n pytest.importorskip(\"mock\", \"1.0.1\")\n pytester.makepyfile(\n \"\"\"\n import mock\n import unittest.mock\n class TestBoth(object):\n @unittest.mock.patch(\"os.path.abspath\")\n def test_hello(self, abspath):\n import os\n os.path.abspath(\"hello\")\n abspath.assert_any_call(\"hello\")\n\n @mock.patch(\"os.path.abspath\")\n def test_hello_mock(self, abspath):\n import os\n os.path.abspath(\"hello\")\n abspath.assert_any_call(\"hello\")\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_TestMockDecoration.test_mock.assert_funcnames_T_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_TestMockDecoration.test_mock.assert_funcnames_T_t", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 219, "end_line": 250, "span_ids": ["TestMockDecoration.test_mock"], "tokens": 277}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMockDecoration:\n\n def test_mock(self, pytester: Pytester) -> None:\n pytest.importorskip(\"mock\", \"1.0.1\")\n pytester.makepyfile(\n \"\"\"\n import os\n import unittest\n import mock\n\n class T(unittest.TestCase):\n @mock.patch(\"os.path.abspath\")\n def test_hello(self, abspath):\n os.path.abspath(\"hello\")\n abspath.assert_any_call(\"hello\")\n def mock_basename(path):\n return \"mock_basename\"\n @mock.patch(\"os.path.abspath\")\n @mock.patch(\"os.path.normpath\")\n @mock.patch(\"os.path.basename\", new=mock_basename)\n def test_someting(normpath, abspath, tmpdir):\n abspath.return_value = \"this\"\n os.path.normpath(os.path.abspath(\"hello\"))\n normpath.assert_any_call(\"this\")\n assert os.path.basename(\"123\") == \"mock_basename\"\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)\n calls = reprec.getcalls(\"pytest_runtest_logreport\")\n funcnames = [\n call.report.location[2] for call in calls if call.report.when == \"call\"\n ]\n assert funcnames == [\"T.test_hello\", \"test_someting\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_sorting_TestMockDecoration.test_mock_sorting.assert_names_test_on": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_sorting_TestMockDecoration.test_mock_sorting.assert_names_test_on", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 252, "end_line": 274, "span_ids": ["TestMockDecoration.test_mock_sorting"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMockDecoration:\n\n def test_mock_sorting(self, pytester: Pytester) -> None:\n pytest.importorskip(\"mock\", \"1.0.1\")\n pytester.makepyfile(\n \"\"\"\n import os\n import mock\n\n @mock.patch(\"os.path.abspath\")\n def test_one(abspath):\n pass\n 
@mock.patch(\"os.path.abspath\")\n def test_two(abspath):\n pass\n @mock.patch(\"os.path.abspath\")\n def test_three(abspath):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n calls = reprec.getreports(\"pytest_runtest_logreport\")\n calls = [x for x in calls if x.when == \"call\"]\n names = [x.nodeid.split(\"::\")[-1] for x in calls]\n assert names == [\"test_one\", \"test_two\", \"test_three\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_double_patch_issue473_TestMockDecoration.test_mock_double_patch_issue473.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_double_patch_issue473_TestMockDecoration.test_mock_double_patch_issue473.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 276, "end_line": 292, "span_ids": ["TestMockDecoration.test_mock_double_patch_issue473"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMockDecoration:\n\n def test_mock_double_patch_issue473(self, pytester: Pytester) -> None:\n pytest.importorskip(\"mock\", \"1.0.1\")\n pytester.makepyfile(\n \"\"\"\n from mock import patch\n from pytest import mark\n\n @patch('os.getcwd')\n @patch('os.path')\n @mark.slow\n class TestSimple(object):\n def test_simple_thing(self, mock_path, mock_getcwd):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestReRunTests_TestReRunTests.test_rerun.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestReRunTests_TestReRunTests.test_rerun.None_3", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 295, "end_line": 332, "span_ids": ["TestReRunTests.test_rerun", "TestReRunTests"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReRunTests:\n def test_rerun(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n from _pytest.runner import runtestprotocol\n def pytest_runtest_protocol(item, nextitem):\n runtestprotocol(item, log=False, nextitem=nextitem)\n runtestprotocol(item, log=True, nextitem=nextitem)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n 
import pytest\n count = 0\n req = None\n @pytest.fixture\n def fix(request):\n global count, req\n assert request != req\n req = request\n print(\"fix count %s\" % count)\n count += 1\n def test_fix(fix):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *fix count 0*\n *fix count 1*\n \"\"\"\n )\n result.stdout.fnmatch_lines(\n \"\"\"\n *2 passed*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_test_pytestconfig_is_session_scoped_TestNoselikeTestAttribute.test_class_and_method.assert_not_calls": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_test_pytestconfig_is_session_scoped_TestNoselikeTestAttribute.test_class_and_method.assert_not_calls", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 335, "end_line": 374, "span_ids": ["TestNoselikeTestAttribute.test_module_with_global_test", "TestNoselikeTestAttribute.test_class_and_method", "test_pytestconfig_is_session_scoped", "TestNoselikeTestAttribute"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytestconfig_is_session_scoped() -> None:\n from _pytest.fixtures import pytestconfig\n\n marker = getfixturemarker(pytestconfig)\n assert marker is not None\n assert marker.scope == \"session\"\n\n\nclass TestNoselikeTestAttribute:\n def test_module_with_global_test(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n __test__ = False\n def test_hello():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n assert not reprec.getfailedcollections()\n calls = reprec.getreports(\"pytest_runtest_logreport\")\n assert not calls\n\n def test_class_and_method(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n __test__ = True\n def test_func():\n pass\n test_func.__test__ = False\n\n class TestSome(object):\n __test__ = False\n def test_method(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n assert not reprec.getfailedcollections()\n calls = reprec.getreports(\"pytest_runtest_logreport\")\n assert not calls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestNoselikeTestAttribute.test_unittest_class_TestNoselikeTestAttribute.test_unittest_class.assert_call_items_0_cls_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestNoselikeTestAttribute.test_unittest_class_TestNoselikeTestAttribute.test_unittest_class.assert_call_items_0_cls_", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 376, "end_line": 393, "span_ids": 
["TestNoselikeTestAttribute.test_unittest_class"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNoselikeTestAttribute:\n\n def test_unittest_class(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n class TC(unittest.TestCase):\n def test_1(self):\n pass\n class TC2(unittest.TestCase):\n __test__ = False\n def test_2(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n assert not reprec.getfailedcollections()\n call = reprec.getcalls(\"pytest_collection_modifyitems\")[0]\n assert len(call.items) == 1\n assert call.items[0].cls.__name__ == \"TC\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestNoselikeTestAttribute.test_class_with_nasty_getattr_TestNoselikeTestAttribute.test_class_with_nasty_getattr.assert_not_call_items": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestNoselikeTestAttribute.test_class_with_nasty_getattr_TestNoselikeTestAttribute.test_class_with_nasty_getattr.assert_not_call_items", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 395, "end_line": 424, "span_ids": ["TestNoselikeTestAttribute.test_class_with_nasty_getattr"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNoselikeTestAttribute:\n\n def test_class_with_nasty_getattr(self, pytester: Pytester) -> None:\n \"\"\"Make sure we handle classes with a custom nasty __getattr__ right.\n\n With a custom __getattr__ which e.g. 
returns a function (like with a\n RPC wrapper), we shouldn't assume this meant \"__test__ = True\".\n \"\"\"\n # https://github.com/pytest-dev/pytest/issues/1204\n pytester.makepyfile(\n \"\"\"\n class MetaModel(type):\n\n def __getattr__(cls, key):\n return lambda: None\n\n\n BaseModel = MetaModel('Model', (), {})\n\n\n class Model(BaseModel):\n\n __metaclass__ = MetaModel\n\n def test_blah(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n assert not reprec.getfailedcollections()\n call = reprec.getcalls(\"pytest_collection_modifyitems\")[0]\n assert not call.items", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestParameterize_TestParameterize.test_idfn_marker.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestParameterize_TestParameterize.test_idfn_marker.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 427, "end_line": 449, "span_ids": ["TestParameterize.test_idfn_marker", "TestParameterize"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParameterize:\n \"\"\"#351\"\"\"\n\n def test_idfn_marker(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def idfn(param):\n if param == 0:\n return 'spam'\n elif param == 1:\n return 'ham'\n else:\n return None\n\n @pytest.mark.parametrize('a,b', [(0, 2), (1, 2)], ids=idfn)\n def test_params(a, b):\n pass\n \"\"\"\n )\n res = pytester.runpytest(\"--collect-only\")\n res.stdout.fnmatch_lines([\"*spam-2*\", \"*ham-2*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestParameterize.test_idfn_fixture_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestParameterize.test_idfn_fixture_", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 451, "end_line": 478, "span_ids": ["TestParameterize.test_idfn_fixture"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParameterize:\n\n def test_idfn_fixture(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def idfn(param):\n if param == 0:\n return 'spam'\n elif param == 1:\n return 'ham'\n else:\n return None\n\n @pytest.fixture(params=[0, 1], ids=idfn)\n 
def a(request):\n return request.param\n\n @pytest.fixture(params=[1, 2], ids=idfn)\n def b(request):\n return request.param\n\n def test_params(a, b):\n pass\n \"\"\"\n )\n res = pytester.runpytest(\"--collect-only\")\n res.stdout.fnmatch_lines([\"*spam-2*\", \"*ham-2*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_find_parametrized_scope_TestMetafunc.test_find_parametrized_scope.None_12": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_find_parametrized_scope_TestMetafunc.test_find_parametrized_scope.None_12", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 139, "end_line": 186, "span_ids": ["TestMetafunc.test_find_parametrized_scope.DummyFixtureDef", "TestMetafunc.test_find_parametrized_scope.DummyFixtureDef:2", "TestMetafunc.test_find_parametrized_scope"], "tokens": 471}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_find_parametrized_scope(self) -> None:\n \"\"\"Unit test for _find_parametrized_scope (#3941).\"\"\"\n from _pytest.python import _find_parametrized_scope\n\n @attr.s\n class DummyFixtureDef:\n scope = attr.ib()\n\n fixtures_defs = cast(\n Dict[str, Sequence[fixtures.FixtureDef[object]]],\n dict(\n session_fix=[DummyFixtureDef(\"session\")],\n package_fix=[DummyFixtureDef(\"package\")],\n module_fix=[DummyFixtureDef(\"module\")],\n class_fix=[DummyFixtureDef(\"class\")],\n func_fix=[DummyFixtureDef(\"function\")],\n ),\n )\n\n # use arguments to determine narrow scope; the cause of the bug is that it would look on all\n # fixture defs given to the method\n def find_scope(argnames, indirect):\n return _find_parametrized_scope(argnames, fixtures_defs, indirect=indirect)\n\n assert find_scope([\"func_fix\"], indirect=True) == \"function\"\n assert find_scope([\"class_fix\"], indirect=True) == \"class\"\n assert find_scope([\"module_fix\"], indirect=True) == \"module\"\n assert find_scope([\"package_fix\"], indirect=True) == \"package\"\n assert find_scope([\"session_fix\"], indirect=True) == \"session\"\n\n assert find_scope([\"class_fix\", \"func_fix\"], indirect=True) == \"function\"\n assert find_scope([\"func_fix\", \"session_fix\"], indirect=True) == \"function\"\n assert find_scope([\"session_fix\", \"class_fix\"], indirect=True) == \"class\"\n assert find_scope([\"package_fix\", \"session_fix\"], indirect=True) == \"package\"\n assert find_scope([\"module_fix\", \"session_fix\"], indirect=True) == \"module\"\n\n # when indirect is False or is not for all scopes, always use function\n assert find_scope([\"session_fix\", \"module_fix\"], indirect=False) == \"function\"\n assert (\n find_scope([\"session_fix\", \"module_fix\"], indirect=[\"module_fix\"])\n == \"function\"\n )\n assert (\n find_scope(\n [\"session_fix\", \"module_fix\"], indirect=[\"session_fix\", \"module_fix\"]\n )\n == \"module\"\n )", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_unicode_idval_TestMetafunc.test_unicode_idval.for_val_expected_in_valu.assert__idval_val_a_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_unicode_idval_TestMetafunc.test_unicode_idval.for_val_expected_in_valu.assert__idval_val_a_6", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 288, "end_line": 306, "span_ids": ["TestMetafunc.test_unicode_idval"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_unicode_idval(self) -> None:\n \"\"\"Test that Unicode strings outside the ASCII character set get\n escaped, using byte escapes if they're in that range or unicode\n escapes if they're not.\n\n \"\"\"\n values = [\n (\"\", r\"\"),\n (\"ascii\", r\"ascii\"),\n (\"a\u00e7\u00e3o\", r\"a\\xe7\\xe3o\"),\n (\"jos\u00e9@blah.com\", r\"jos\\xe9@blah.com\"),\n (\n r\"\u03b4\u03bf\u03ba.\u03b9\u03bc\u03ae@\u03c0\u03b1\u03c1\u03ac\u03b4\u03b5\u03b9\u03b3\u03bc\u03b1.\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae\",\n r\"\\u03b4\\u03bf\\u03ba.\\u03b9\\u03bc\\u03ae@\\u03c0\\u03b1\\u03c1\\u03ac\\u03b4\\u03b5\\u03b9\\u03b3\"\n r\"\\u03bc\\u03b1.\\u03b4\\u03bf\\u03ba\\u03b9\\u03bc\\u03ae\",\n ),\n ]\n for val, expected in values:\n assert _idval(val, \"a\", 6, None, nodeid=None, config=None) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_bytes_idval_TestMetafunc.test_bytes_idval.for_val_expected_in_valu.assert__idval_val_a_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_bytes_idval_TestMetafunc.test_bytes_idval.for_val_expected_in_valu.assert__idval_val_a_6", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 337, "end_line": 347, "span_ids": ["TestMetafunc.test_bytes_idval"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_bytes_idval(self) -> None:\n \"\"\"Unit test for the expected behavior to obtain ids for parametrized\n bytes values: bytes objects are always escaped using \"binary escape\".\"\"\"\n values = [\n (b\"\", r\"\"),\n (b\"\\xc3\\xb4\\xff\\xe4\", r\"\\xc3\\xb4\\xff\\xe4\"),\n (b\"ascii\", r\"ascii\"),\n (\"\u03b1\u03c1\u03ac\".encode(), 
r\"\\xce\\xb1\\xcf\\x81\\xce\\xac\"),\n ]\n for val, expected in values:\n assert _idval(val, \"a\", 6, idfn=None, nodeid=None, config=None) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_non_printable_characters_TestMetafunc.test_idmaker_non_printable_characters.assert_result_x00_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_non_printable_characters_TestMetafunc.test_idmaker_non_printable_characters.assert_result_x00_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 418, "end_line": 430, "span_ids": ["TestMetafunc.test_idmaker_non_printable_characters"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_non_printable_characters(self) -> None:\n result = idmaker(\n (\"s\", \"n\"),\n [\n pytest.param(\"\\x00\", 1),\n pytest.param(\"\\x05\", 2),\n pytest.param(b\"\\x00\", 3),\n pytest.param(b\"\\x05\", 4),\n pytest.param(\"\\t\", 5),\n pytest.param(b\"\\t\", 6),\n ],\n )\n assert result == [\"\\\\x00-1\", \"\\\\x05-2\", \"\\\\x00-3\", \"\\\\x05-4\", \"\\\\t-5\", \"\\\\t-6\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_manual_ids_must_be_printable_TestMetafunc.test_idmaker_enum.assert_result_Foo_on": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_manual_ids_must_be_printable_TestMetafunc.test_idmaker_enum.assert_result_Foo_on", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 432, "end_line": 446, "span_ids": ["TestMetafunc.test_idmaker_manual_ids_must_be_printable", "TestMetafunc.test_idmaker_enum"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_manual_ids_must_be_printable(self) -> None:\n result = idmaker(\n (\"s\",),\n [\n pytest.param(\"x00\", id=\"hello \\x00\"),\n pytest.param(\"x05\", id=\"hello \\x05\"),\n ],\n )\n assert result == [\"hello \\\\x00\", \"hello \\\\x05\"]\n\n def test_idmaker_enum(self) -> None:\n enum = pytest.importorskip(\"enum\")\n e = enum.Enum(\"Foo\", \"one, two\")\n result = idmaker((\"a\", \"b\"), [pytest.param(e.one, e.two)])\n assert result == [\"Foo.one-Foo.two\"]", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_idfn_TestMetafunc.test_idmaker_idfn.assert_result_10_0_I": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_idfn_TestMetafunc.test_idmaker_idfn.assert_result_10_0_I", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 448, "end_line": 465, "span_ids": ["TestMetafunc.test_idmaker_idfn"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_idfn(self) -> None:\n \"\"\"#351\"\"\"\n\n def ids(val: object) -> Optional[str]:\n if isinstance(val, Exception):\n return repr(val)\n return None\n\n result = idmaker(\n (\"a\", \"b\"),\n [\n pytest.param(10.0, IndexError()),\n pytest.param(20, KeyError()),\n pytest.param(\"three\", [1, 2, 3]),\n ],\n idfn=ids,\n )\n assert result == [\"10.0-IndexError()\", \"20-KeyError()\", \"three-b2\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_idfn_unique_names_TestMetafunc.test_idmaker_idfn_unique_names.assert_result_a_a0_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_idfn_unique_names_TestMetafunc.test_idmaker_idfn_unique_names.assert_result_a_a0_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 467, "end_line": 482, "span_ids": ["TestMetafunc.test_idmaker_idfn_unique_names"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_idfn_unique_names(self) -> None:\n \"\"\"#351\"\"\"\n\n def ids(val: object) -> str:\n return \"a\"\n\n result = idmaker(\n (\"a\", \"b\"),\n [\n pytest.param(10.0, IndexError()),\n pytest.param(20, KeyError()),\n pytest.param(\"three\", [1, 2, 3]),\n ],\n idfn=ids,\n )\n assert result == [\"a-a0\", \"a-a1\", \"a-a2\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_exception_TestMetafunc.test_parametrize_ids_exception.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_exception_TestMetafunc.test_parametrize_ids_exception.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 559, "end_line": 582, "span_ids": ["TestMetafunc.test_parametrize_ids_exception"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_ids_exception(self, pytester: Pytester) -> None:\n \"\"\"\n :param pytester: the instance of Pytester class, a temporary\n test directory.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def ids(arg):\n raise Exception(\"bad ids\")\n\n @pytest.mark.parametrize(\"arg\", [\"a\", \"b\"], ids=ids)\n def test_foo(arg):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*Exception: bad ids\",\n \"*test_foo: error raised while trying to determine id of parameter 'arg' at position 0\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_TestMetafunc.test_parametrize_indirect.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_TestMetafunc.test_parametrize_indirect.None_4", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 638, "end_line": 651, "span_ids": ["TestMetafunc.test_parametrize_indirect"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_indirect(self) -> None:\n \"\"\"#714\"\"\"\n\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x\", [1], indirect=True)\n metafunc.parametrize(\"y\", [2, 3], indirect=True)\n assert len(metafunc._calls) == 2\n assert metafunc._calls[0].funcargs == {}\n assert metafunc._calls[1].funcargs == {}\n assert metafunc._calls[0].params == dict(x=1, y=2)\n assert metafunc._calls[1].params == dict(x=1, y=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_functional_TestMetafunc.test_parametrize_indirect_list_functional.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_functional_TestMetafunc.test_parametrize_indirect_list_functional.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 691, "end_line": 718, "span_ids": ["TestMetafunc.test_parametrize_indirect_list_functional"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_indirect_list_functional(self, pytester: Pytester) -> None:\n \"\"\"\n #714\n Test parametrization with 'indirect' parameter applied on\n particular arguments. As y is is direct, its value should\n be used directly rather than being passed to the fixture\n y.\n\n :param pytester: the instance of Pytester class, a temporary\n test directory.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope='function')\n def x(request):\n return request.param * 3\n @pytest.fixture(scope='function')\n def y(request):\n return request.param * 2\n @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['x'])\n def test_simple(x,y):\n assert len(x) == 3\n assert len(y) == 1\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines([\"*test_simple*a-b*\", \"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_error_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_false.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_error_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_false.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 720, "end_line": 750, "span_ids": ["TestMetafunc.test_parametrize_indirect_list_error", "TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_false"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_indirect_list_error(self) -> None:\n \"\"\"#714\"\"\"\n\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n with pytest.raises(fail.Exception):\n metafunc.parametrize(\"x, y\", [(\"a\", \"b\")], indirect=[\"x\", \"z\"])\n\n def test_parametrize_uses_no_fixture_error_indirect_false(\n self, pytester: Pytester\n ) -> None:\n \"\"\"The 'uses no fixture' error tells the user at collection time\n that the parametrize data they've set up doesn't correspond to the\n fixtures in 
their test function, rather than silently ignoring this\n and letting the test potentially pass.\n\n #714\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=False)\n def test_simple(x):\n assert len(x) == 3\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*uses no argument 'y'*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_true_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_true.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_true_TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_true.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 752, "end_line": 772, "span_ids": ["TestMetafunc.test_parametrize_uses_no_fixture_error_indirect_true"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_uses_no_fixture_error_indirect_true(\n self, pytester: Pytester\n ) -> None:\n \"\"\"#714\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope='function')\n def x(request):\n return request.param * 3\n @pytest.fixture(scope='function')\n def y(request):\n return request.param * 2\n\n @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=True)\n def test_simple(x):\n assert len(x) == 3\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*uses no fixture 'y'*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_string_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_string.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_string_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_string.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 774, "end_line": 791, "span_ids": ["TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_string"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_indirect_uses_no_fixture_error_indirect_string(\n self, pytester: Pytester\n ) -> None:\n \"\"\"#714\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope='function')\n def x(request):\n return request.param * 3\n\n @pytest.mark.parametrize('x, y', [('a', 'b')], indirect='y')\n def test_simple(x):\n assert len(x) == 3\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*uses no fixture 'y'*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_list_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_list.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_list_TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_list.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 793, "end_line": 810, "span_ids": ["TestMetafunc.test_parametrize_indirect_uses_no_fixture_error_indirect_list"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_indirect_uses_no_fixture_error_indirect_list(\n self, pytester: Pytester\n ) -> None:\n \"\"\"#714\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope='function')\n def x(request):\n return request.param * 3\n\n @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['y'])\n def test_simple(x):\n assert len(x) == 3\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*uses no fixture 'y'*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_argument_not_in_indirect_list_TestMetafunc.test_parametrize_argument_not_in_indirect_list.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_argument_not_in_indirect_list_TestMetafunc.test_parametrize_argument_not_in_indirect_list.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 812, "end_line": 829, "span_ids": ["TestMetafunc.test_parametrize_argument_not_in_indirect_list"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_argument_not_in_indirect_list(\n self, pytester: Pytester\n ) -> None:\n \"\"\"#714\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope='function')\n def x(request):\n return request.param * 3\n\n @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['x'])\n def test_simple(x):\n assert len(x) == 3\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*uses no argument 'y'*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_gives_indicative_error_on_function_with_default_argument_TestMetafunc.test_parametrize_gives_indicative_error_on_function_with_default_argument.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_gives_indicative_error_on_function_with_default_argument_TestMetafunc.test_parametrize_gives_indicative_error_on_function_with_default_argument.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 831, "end_line": 846, "span_ids": ["TestMetafunc.test_parametrize_gives_indicative_error_on_function_with_default_argument"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_gives_indicative_error_on_function_with_default_argument(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize('x, y', [('a', 'b')])\n def test_simple(x, y=1):\n assert len(x) == 1\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines(\n [\"*already takes an argument 'y' with a default value\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_functional_TestMetafunc.test_parametrize_functional.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_functional_TestMetafunc.test_parametrize_functional.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 848, "end_line": 867, "span_ids": ["TestMetafunc.test_parametrize_functional"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_functional(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n metafunc.parametrize('x', [1,2], indirect=True)\n metafunc.parametrize('y', [2])\n @pytest.fixture\n def x(request):\n return request.param * 10\n\n def test_simple(x,y):\n assert x in (10,20)\n assert y == 2\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n [\"*test_simple*1-2*\", \"*test_simple*2-2*\", \"*2 passed*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_onearg_TestMetafunc.test_parametrize_onearg_indirect.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_onearg_TestMetafunc.test_parametrize_onearg_indirect.None_3", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 873, "end_line": 888, "span_ids": ["TestMetafunc.test_parametrize_onearg_indirect", "TestMetafunc.test_parametrize_onearg"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_onearg(self) -> None:\n metafunc = self.Metafunc(lambda x: None)\n metafunc.parametrize(\"x\", [1, 2])\n assert len(metafunc._calls) == 2\n assert metafunc._calls[0].funcargs == dict(x=1)\n assert metafunc._calls[0].id == \"1\"\n assert metafunc._calls[1].funcargs == dict(x=2)\n assert metafunc._calls[1].id == \"2\"\n\n def test_parametrize_onearg_indirect(self) -> None:\n metafunc = self.Metafunc(lambda x: None)\n metafunc.parametrize(\"x\", [1, 2], indirect=True)\n assert metafunc._calls[0].params == dict(x=1)\n assert metafunc._calls[0].id == \"1\"\n assert metafunc._calls[1].params == dict(x=2)\n assert metafunc._calls[1].id == \"2\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_twoargs_TestMetafunc.test_parametrize_twoargs.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_twoargs_TestMetafunc.test_parametrize_twoargs.None_4", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 890, "end_line": 897, "span_ids": ["TestMetafunc.test_parametrize_twoargs"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_twoargs(self) -> None:\n metafunc = self.Metafunc(lambda x, y: None)\n metafunc.parametrize((\"x\", \"y\"), [(1, 2), (3, 4)])\n assert len(metafunc._calls) == 2\n assert metafunc._calls[0].funcargs == dict(x=1, y=2)\n assert metafunc._calls[0].id == \"1-2\"\n assert metafunc._calls[1].funcargs == dict(x=3, y=4)\n assert metafunc._calls[1].id == \"3-4\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_multiple_times_TestMetafunc.test_parametrize_CSV.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_multiple_times_TestMetafunc.test_parametrize_CSV.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 895, "end_line": 922, "span_ids": ["TestMetafunc.test_parametrize_CSV", "TestMetafunc.test_parametrize_multiple_times"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_multiple_times(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n pytestmark = pytest.mark.parametrize(\"x\", [1,2])\n def test_func(x):\n assert 0, x\n class TestClass(object):\n pytestmark = pytest.mark.parametrize(\"y\", [3,4])\n def test_meth(self, x, y):\n assert 0, x\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.assert_outcomes(failed=6)\n\n def test_parametrize_CSV(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize(\"x, y,\", [(1,2), (2,3)])\n def test_func(x, y):\n assert x+1 == y\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_class_scenarios_TestMetafunc.test_parametrize_class_scenarios.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_class_scenarios_TestMetafunc.test_parametrize_class_scenarios.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 924, "end_line": 964, "span_ids": ["TestMetafunc.test_parametrize_class_scenarios"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_class_scenarios(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n # same as doc/en/example/parametrize scenario example\n def pytest_generate_tests(metafunc):\n idlist = []\n argvalues = []\n for scenario in metafunc.cls.scenarios:\n idlist.append(scenario[0])\n items = scenario[1].items()\n argnames = [x[0] for x in items]\n argvalues.append(([x[1] for x in items]))\n metafunc.parametrize(argnames, argvalues, ids=idlist, scope=\"class\")\n\n class Test(object):\n scenarios = [['1', {'arg': {1: 2}, \"arg2\": \"value2\"}],\n ['2', {'arg':'value2', \"arg2\": \"value2\"}]]\n\n def test_1(self, arg, arg2):\n pass\n\n def test_2(self, arg2, arg):\n pass\n\n def test_3(self, arg, arg2):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n \"\"\"\n *test_1*1*\n *test_2*1*\n *test_3*1*\n *test_1*2*\n *test_2*2*\n *test_3*2*\n *6 passed*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_format_args_TestMetafunc.test_format_args.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_format_args_TestMetafunc.test_format_args.None_3", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 966, "end_line": 985, "span_ids": ["TestMetafunc.test_format_args"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_format_args(self) -> None:\n def function1():\n pass\n\n assert _format_args(function1) == \"()\"\n\n def function2(arg1):\n pass\n\n assert _format_args(function2) == \"(arg1)\"\n\n def function3(arg1, arg2=\"qwe\"):\n pass\n\n assert _format_args(function3) == \"(arg1, arg2='qwe')\"\n\n def function4(arg1, *args, **kwargs):\n pass\n\n assert _format_args(function4) == \"(arg1, *args, **kwargs)\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional_TestMetafuncFunctional.test_attributes.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional_TestMetafuncFunctional.test_attributes.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 988, "end_line": 1017, "span_ids": 
["TestMetafuncFunctional", "TestMetafuncFunctional.test_attributes"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n def test_attributes(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n # assumes that generate/provide runs in the same process\n import sys, pytest\n def pytest_generate_tests(metafunc):\n metafunc.parametrize('metafunc', [metafunc])\n\n @pytest.fixture\n def metafunc(request):\n return request.param\n\n def test_function(metafunc, pytestconfig):\n assert metafunc.config == pytestconfig\n assert metafunc.module.__name__ == __name__\n assert metafunc.function == test_function\n assert metafunc.cls is None\n\n class TestClass(object):\n def test_method(self, metafunc, pytestconfig):\n assert metafunc.config == pytestconfig\n assert metafunc.module.__name__ == __name__\n unbound = TestClass.test_method\n assert metafunc.function == unbound\n assert metafunc.cls == TestClass\n \"\"\"\n )\n result = pytester.runpytest(p, \"-v\")\n result.assert_outcomes(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_two_functions_TestMetafuncFunctional.test_two_functions.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_two_functions_TestMetafuncFunctional.test_two_functions.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1019, "end_line": 1041, "span_ids": ["TestMetafuncFunctional.test_two_functions"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_two_functions(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n metafunc.parametrize('arg1', [10, 20], ids=['0', '1'])\n\n def test_func1(arg1):\n assert arg1 == 10\n\n def test_func2(arg1):\n assert arg1 in (10, 20)\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", p)\n result.stdout.fnmatch_lines(\n [\n \"*test_func1*0*PASS*\",\n \"*test_func1*1*FAIL*\",\n \"*test_func2*PASS*\",\n \"*test_func2*PASS*\",\n \"*1 failed, 3 passed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_noself_in_method_TestMetafuncFunctional.test_generate_tests_in_class.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_noself_in_method_TestMetafuncFunctional.test_generate_tests_in_class.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1043, "end_line": 1069, "span_ids": ["TestMetafuncFunctional.test_noself_in_method", "TestMetafuncFunctional.test_generate_tests_in_class"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_noself_in_method(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n assert 'xyz' not in metafunc.fixturenames\n\n class TestHello(object):\n def test_hello(xyz):\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.assert_outcomes(passed=1)\n\n def test_generate_tests_in_class(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n class TestClass(object):\n def pytest_generate_tests(self, metafunc):\n metafunc.parametrize('hello', ['world'], ids=['hellow'])\n\n def test_myfunc(self, hello):\n assert hello == \"world\"\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", p)\n result.stdout.fnmatch_lines([\"*test_myfunc*hello*PASS*\", \"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_two_functions_not_same_instance_TestMetafuncFunctional.test_two_functions_not_same_instance.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_two_functions_not_same_instance_TestMetafuncFunctional.test_two_functions_not_same_instance.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1071, "end_line": 1086, "span_ids": ["TestMetafuncFunctional.test_two_functions_not_same_instance"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_two_functions_not_same_instance(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n metafunc.parametrize('arg1', [10, 20], ids=[\"0\", \"1\"])\n\n class TestClass(object):\n def test_func(self, arg1):\n assert not hasattr(self, 'x')\n self.x = 1\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", p)\n result.stdout.fnmatch_lines(\n [\"*test_func*0*PASS*\", \"*test_func*1*PASS*\", \"*2 pass*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_issue28_setup_method_in_generate_tests_TestMetafuncFunctional.test_issue28_setup_method_in_generate_tests.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_issue28_setup_method_in_generate_tests_TestMetafuncFunctional.test_issue28_setup_method_in_generate_tests.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1088, "end_line": 1102, "span_ids": ["TestMetafuncFunctional.test_issue28_setup_method_in_generate_tests"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_issue28_setup_method_in_generate_tests(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n metafunc.parametrize('arg1', [1])\n\n class TestClass(object):\n def test_method(self, arg1):\n assert arg1 == self.val\n def setup_method(self, func):\n self.val = 1\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_functional2_TestMetafuncFunctional.test_parametrize_functional2.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_functional2_TestMetafuncFunctional.test_parametrize_functional2.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1104, "end_line": 1117, "span_ids": ["TestMetafuncFunctional.test_parametrize_functional2"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_functional2(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n metafunc.parametrize(\"arg1\", [1,2])\n metafunc.parametrize(\"arg2\", [4,5])\n def test_hello(arg1, arg2):\n assert 0, (arg1, arg2)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"*(1, 4)*\", \"*(1, 5)*\", \"*(2, 4)*\", \"*(2, 5)*\", \"*4 failed*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, 
"__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_and_inner_getfixturevalue_TestMetafuncFunctional.test_parametrize_and_inner_getfixturevalue.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_and_inner_getfixturevalue_TestMetafuncFunctional.test_parametrize_and_inner_getfixturevalue.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1119, "end_line": 1141, "span_ids": ["TestMetafuncFunctional.test_parametrize_and_inner_getfixturevalue"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_and_inner_getfixturevalue(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n metafunc.parametrize(\"arg1\", [1], indirect=True)\n metafunc.parametrize(\"arg2\", [10], indirect=True)\n\n import pytest\n @pytest.fixture\n def arg1(request):\n x = request.getfixturevalue(\"arg2\")\n return x + request.param\n\n @pytest.fixture\n def arg2(request):\n return request.param\n\n def test_func1(arg1, arg2):\n assert arg1 == 11\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", p)\n result.stdout.fnmatch_lines([\"*test_func1*1*PASS*\", \"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_on_setup_arg_TestMetafuncFunctional.test_parametrize_on_setup_arg.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_on_setup_arg_TestMetafuncFunctional.test_parametrize_on_setup_arg.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1143, "end_line": 1164, "span_ids": ["TestMetafuncFunctional.test_parametrize_on_setup_arg"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_on_setup_arg(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n assert \"arg1\" in metafunc.fixturenames\n metafunc.parametrize(\"arg1\", [1], indirect=True)\n\n import pytest\n @pytest.fixture\n def arg1(request):\n return request.param\n\n @pytest.fixture\n def arg2(request, arg1):\n return 10 * arg1\n\n def test_func(arg2):\n assert arg2 == 10\n \"\"\"\n )\n result = 
pytester.runpytest(\"-v\", p)\n result.stdout.fnmatch_lines([\"*test_func*1*PASS*\", \"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_ids_TestMetafuncFunctional.test_parametrize_with_ids.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_ids_TestMetafuncFunctional.test_parametrize_with_ids.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1166, "end_line": 1188, "span_ids": ["TestMetafuncFunctional.test_parametrize_with_ids"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_with_ids(self, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n console_output_style=classic\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n metafunc.parametrize((\"a\", \"b\"), [(1,1), (1,2)],\n ids=[\"basic\", \"advanced\"])\n\n def test_function(a, b):\n assert a == b\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n assert result.ret == 1\n result.stdout.fnmatch_lines_random(\n [\"*test_function*basic*PASSED\", \"*test_function*advanced*FAILED\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_without_ids_TestMetafuncFunctional.test_parametrize_without_ids.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_without_ids_TestMetafuncFunctional.test_parametrize_without_ids.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1190, "end_line": 1208, "span_ids": ["TestMetafuncFunctional.test_parametrize_without_ids"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_without_ids(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n metafunc.parametrize((\"a\", \"b\"),\n [(1,object()), (1.3,object())])\n\n def test_function(a, b):\n assert 1\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n 
result.stdout.fnmatch_lines(\n \"\"\"\n *test_function*1-b0*\n *test_function*1.3-b1*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_None_in_ids_TestMetafuncFunctional.test_parametrize_with_None_in_ids.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_None_in_ids_TestMetafuncFunctional.test_parametrize_with_None_in_ids.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1210, "end_line": 1230, "span_ids": ["TestMetafuncFunctional.test_parametrize_with_None_in_ids"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_with_None_in_ids(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n metafunc.parametrize((\"a\", \"b\"), [(1,1), (1,1), (1,2)],\n ids=[\"basic\", None, \"advanced\"])\n\n def test_function(a, b):\n assert a == b\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n assert result.ret == 1\n result.stdout.fnmatch_lines_random(\n [\n \"*test_function*basic*PASSED*\",\n \"*test_function*1-1*PASSED*\",\n \"*test_function*advanced*FAILED*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_fixture_parametrized_empty_ids_TestMetafuncFunctional.test_parametrized_empty_ids.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_fixture_parametrized_empty_ids_TestMetafuncFunctional.test_parametrized_empty_ids.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1232, "end_line": 1261, "span_ids": ["TestMetafuncFunctional.test_fixture_parametrized_empty_ids", "TestMetafuncFunctional.test_parametrized_empty_ids"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_fixture_parametrized_empty_ids(self, pytester: Pytester) -> None:\n \"\"\"Fixtures parametrized with empty ids cause an internal error (#1849).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"module\", ids=[], 
params=[])\n def temp(request):\n return request.param\n\n def test_temp(temp):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 skipped *\"])\n\n def test_parametrized_empty_ids(self, pytester: Pytester) -> None:\n \"\"\"Tests parametrized with empty ids cause an internal error (#1849).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize('temp', [], ids=list())\n def test_temp(temp):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 skipped *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrized_ids_invalid_type_TestMetafuncFunctional.test_parametrized_ids_invalid_type.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrized_ids_invalid_type_TestMetafuncFunctional.test_parametrized_ids_invalid_type.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1263, "end_line": 1280, "span_ids": ["TestMetafuncFunctional.test_parametrized_ids_invalid_type"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrized_ids_invalid_type(self, pytester: Pytester) -> None:\n \"\"\"Test error with non-strings/non-ints, without generator (#1857).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize(\"x, expected\", [(1, 2), (3, 4), (5, 6)], ids=(None, 2, type))\n def test_ids_numbers(x,expected):\n assert x * 2 == expected\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"In test_ids_numbers: ids must be list of string/float/int/bool,\"\n \" found: <class 'type'> (type: <class 'type'>) at index 2\"\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_identical_ids_get_unique_names_TestMetafuncFunctional.test_parametrize_with_identical_ids_get_unique_names.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_with_identical_ids_get_unique_names_TestMetafuncFunctional.test_parametrize_with_identical_ids_get_unique_names.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1282, "end_line": 1300, "span_ids": ["TestMetafuncFunctional.test_parametrize_with_identical_ids_get_unique_names"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_with_identical_ids_get_unique_names(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n metafunc.parametrize((\"a\", \"b\"), [(1,1), (1,2)],\n ids=[\"a\", \"a\"])\n\n def test_function(a, b):\n assert a == b\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n assert result.ret == 1\n result.stdout.fnmatch_lines_random(\n [\"*test_function*a0*PASSED*\", \"*test_function*a1*FAILED*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_scope_overrides_TestMetafuncFunctional.test_parametrize_scope_overrides.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_scope_overrides_TestMetafuncFunctional.test_parametrize_scope_overrides.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1302, "end_line": 1328, "span_ids": ["TestMetafuncFunctional.test_parametrize_scope_overrides"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n @pytest.mark.parametrize((\"scope\", \"length\"), [(\"module\", 2), (\"function\", 4)])\n def test_parametrize_scope_overrides(\n self, pytester: Pytester, scope: str, length: int\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n def pytest_generate_tests(metafunc):\n if \"arg\" in metafunc.fixturenames:\n metafunc.parametrize(\"arg\", [1,2], indirect=True,\n scope=%r)\n @pytest.fixture\n def arg(request):\n values.append(request.param)\n return request.param\n def test_hello(arg):\n assert arg in (1,2)\n def test_world(arg):\n assert arg in (1,2)\n def test_checklength():\n assert len(values) == %d\n \"\"\"\n % (scope, length)\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_issue323_TestMetafuncFunctional.test_usefixtures_seen_in_generate_tests.reprec_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_issue323_TestMetafuncFunctional.test_usefixtures_seen_in_generate_tests.reprec_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", 
"file_type": "text/x-python", "category": "implementation", "start_line": 1330, "end_line": 1362, "span_ids": ["TestMetafuncFunctional.test_usefixtures_seen_in_generate_tests", "TestMetafuncFunctional.test_parametrize_issue323"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_issue323(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='module', params=range(966))\n def foo(request):\n return request.param\n\n def test_it(foo):\n pass\n def test_it2(foo):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(\"--collect-only\")\n assert not reprec.getcalls(\"pytest_internalerror\")\n\n def test_usefixtures_seen_in_generate_tests(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n assert \"abc\" in metafunc.fixturenames\n metafunc.parametrize(\"abc\", [1])\n\n @pytest.mark.usefixtures(\"abc\")\n def test_function():\n pass\n \"\"\"\n )\n reprec = pytester.runpytest()\n reprec.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_generate_tests_only_done_in_subdir_TestMetafuncFunctional.test_generate_tests_only_done_in_subdir.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_generate_tests_only_done_in_subdir_TestMetafuncFunctional.test_generate_tests_only_done_in_subdir.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1364, "end_line": 1386, "span_ids": ["TestMetafuncFunctional.test_generate_tests_only_done_in_subdir"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_generate_tests_only_done_in_subdir(self, pytester: Pytester) -> None:\n sub1 = pytester.mkpydir(\"sub1\")\n sub2 = pytester.mkpydir(\"sub2\")\n sub1.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def pytest_generate_tests(metafunc):\n assert metafunc.function.__name__ == \"test_1\"\n \"\"\"\n )\n )\n sub2.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def pytest_generate_tests(metafunc):\n assert metafunc.function.__name__ == \"test_2\"\n \"\"\"\n )\n )\n sub1.joinpath(\"test_in_sub1.py\").write_text(\"def test_1(): pass\")\n sub2.joinpath(\"test_in_sub2.py\").write_text(\"def test_2(): pass\")\n result = pytester.runpytest(\"--keep-duplicates\", \"-v\", \"-s\", sub1, sub2, sub1)\n result.assert_outcomes(passed=3)", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto_TestMetafuncFunctionalAuto.test_parametrize_auto_scope.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto_TestMetafuncFunctionalAuto.test_parametrize_auto_scope.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1433, "end_line": 1457, "span_ids": ["TestMetafuncFunctionalAuto.test_parametrize_auto_scope", "TestMetafuncFunctionalAuto"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctionalAuto:\n \"\"\"Tests related to automatically find out the correct scope for\n parametrized tests (#1832).\"\"\"\n\n def test_parametrize_auto_scope(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='session', autouse=True)\n def fixture():\n return 1\n\n @pytest.mark.parametrize('animal', [\"dog\", \"cat\"])\n def test_1(animal):\n assert animal in ('dog', 'cat')\n\n @pytest.mark.parametrize('animal', ['fish'])\n def test_2(animal):\n assert animal == 'fish'\n\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 3 passed *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_indirect_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_indirect.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_indirect_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_indirect.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1459, "end_line": 1480, "span_ids": ["TestMetafuncFunctionalAuto.test_parametrize_auto_scope_indirect"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctionalAuto:\n\n def test_parametrize_auto_scope_indirect(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='session')\n def echo(request):\n return request.param\n\n @pytest.mark.parametrize('animal, echo', [(\"dog\", 1), (\"cat\", 2)], indirect=['echo'])\n def test_1(animal, echo):\n assert animal in ('dog', 'cat')\n 
assert echo in (1, 2, 3)\n\n @pytest.mark.parametrize('animal, echo', [('fish', 3)], indirect=['echo'])\n def test_2(animal, echo):\n assert animal == 'fish'\n assert echo in (1, 2, 3)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 3 passed *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_override_fixture_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_override_fixture.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_override_fixture_TestMetafuncFunctionalAuto.test_parametrize_auto_scope_override_fixture.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1482, "end_line": 1497, "span_ids": ["TestMetafuncFunctionalAuto.test_parametrize_auto_scope_override_fixture"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctionalAuto:\n\n def test_parametrize_auto_scope_override_fixture(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='session', autouse=True)\n def animal():\n return 'fox'\n\n @pytest.mark.parametrize('animal', [\"dog\", \"cat\"])\n def test_1(animal):\n assert animal in ('dog', 'cat')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 2 passed *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_all_indirects_TestMetafuncFunctionalAuto.test_parametrize_all_indirects.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_all_indirects_TestMetafuncFunctionalAuto.test_parametrize_all_indirects.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1499, "end_line": 1524, "span_ids": ["TestMetafuncFunctionalAuto.test_parametrize_all_indirects"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctionalAuto:\n\n def test_parametrize_all_indirects(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture()\n def 
animal(request):\n return request.param\n\n @pytest.fixture(scope='session')\n def echo(request):\n return request.param\n\n @pytest.mark.parametrize('animal, echo', [(\"dog\", 1), (\"cat\", 2)], indirect=True)\n def test_1(animal, echo):\n assert animal in ('dog', 'cat')\n assert echo in (1, 2, 3)\n\n @pytest.mark.parametrize('animal, echo', [(\"fish\", 3)], indirect=True)\n def test_2(animal, echo):\n assert animal == 'fish'\n assert echo in (1, 2, 3)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 3 passed *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_some_arguments_auto_scope_TestMetafuncFunctionalAuto.test_parametrize_some_arguments_auto_scope.assert_class_fix_setup_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_some_arguments_auto_scope_TestMetafuncFunctionalAuto.test_parametrize_some_arguments_auto_scope.assert_class_fix_setup_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1526, "end_line": 1559, "span_ids": ["TestMetafuncFunctionalAuto.test_parametrize_some_arguments_auto_scope"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctionalAuto:\n\n def test_parametrize_some_arguments_auto_scope(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n \"\"\"Integration test for (#3941)\"\"\"\n class_fix_setup: List[object] = []\n monkeypatch.setattr(sys, \"class_fix_setup\", class_fix_setup, raising=False)\n func_fix_setup: List[object] = []\n monkeypatch.setattr(sys, \"func_fix_setup\", func_fix_setup, raising=False)\n\n pytester.makepyfile(\n \"\"\"\n import pytest\n import sys\n\n @pytest.fixture(scope='class', autouse=True)\n def class_fix(request):\n sys.class_fix_setup.append(request.param)\n\n @pytest.fixture(autouse=True)\n def func_fix():\n sys.func_fix_setup.append(True)\n\n @pytest.mark.parametrize('class_fix', [10, 20], indirect=True)\n class Test:\n def test_foo(self):\n pass\n def test_bar(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest_inprocess()\n result.stdout.fnmatch_lines([\"* 4 passed in *\"])\n assert func_fix_setup == [True] * 4\n assert class_fix_setup == [10, 20]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_issue634_TestMetafuncFunctionalAuto.test_parametrize_issue634.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctionalAuto.test_parametrize_issue634_TestMetafuncFunctionalAuto.test_parametrize_issue634.None_1", "embedding": null, "metadata": {"file_path": 
"testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1561, "end_line": 1593, "span_ids": ["TestMetafuncFunctionalAuto.test_parametrize_issue634"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctionalAuto:\n\n def test_parametrize_issue634(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope='module')\n def foo(request):\n print('preparing foo-%d' % request.param)\n return 'foo-%d' % request.param\n\n def test_one(foo):\n pass\n\n def test_two(foo):\n pass\n\n test_two.test_with = (2, 3)\n\n def pytest_generate_tests(metafunc):\n params = (1, 2, 3, 4)\n if not 'foo' in metafunc.fixturenames:\n return\n\n test_with = getattr(metafunc.function, 'test_with', None)\n if test_with:\n params = test_with\n metafunc.parametrize('foo', params, indirect=True)\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n output = result.stdout.str()\n assert output.count(\"preparing foo-2\") == 1\n assert output.count(\"preparing foo-3\") == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization_TestMarkersWithParametrization.test_simple_mark.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization_TestMarkersWithParametrization.test_simple_mark.None_3", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1596, "end_line": 1618, "span_ids": ["TestMarkersWithParametrization", "TestMarkersWithParametrization.test_simple_mark"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n \"\"\"#308\"\"\"\n\n def test_simple_mark(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.foo\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n (1, 2),\n pytest.param(1, 3, marks=pytest.mark.bar),\n (2, 3),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\"\n items = pytester.getitems(s)\n assert len(items) == 3\n for item in items:\n assert \"foo\" in item.keywords\n assert \"bar\" not in items[0].keywords\n assert \"bar\" in items[1].keywords\n assert \"bar\" not in items[2].keywords", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_select_based_on_mark_TestMarkersWithParametrization.test_select_based_on_mark.assert_len_fail_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_select_based_on_mark_TestMarkersWithParametrization.test_select_based_on_mark.assert_len_fail_0", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1620, "end_line": 1637, "span_ids": ["TestMarkersWithParametrization.test_select_based_on_mark"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_select_based_on_mark(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n (1, 2),\n pytest.param(2, 3, marks=pytest.mark.foo),\n (3, 4),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\"\n pytester.makepyfile(s)\n rec = pytester.inline_run(\"-m\", \"foo\")\n passed, skipped, fail = rec.listoutcomes()\n assert len(passed) == 1\n assert len(skipped) == 0\n assert len(fail) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_simple_xfail_TestMarkersWithParametrization.test_simple_xfail.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_simple_xfail_TestMarkersWithParametrization.test_simple_xfail.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1639, "end_line": 1654, "span_ids": ["TestMarkersWithParametrization.test_simple_xfail"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_simple_xfail(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n (1, 2),\n pytest.param(1, 3, marks=pytest.mark.xfail),\n (2, 3),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\"\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n # xfail is skip??\n reprec.assertoutcome(passed=2, skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_kwarg_TestMarkersWithParametrization.test_xfail_with_kwarg.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_kwarg_TestMarkersWithParametrization.test_xfail_with_kwarg.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1688, "end_line": 1702, "span_ids": ["TestMarkersWithParametrization.test_xfail_with_kwarg"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_xfail_with_kwarg(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n (1, 2),\n pytest.param(1, 3, marks=pytest.mark.xfail(reason=\"some bug\")),\n (2, 3),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\"\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2, skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_arg_and_kwarg_TestMarkersWithParametrization.test_xfail_with_arg_and_kwarg.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_arg_and_kwarg_TestMarkersWithParametrization.test_xfail_with_arg_and_kwarg.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1704, "end_line": 1718, "span_ids": ["TestMarkersWithParametrization.test_xfail_with_arg_and_kwarg"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_xfail_with_arg_and_kwarg(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n (1, 2),\n pytest.param(1, 3, marks=pytest.mark.xfail(\"True\", reason=\"some bug\")),\n (2, 3),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\"\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2, skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_passing_is_xpass_TestMarkersWithParametrization.test_xfail_passing_is_xpass.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_passing_is_xpass_TestMarkersWithParametrization.test_xfail_passing_is_xpass.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1720, "end_line": 1740, "span_ids": ["TestMarkersWithParametrization.test_xfail_passing_is_xpass"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n @pytest.mark.parametrize(\"strict\", [True, False])\n def test_xfail_passing_is_xpass(self, pytester: Pytester, strict: bool) -> None:\n s = \"\"\"\n import pytest\n\n m = pytest.mark.xfail(\"sys.version_info > (0, 0, 0)\", reason=\"some bug\", strict={strict})\n\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n (1, 2),\n pytest.param(2, 3, marks=m),\n (3, 4),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\".format(\n strict=strict\n )\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n passed, failed = (2, 1) if strict else (3, 0)\n reprec.assertoutcome(passed=passed, failed=failed)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_marked_value_TestMarkersWithParametrization.test_parametrize_marked_value.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_marked_value_TestMarkersWithParametrization.test_parametrize_marked_value.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1785, "end_line": 1808, "span_ids": ["TestMarkersWithParametrization.test_parametrize_marked_value"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n @pytest.mark.parametrize(\"strict\", [True, False])\n def test_parametrize_marked_value(self, pytester: Pytester, strict: bool) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n pytest.param(\n 2,3,\n marks=pytest.mark.xfail(\"sys.version_info > (0, 0, 0)\", reason=\"some bug\", strict={strict}),\n ),\n pytest.param(\n 2,3,\n marks=[pytest.mark.xfail(\"sys.version_info > (0, 0, 0)\", reason=\"some bug\", strict={strict})],\n 
),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\".format(\n strict=strict\n )\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n passed, failed = (0, 2) if strict else (2, 0)\n reprec.assertoutcome(passed=passed, failed=failed)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_pytest_make_parametrize_id_TestMarkersWithParametrization.test_pytest_make_parametrize_id.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_pytest_make_parametrize_id_TestMarkersWithParametrization.test_pytest_make_parametrize_id.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1810, "end_line": 1827, "span_ids": ["TestMarkersWithParametrization.test_pytest_make_parametrize_id"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_pytest_make_parametrize_id(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_make_parametrize_id(config, val):\n return str(val * 2)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize(\"x\", range(2))\n def test_func(x):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines([\"*test_func*0*PASS*\", \"*test_func*2*PASS*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_as_contextmanager_TestRaises.test_raises_as_contextmanager.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_as_contextmanager_TestRaises.test_raises_as_contextmanager.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 54, "end_line": 80, "span_ids": ["TestRaises.test_raises_as_contextmanager"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_raises_as_contextmanager(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n import _pytest._code\n\n def test_simple():\n with pytest.raises(ZeroDivisionError) as excinfo:\n assert isinstance(excinfo, _pytest._code.ExceptionInfo)\n 1/0\n print(excinfo)\n 
assert excinfo.type == ZeroDivisionError\n assert isinstance(excinfo.value, ZeroDivisionError)\n\n def test_noraise():\n with pytest.raises(pytest.raises.Exception):\n with pytest.raises(ValueError):\n int()\n\n def test_raise_wrong_exception_passes_by():\n with pytest.raises(ZeroDivisionError):\n with pytest.raises(ValueError):\n 1/0\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*3 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_does_not_raise_TestRaises.test_does_not_raise.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_does_not_raise_TestRaises.test_does_not_raise.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 82, "end_line": 105, "span_ids": ["TestRaises.test_does_not_raise"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_does_not_raise(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from contextlib import contextmanager\n import pytest\n\n @contextmanager\n def does_not_raise():\n yield\n\n @pytest.mark.parametrize('example_input,expectation', [\n (3, does_not_raise()),\n (2, does_not_raise()),\n (1, does_not_raise()),\n (0, pytest.raises(ZeroDivisionError)),\n ])\n def test_division(example_input, expectation):\n '''Test how much I know division.'''\n with expectation:\n assert (6 / example_input) is not None\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*4 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_does_not_raise_does_raise_TestRaises.test_does_not_raise_does_raise.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_does_not_raise_does_raise_TestRaises.test_does_not_raise_does_raise.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 107, "end_line": 128, "span_ids": ["TestRaises.test_does_not_raise_does_raise"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_does_not_raise_does_raise(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from contextlib import 
contextmanager\n import pytest\n\n @contextmanager\n def does_not_raise():\n yield\n\n @pytest.mark.parametrize('example_input,expectation', [\n (0, does_not_raise()),\n (1, pytest.raises(ZeroDivisionError)),\n ])\n def test_division(example_input, expectation):\n '''Test how much I know division.'''\n with expectation:\n assert (6 / example_input) is not None\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 failed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_fixtures_in_conftest_test_fixtures_in_conftest.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_fixtures_in_conftest_test_fixtures_in_conftest.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/show_fixtures_per_test.py", "file_name": "show_fixtures_per_test.py", "file_type": "text/x-python", "category": "test", "start_line": 39, "end_line": 83, "span_ids": ["test_fixtures_in_conftest"], "tokens": 273}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixtures_in_conftest(pytester: Pytester) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture\n def arg1():\n \"\"\"arg1 docstring\"\"\"\n @pytest.fixture\n def arg2():\n \"\"\"arg2 docstring\"\"\"\n @pytest.fixture\n def arg3(arg1, arg2):\n \"\"\"arg3\n docstring\n \"\"\"\n '''\n )\n p = pytester.makepyfile(\n \"\"\"\n def test_arg2(arg2):\n pass\n def test_arg3(arg3):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--fixtures-per-test\", p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"*fixtures used by test_arg2*\",\n \"*(test_fixtures_in_conftest.py:2)*\",\n \"arg2\",\n \" arg2 docstring\",\n \"*fixtures used by test_arg3*\",\n \"*(test_fixtures_in_conftest.py:4)*\",\n \"arg1\",\n \" arg1 docstring\",\n \"arg2\",\n \" arg2 docstring\",\n \"arg3\",\n \" arg3\",\n \" docstring\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_should_show_fixtures_used_by_test_test_should_show_fixtures_used_by_test.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_should_show_fixtures_used_by_test_test_should_show_fixtures_used_by_test.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/show_fixtures_per_test.py", "file_name": "show_fixtures_per_test.py", "file_type": "text/x-python", "category": "test", "start_line": 86, "end_line": 120, "span_ids": ["test_should_show_fixtures_used_by_test"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_should_show_fixtures_used_by_test(pytester: Pytester) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture\n def arg1():\n \"\"\"arg1 from conftest\"\"\"\n @pytest.fixture\n def arg2():\n \"\"\"arg2 from conftest\"\"\"\n '''\n )\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def arg1():\n \"\"\"arg1 from testmodule\"\"\"\n def test_args(arg1, arg2):\n pass\n '''\n )\n result = pytester.runpytest(\"--fixtures-per-test\", p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"*fixtures used by test_args*\",\n \"*(test_should_show_fixtures_used_by_test.py:6)*\",\n \"arg1\",\n \" arg1 from testmodule\",\n \"arg2\",\n \" arg2 from conftest\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_verbose_include_private_fixtures_and_loc_test_verbose_include_private_fixtures_and_loc.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_verbose_include_private_fixtures_and_loc_test_verbose_include_private_fixtures_and_loc.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/show_fixtures_per_test.py", "file_name": "show_fixtures_per_test.py", "file_type": "text/x-python", "category": "test", "start_line": 123, "end_line": 159, "span_ids": ["test_verbose_include_private_fixtures_and_loc"], "tokens": 263}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_verbose_include_private_fixtures_and_loc(pytester: Pytester) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture\n def _arg1():\n \"\"\"_arg1 from conftest\"\"\"\n @pytest.fixture\n def arg2(_arg1):\n \"\"\"arg2 from conftest\"\"\"\n '''\n )\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def arg3():\n \"\"\"arg3 from testmodule\"\"\"\n def test_args(arg2, arg3):\n pass\n '''\n )\n result = pytester.runpytest(\"--fixtures-per-test\", \"-v\", p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"*fixtures used by test_args*\",\n \"*(test_verbose_include_private_fixtures_and_loc.py:6)*\",\n \"_arg1 -- conftest.py:3\",\n \" _arg1 from conftest\",\n \"arg2 -- conftest.py:6\",\n \" arg2 from conftest\",\n \"arg3 -- test_verbose_include_private_fixtures_and_loc.py:3\",\n \" arg3 from testmodule\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_doctest_items_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_test_doctest_items_", "embedding": null, "metadata": {"file_path": "testing/python/show_fixtures_per_test.py", 
"file_name": "show_fixtures_per_test.py", "file_type": "text/x-python", "category": "test", "start_line": 162, "end_line": 184, "span_ids": ["test_doctest_items"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_doctest_items(pytester: Pytester) -> None:\n pytester.makepyfile(\n '''\n def foo():\n \"\"\"\n >>> 1 + 1\n 2\n \"\"\"\n '''\n )\n pytester.maketxtfile(\n \"\"\"\n >>> 1 + 1\n 2\n \"\"\"\n )\n result = pytester.runpytest(\n \"--fixtures-per-test\", \"--doctest-modules\", \"--doctest-glob=*.txt\", \"-v\"\n )\n assert result.ret == 0\n\n result.stdout.fnmatch_lines([\"*collected 2 items*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_FilesCompleter_FilesCompleter.__call__.return.completion": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_FilesCompleter_FilesCompleter.__call__.return.completion", "embedding": null, "metadata": {"file_path": "testing/test_argcomplete.py", "file_name": "test_argcomplete.py", "file_type": "text/x-python", "category": "test", "start_line": 35, "end_line": 65, "span_ids": ["FilesCompleter", "FilesCompleter.__call__"], "tokens": 291}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FilesCompleter:\n \"\"\"File completer class, optionally takes a list of allowed extensions.\"\"\"\n\n def __init__(self, allowednames=(), directories=True):\n # Fix if someone passes in a string instead of a list\n if type(allowednames) is str:\n allowednames = [allowednames]\n\n self.allowednames = [x.lstrip(\"*\").lstrip(\".\") for x in allowednames]\n self.directories = directories\n\n def __call__(self, prefix, **kwargs):\n completion = []\n if self.allowednames:\n if self.directories:\n files = _wrapcall([\"bash\", \"-c\", f\"compgen -A directory -- '{prefix}'\"])\n completion += [f + \"/\" for f in files]\n for x in self.allowednames:\n completion += _wrapcall(\n [\"bash\", \"-c\", f\"compgen -A file -X '!*.{x}' -- '{prefix}'\"]\n )\n else:\n completion += _wrapcall([\"bash\", \"-c\", f\"compgen -A file -- '{prefix}'\"])\n\n anticomp = _wrapcall([\"bash\", \"-c\", f\"compgen -A directory -- '{prefix}'\"])\n\n completion = list(set(completion) - set(anticomp))\n\n if self.directories:\n completion += [f + \"/\" for f in anticomp]\n return completion", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_TestArgComplete_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_TestArgComplete_", "embedding": null, "metadata": {"file_path": 
"testing/test_argcomplete.py", "file_name": "test_argcomplete.py", "file_type": "text/x-python", "category": "test", "start_line": 68, "end_line": 96, "span_ids": ["TestArgComplete.test_compare_with_compgen", "TestArgComplete.test_remove_dir_prefix", "TestArgComplete"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestArgComplete:\n @pytest.mark.skipif(\"sys.platform in ('win32', 'darwin')\")\n def test_compare_with_compgen(\n self, tmp_path: Path, monkeypatch: MonkeyPatch\n ) -> None:\n from _pytest._argcomplete import FastFilesCompleter\n\n ffc = FastFilesCompleter()\n fc = FilesCompleter()\n\n monkeypatch.chdir(tmp_path)\n\n assert equal_with_bash(\"\", ffc, fc, out=sys.stdout)\n\n tmp_path.cwd().joinpath(\"data\").touch()\n\n for x in [\"d\", \"data\", \"doesnotexist\", \"\"]:\n assert equal_with_bash(x, ffc, fc, out=sys.stdout)\n\n @pytest.mark.skipif(\"sys.platform in ('win32', 'darwin')\")\n def test_remove_dir_prefix(self):\n \"\"\"This is not compatible with compgen but it is with bash itself: ls /usr/.\"\"\"\n from _pytest._argcomplete import FastFilesCompleter\n\n ffc = FastFilesCompleter()\n fc = FilesCompleter()\n for x in \"/usr/\".split():\n assert not equal_with_bash(x, ffc, fc, out=sys.stdout)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_TestImportHookInstallation.test_pytest_plugins_rewrite.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_TestImportHookInstallation.test_pytest_plugins_rewrite.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 96, "end_line": 123, "span_ids": ["TestImportHookInstallation.test_pytest_plugins_rewrite"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportHookInstallation:\n\n @pytest.mark.parametrize(\"mode\", [\"plain\", \"rewrite\"])\n def test_pytest_plugins_rewrite(self, pytester: Pytester, mode) -> None:\n contents = {\n \"conftest.py\": \"\"\"\n pytest_plugins = ['ham']\n \"\"\",\n \"ham.py\": \"\"\"\n import pytest\n @pytest.fixture\n def check_first():\n def check(values, value):\n assert values.pop(0) == value\n return check\n \"\"\",\n \"test_foo.py\": \"\"\"\n def test_foo(check_first):\n check_first([10, 30], 30)\n \"\"\",\n }\n pytester.makepyfile(**contents)\n result = pytester.runpytest_subprocess(\"--assert=%s\" % mode)\n if mode == \"plain\":\n expected = \"E AssertionError\"\n elif mode == \"rewrite\":\n expected = \"*assert 10 == 30*\"\n else:\n assert 0\n 
result.stdout.fnmatch_lines([expected])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 125, "end_line": 150, "span_ids": ["TestImportHookInstallation.test_pytest_plugins_rewrite_module_names"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportHookInstallation:\n\n @pytest.mark.parametrize(\"mode\", [\"str\", \"list\"])\n def test_pytest_plugins_rewrite_module_names(\n self, pytester: Pytester, mode\n ) -> None:\n \"\"\"Test that pluginmanager correct marks pytest_plugins variables\n for assertion rewriting if they are defined as plain strings or\n list of strings (#1888).\n \"\"\"\n plugins = '\"ham\"' if mode == \"str\" else '[\"ham\"]'\n contents = {\n \"conftest.py\": \"\"\"\n pytest_plugins = {plugins}\n \"\"\".format(\n plugins=plugins\n ),\n \"ham.py\": \"\"\"\n import pytest\n \"\"\",\n \"test_foo.py\": \"\"\"\n def test_foo(pytestconfig):\n assert 'ham' in pytestconfig.pluginmanager.rewrite_hook._must_rewrite\n \"\"\",\n }\n pytester.makepyfile(**contents)\n result = pytester.runpytest_subprocess(\"--assert=rewrite\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_correctly_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_correctly.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_correctly_TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_correctly.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 152, "end_line": 170, "span_ids": ["TestImportHookInstallation.test_pytest_plugins_rewrite_module_names_correctly"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestImportHookInstallation:\n\n def test_pytest_plugins_rewrite_module_names_correctly(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Test that we match files correctly when they are marked for rewriting (#2939).\"\"\"\n contents = {\n \"conftest.py\": \"\"\"\\\n pytest_plugins = \"ham\"\n \"\"\",\n \"ham.py\": \"\",\n \"hamster.py\": \"\",\n \"test_foo.py\": \"\"\"\\\n def test_foo(pytestconfig):\n assert pytestconfig.pluginmanager.rewrite_hook.find_spec('ham') is not None\n assert pytestconfig.pluginmanager.rewrite_hook.find_spec('hamster') is None\n \"\"\",\n }\n pytester.makepyfile(**contents)\n result = pytester.runpytest_subprocess(\"--assert=rewrite\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_installed_plugin_rewrite_TestImportHookInstallation.test_installed_plugin_rewrite.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_installed_plugin_rewrite_TestImportHookInstallation.test_installed_plugin_rewrite.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 172, "end_line": 243, "span_ids": ["TestImportHookInstallation.test_installed_plugin_rewrite"], "tokens": 488}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportHookInstallation:\n\n @pytest.mark.parametrize(\"mode\", [\"plain\", \"rewrite\"])\n def test_installed_plugin_rewrite(\n self, pytester: Pytester, mode, monkeypatch\n ) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n # Make sure the hook is installed early enough so that plugins\n # installed via setuptools are rewritten.\n pytester.mkdir(\"hampkg\")\n contents = {\n \"hampkg/__init__.py\": \"\"\"\\\n import pytest\n\n @pytest.fixture\n def check_first2():\n def check(values, value):\n assert values.pop(0) == value\n return check\n \"\"\",\n \"spamplugin.py\": \"\"\"\\\n import pytest\n from hampkg import check_first2\n\n @pytest.fixture\n def check_first():\n def check(values, value):\n assert values.pop(0) == value\n return check\n \"\"\",\n \"mainwrapper.py\": \"\"\"\\\n import pytest\n from _pytest.compat import importlib_metadata\n\n class DummyEntryPoint(object):\n name = 'spam'\n module_name = 'spam.py'\n group = 'pytest11'\n\n def load(self):\n import spamplugin\n return spamplugin\n\n class DummyDistInfo(object):\n version = '1.0'\n files = ('spamplugin.py', 'hampkg/__init__.py')\n entry_points = (DummyEntryPoint(),)\n metadata = {'name': 'foo'}\n\n def distributions():\n return (DummyDistInfo(),)\n\n importlib_metadata.distributions = distributions\n pytest.main()\n \"\"\",\n \"test_foo.py\": \"\"\"\\\n def test(check_first):\n check_first([10, 30], 30)\n\n def test2(check_first2):\n check_first([10, 30], 30)\n \"\"\",\n }\n pytester.makepyfile(**contents)\n result = pytester.run(\n sys.executable, 
\"mainwrapper.py\", \"-s\", \"--assert=%s\" % mode\n )\n if mode == \"plain\":\n expected = \"E AssertionError\"\n elif mode == \"rewrite\":\n expected = \"*assert 10 == 30*\"\n else:\n assert 0\n result.stdout.fnmatch_lines([expected])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_rewrite_ast_TestImportHookInstallation.test_register_assert_rewrite_checks_types.pytest_register_assert_re": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_rewrite_ast_TestImportHookInstallation.test_register_assert_rewrite_checks_types.pytest_register_assert_re", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 245, "end_line": 295, "span_ids": ["TestImportHookInstallation.test_rewrite_ast", "TestImportHookInstallation.test_register_assert_rewrite_checks_types"], "tokens": 346}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportHookInstallation:\n\n def test_rewrite_ast(self, pytester: Pytester) -> None:\n pytester.mkdir(\"pkg\")\n contents = {\n \"pkg/__init__.py\": \"\"\"\n import pytest\n pytest.register_assert_rewrite('pkg.helper')\n \"\"\",\n \"pkg/helper.py\": \"\"\"\n def tool():\n a, b = 2, 3\n assert a == b\n \"\"\",\n \"pkg/plugin.py\": \"\"\"\n import pytest, pkg.helper\n @pytest.fixture\n def tool():\n return pkg.helper.tool\n \"\"\",\n \"pkg/other.py\": \"\"\"\n values = [3, 2]\n def tool():\n assert values.pop() == 3\n \"\"\",\n \"conftest.py\": \"\"\"\n pytest_plugins = ['pkg.plugin']\n \"\"\",\n \"test_pkg.py\": \"\"\"\n import pkg.other\n def test_tool(tool):\n tool()\n def test_other():\n pkg.other.tool()\n \"\"\",\n }\n pytester.makepyfile(**contents)\n result = pytester.runpytest_subprocess(\"--assert=rewrite\")\n result.stdout.fnmatch_lines(\n [\n \">*assert a == b*\",\n \"E*assert 2 == 3*\",\n \">*assert values.pop() == 3*\",\n \"E*AssertionError\",\n ]\n )\n\n def test_register_assert_rewrite_checks_types(self) -> None:\n with pytest.raises(TypeError):\n pytest.register_assert_rewrite([\"pytest_tests_internal_non_existing\"]) # type: ignore\n pytest.register_assert_rewrite(\n \"pytest_tests_internal_non_existing\", \"pytest_tests_internal_non_existing2\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_TestAssert_reprcompare.test_list.assert_len_expl_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_TestAssert_reprcompare.test_list.assert_len_expl_1", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 333, "end_line": 
395, "span_ids": ["TestAssert_reprcompare.test_summary", "TestAssert_reprcompare", "TestAssert_reprcompare.test_text_skipping_verbose", "TestAssert_reprcompare.test_bytes_diff_verbose", "TestAssert_reprcompare.test_text_skipping", "TestAssert_reprcompare.test_bytes_diff_normal", "TestAssert_reprcompare.test_different_types", "TestAssert_reprcompare.test_list", "TestAssert_reprcompare.test_multiline_text_diff", "TestAssert_reprcompare.test_text_diff"], "tokens": 568}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n def test_different_types(self) -> None:\n assert callequal([0, 1], \"foo\") is None\n\n def test_summary(self) -> None:\n lines = callequal([0, 1], [0, 2])\n assert lines is not None\n summary = lines[0]\n assert len(summary) < 65\n\n def test_text_diff(self) -> None:\n assert callequal(\"spam\", \"eggs\") == [\n \"'spam' == 'eggs'\",\n \"- eggs\",\n \"+ spam\",\n ]\n\n def test_text_skipping(self) -> None:\n lines = callequal(\"a\" * 50 + \"spam\", \"a\" * 50 + \"eggs\")\n assert lines is not None\n assert \"Skipping\" in lines[1]\n for line in lines:\n assert \"a\" * 50 not in line\n\n def test_text_skipping_verbose(self) -> None:\n lines = callequal(\"a\" * 50 + \"spam\", \"a\" * 50 + \"eggs\", verbose=1)\n assert lines is not None\n assert \"- \" + \"a\" * 50 + \"eggs\" in lines\n assert \"+ \" + \"a\" * 50 + \"spam\" in lines\n\n def test_multiline_text_diff(self) -> None:\n left = \"foo\\nspam\\nbar\"\n right = \"foo\\neggs\\nbar\"\n diff = callequal(left, right)\n assert diff is not None\n assert \"- eggs\" in diff\n assert \"+ spam\" in diff\n\n def test_bytes_diff_normal(self) -> None:\n \"\"\"Check special handling for bytes diff (#5260)\"\"\"\n diff = callequal(b\"spam\", b\"eggs\")\n\n assert diff == [\n \"b'spam' == b'eggs'\",\n \"At index 0 diff: b's' != b'e'\",\n \"Use -v to get the full diff\",\n ]\n\n def test_bytes_diff_verbose(self) -> None:\n \"\"\"Check special handling for bytes diff (#5260)\"\"\"\n diff = callequal(b\"spam\", b\"eggs\", verbose=1)\n assert diff == [\n \"b'spam' == b'eggs'\",\n \"At index 0 diff: b's' != b'e'\",\n \"Full diff:\",\n \"- b'eggs'\",\n \"+ b'spam'\",\n ]\n\n def test_list(self) -> None:\n expl = callequal([0, 1], [0, 2])\n assert expl is not None\n assert len(expl) > 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_omitting_with_verbosity_1_TestAssert_reprcompare.test_dict_omitting_with_verbosity_2.assert_lines_2_b_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_omitting_with_verbosity_1_TestAssert_reprcompare.test_dict_omitting_with_verbosity_2.assert_lines_2_b_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 579, "end_line": 593, "span_ids": ["TestAssert_reprcompare.test_dict_omitting_with_verbosity_2", 
"TestAssert_reprcompare.test_dict_omitting_with_verbosity_1"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_dict_omitting_with_verbosity_1(self) -> None:\n \"\"\"Ensure differing items are visible for verbosity=1 (#1512).\"\"\"\n lines = callequal({\"a\": 0, \"b\": 1}, {\"a\": 1, \"b\": 1}, verbose=1)\n assert lines is not None\n assert lines[1].startswith(\"Omitting 1 identical item\")\n assert lines[2].startswith(\"Differing items\")\n assert lines[3] == \"{'a': 0} != {'a': 1}\"\n assert \"Common items\" not in lines\n\n def test_dict_omitting_with_verbosity_2(self) -> None:\n lines = callequal({\"a\": 0, \"b\": 1}, {\"a\": 1, \"b\": 1}, verbose=2)\n assert lines is not None\n assert lines[1].startswith(\"Common items:\")\n assert \"Omitting\" not in lines[1]\n assert lines[2] == \"{'b': 1}\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_different_items_TestAssert_reprcompare.test_dict_different_items.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_different_items_TestAssert_reprcompare.test_dict_different_items.None_1", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 595, "end_line": 617, "span_ids": ["TestAssert_reprcompare.test_dict_different_items"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_dict_different_items(self) -> None:\n lines = callequal({\"a\": 0}, {\"b\": 1, \"c\": 2}, verbose=2)\n assert lines == [\n \"{'a': 0} == {'b': 1, 'c': 2}\",\n \"Left contains 1 more item:\",\n \"{'a': 0}\",\n \"Right contains 2 more items:\",\n \"{'b': 1, 'c': 2}\",\n \"Full diff:\",\n \"- {'b': 1, 'c': 2}\",\n \"+ {'a': 0}\",\n ]\n lines = callequal({\"b\": 1, \"c\": 2}, {\"a\": 0}, verbose=2)\n assert lines == [\n \"{'b': 1, 'c': 2} == {'a': 0}\",\n \"Left contains 2 more items:\",\n \"{'b': 1, 'c': 2}\",\n \"Right contains 1 more item:\",\n \"{'a': 0}\",\n \"Full diff:\",\n \"- {'a': 0}\",\n \"+ {'b': 1, 'c': 2}\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_sequence_different_items_TestAssert_reprcompare.test_sequence_different_items.None_1": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_sequence_different_items_TestAssert_reprcompare.test_sequence_different_items.None_1", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 619, "end_line": 637, "span_ids": ["TestAssert_reprcompare.test_sequence_different_items"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_sequence_different_items(self) -> None:\n lines = callequal((1, 2), (3, 4, 5), verbose=2)\n assert lines == [\n \"(1, 2) == (3, 4, 5)\",\n \"At index 0 diff: 1 != 3\",\n \"Right contains one more item: 5\",\n \"Full diff:\",\n \"- (3, 4, 5)\",\n \"+ (1, 2)\",\n ]\n lines = callequal((1, 2, 3), (4,), verbose=2)\n assert lines == [\n \"(1, 2, 3) == (4,)\",\n \"At index 0 diff: 1 != 4\",\n \"Left contains 2 more items, first extra item: 2\",\n \"Full diff:\",\n \"- (4,)\",\n \"+ (1, 2, 3)\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_set_TestAssert_reprcompare.test_Sequence.assert_len_expl_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_set_TestAssert_reprcompare.test_Sequence.assert_len_expl_1", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 639, "end_line": 672, "span_ids": ["TestAssert_reprcompare.test_frozenzet", "TestAssert_reprcompare.test_set", "TestAssert_reprcompare.test_Sequence", "TestAssert_reprcompare.test_Sequence.TestSequence.__init__", "TestAssert_reprcompare.test_Sequence.TestSequence"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_set(self) -> None:\n expl = callequal({0, 1}, {0, 2})\n assert expl is not None\n assert len(expl) > 1\n\n def test_frozenzet(self) -> None:\n expl = callequal(frozenset([0, 1]), {0, 2})\n assert expl is not None\n assert len(expl) > 1\n\n def test_Sequence(self) -> None:\n # Test comparing with a Sequence subclass.\n class TestSequence(MutableSequence[int]):\n def __init__(self, iterable):\n self.elements = list(iterable)\n\n def __getitem__(self, item):\n return self.elements[item]\n\n def __len__(self):\n return len(self.elements)\n\n def __setitem__(self, item, value):\n pass\n\n def __delitem__(self, item):\n pass\n\n def insert(self, item, index):\n pass\n\n expl = callequal(TestSequence([0, 1]), list([0, 2]))\n assert expl is not None\n assert len(expl) > 1", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass_TestAssert_reprcompare_dataclass.test_dataclasses.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass_TestAssert_reprcompare_dataclass.test_dataclasses.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 778, "end_line": 796, "span_ids": ["TestAssert_reprcompare_dataclass.test_dataclasses", "TestAssert_reprcompare_dataclass"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_dataclass:\n @pytest.mark.skipif(sys.version_info < (3, 7), reason=\"Dataclasses in Python3.7+\")\n def test_dataclasses(self, pytester: Pytester) -> None:\n p = pytester.copy_example(\"dataclasses/test_compare_dataclasses.py\")\n result = pytester.runpytest(p)\n result.assert_outcomes(failed=1, passed=0)\n result.stdout.fnmatch_lines(\n [\n \"E Omitting 1 identical items, use -vv to show\",\n \"E Differing attributes:\",\n \"E ['field_b']\",\n \"E \",\n \"E Drill down into differing attribute field_b:\",\n \"E field_b: 'b' != 'c'...\",\n \"E \",\n \"E ...Full output truncated (3 lines hidden), use '-vv' to show\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_dataclasses_verbose_TestAssert_reprcompare_dataclass.test_dataclasses_verbose.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_dataclasses_verbose_TestAssert_reprcompare_dataclass.test_dataclasses_verbose.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 850, "end_line": 862, "span_ids": ["TestAssert_reprcompare_dataclass.test_dataclasses_verbose"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_dataclass:\n\n @pytest.mark.skipif(sys.version_info < (3, 7), reason=\"Dataclasses in Python3.7+\")\n def test_dataclasses_verbose(self, pytester: Pytester) -> None:\n p = pytester.copy_example(\"dataclasses/test_compare_dataclasses_verbose.py\")\n result = pytester.runpytest(p, \"-vv\")\n result.assert_outcomes(failed=1, passed=0)\n result.stdout.fnmatch_lines(\n [\n \"*Matching attributes:*\",\n 
\"*['field_a']*\",\n \"*Differing attributes:*\",\n \"*field_b: 'b' != 'c'*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_dataclasses_with_attribute_comparison_off_TestAssert_reprcompare_dataclass.test_comparing_two_different_data_classes.result_assert_outcomes_fa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_dataclasses_with_attribute_comparison_off_TestAssert_reprcompare_dataclass.test_comparing_two_different_data_classes.result_assert_outcomes_fa", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 864, "end_line": 880, "span_ids": ["TestAssert_reprcompare_dataclass.test_comparing_two_different_data_classes", "TestAssert_reprcompare_dataclass.test_dataclasses_with_attribute_comparison_off"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_dataclass:\n\n @pytest.mark.skipif(sys.version_info < (3, 7), reason=\"Dataclasses in Python3.7+\")\n def test_dataclasses_with_attribute_comparison_off(\n self, pytester: Pytester\n ) -> None:\n p = pytester.copy_example(\n \"dataclasses/test_compare_dataclasses_field_comparison_off.py\"\n )\n result = pytester.runpytest(p, \"-vv\")\n result.assert_outcomes(failed=0, passed=1)\n\n @pytest.mark.skipif(sys.version_info < (3, 7), reason=\"Dataclasses in Python3.7+\")\n def test_comparing_two_different_data_classes(self, pytester: Pytester) -> None:\n p = pytester.copy_example(\n \"dataclasses/test_compare_two_different_dataclasses.py\"\n )\n result = pytester.runpytest(p, \"-vv\")\n result.assert_outcomes(failed=0, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_with_attribute_comparison_off_TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes.assert_lines_is_None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_with_attribute_comparison_off_TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes.assert_lines_is_None", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 955, "end_line": 988, "span_ids": ["TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes", "TestAssert_reprcompare_attrsclass.test_attrs_with_attribute_comparison_off", "TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes.SimpleDataObjectOne:2", 
"TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes.SimpleDataObjectTwo", "TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes.SimpleDataObjectOne", "TestAssert_reprcompare_attrsclass.test_comparing_two_different_attrs_classes.SimpleDataObjectTwo:2", "TestAssert_reprcompare_attrsclass.test_attrs_with_attribute_comparison_off.SimpleDataObject", "TestAssert_reprcompare_attrsclass.test_attrs_with_attribute_comparison_off.SimpleDataObject:2"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_attrsclass:\n\n def test_attrs_with_attribute_comparison_off(self) -> None:\n @attr.s\n class SimpleDataObject:\n field_a = attr.ib()\n field_b = attr.ib(eq=False)\n\n left = SimpleDataObject(1, \"b\")\n right = SimpleDataObject(1, \"b\")\n\n lines = callequal(left, right, verbose=2)\n print(lines)\n assert lines is not None\n assert lines[2].startswith(\"Matching attributes:\")\n assert \"Omitting\" not in lines[1]\n assert lines[3] == \"['field_a']\"\n for line in lines[3:]:\n assert \"field_b\" not in line\n\n def test_comparing_two_different_attrs_classes(self) -> None:\n @attr.s\n class SimpleDataObjectOne:\n field_a = attr.ib()\n field_b = attr.ib()\n\n @attr.s\n class SimpleDataObjectTwo:\n field_a = attr.ib()\n field_b = attr.ib()\n\n left = SimpleDataObjectOne(1, \"b\")\n right = SimpleDataObjectTwo(1, \"c\")\n\n lines = callequal(left, right)\n assert lines is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation_TestFormatExplanation.test_fmt_newline_escaped.assert_util_format_explan": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation_TestFormatExplanation.test_fmt_newline_escaped.assert_util_format_explan", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1029, "end_line": 1069, "span_ids": ["TestFormatExplanation.test_fmt_and", "TestFormatExplanation.test_fmt_simple", "TestFormatExplanation.test_fmt_newline_escaped", "TestFormatExplanation.test_fmt_where", "TestFormatExplanation", "TestFormatExplanation.test_special_chars_full", "TestFormatExplanation.test_fmt_where_nested", "TestFormatExplanation.test_fmt_newline"], "tokens": 483}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormatExplanation:\n def test_special_chars_full(self, pytester: Pytester) -> None:\n # Issue 453, for the bug this would raise IndexError\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n assert '\\\\n}' == ''\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n 
result.stdout.fnmatch_lines([\"*AssertionError*\"])\n\n def test_fmt_simple(self) -> None:\n expl = \"assert foo\"\n assert util.format_explanation(expl) == \"assert foo\"\n\n def test_fmt_where(self) -> None:\n expl = \"\\n\".join([\"assert 1\", \"{1 = foo\", \"} == 2\"])\n res = \"\\n\".join([\"assert 1 == 2\", \" + where 1 = foo\"])\n assert util.format_explanation(expl) == res\n\n def test_fmt_and(self) -> None:\n expl = \"\\n\".join([\"assert 1\", \"{1 = foo\", \"} == 2\", \"{2 = bar\", \"}\"])\n res = \"\\n\".join([\"assert 1 == 2\", \" + where 1 = foo\", \" + and 2 = bar\"])\n assert util.format_explanation(expl) == res\n\n def test_fmt_where_nested(self) -> None:\n expl = \"\\n\".join([\"assert 1\", \"{1 = foo\", \"{foo = bar\", \"}\", \"} == 2\"])\n res = \"\\n\".join([\"assert 1 == 2\", \" + where 1 = foo\", \" + where foo = bar\"])\n assert util.format_explanation(expl) == res\n\n def test_fmt_newline(self) -> None:\n expl = \"\\n\".join(['assert \"foo\" == \"bar\"', \"~- foo\", \"~+ bar\"])\n res = \"\\n\".join(['assert \"foo\" == \"bar\"', \" - foo\", \" + bar\"])\n assert util.format_explanation(expl) == res\n\n def test_fmt_newline_escaped(self) -> None:\n expl = \"\\n\".join([\"assert foo == bar\", \"baz\"])\n res = \"assert foo == bar\\\\nbaz\"\n assert util.format_explanation(expl) == res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation.test_fmt_newline_before_where_TestFormatExplanation.test_fmt_newline_before_where.assert_util_format_explan": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation.test_fmt_newline_before_where_TestFormatExplanation.test_fmt_newline_before_where.assert_util_format_explan", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1071, "end_line": 1090, "span_ids": ["TestFormatExplanation.test_fmt_newline_before_where"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormatExplanation:\n\n def test_fmt_newline_before_where(self) -> None:\n expl = \"\\n\".join(\n [\n \"the assertion message here\",\n \">assert 1\",\n \"{1 = foo\",\n \"} == 2\",\n \"{2 = bar\",\n \"}\",\n ]\n )\n res = \"\\n\".join(\n [\n \"the assertion message here\",\n \"assert 1 == 2\",\n \" + where 1 = foo\",\n \" + and 2 = bar\",\n ]\n )\n assert util.format_explanation(expl) == res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation.test_fmt_multi_newline_before_where_TestFormatExplanation.test_fmt_multi_newline_before_where.assert_util_format_explan": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestFormatExplanation.test_fmt_multi_newline_before_where_TestFormatExplanation.test_fmt_multi_newline_before_where.assert_util_format_explan", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1092, "end_line": 1113, "span_ids": ["TestFormatExplanation.test_fmt_multi_newline_before_where"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormatExplanation:\n\n def test_fmt_multi_newline_before_where(self) -> None:\n expl = \"\\n\".join(\n [\n \"the assertion\",\n \"~message here\",\n \">assert 1\",\n \"{1 = foo\",\n \"} == 2\",\n \"{2 = bar\",\n \"}\",\n ]\n )\n res = \"\\n\".join(\n [\n \"the assertion\",\n \" message here\",\n \"assert 1 == 2\",\n \" + where 1 = foo\",\n \" + and 2 = bar\",\n ]\n )\n assert util.format_explanation(expl) == res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation_TestTruncateExplanation.test_doesnt_truncate_at_when_input_is_5_lines_and_LT_max_chars.assert_result_expl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation_TestTruncateExplanation.test_doesnt_truncate_at_when_input_is_5_lines_and_LT_max_chars.assert_result_expl", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1116, "end_line": 1129, "span_ids": ["TestTruncateExplanation.test_doesnt_truncate_at_when_input_is_5_lines_and_LT_max_chars", "TestTruncateExplanation", "TestTruncateExplanation.test_doesnt_truncate_when_input_is_empty_list"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTruncateExplanation:\n # The number of lines in the truncation explanation message. 
Used\n # to calculate that results have the expected length.\n LINES_IN_TRUNCATION_MSG = 2\n\n def test_doesnt_truncate_when_input_is_empty_list(self) -> None:\n expl: List[str] = []\n result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)\n assert result == expl\n\n def test_doesnt_truncate_at_when_input_is_5_lines_and_LT_max_chars(self) -> None:\n expl = [\"a\" * 100 for x in range(5)]\n result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)\n assert result == expl", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_given_list_of_empty_strings_TestTruncateExplanation.test_truncates_at_8_lines_when_given_list_of_empty_strings.assert_last_line_before_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_given_list_of_empty_strings_TestTruncateExplanation.test_truncates_at_8_lines_when_given_list_of_empty_strings.assert_last_line_before_t", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1131, "end_line": 1139, "span_ids": ["TestTruncateExplanation.test_truncates_at_8_lines_when_given_list_of_empty_strings"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTruncateExplanation:\n # The number of lines in the truncation explanation message. 
Used\n\n def test_truncates_at_8_lines_when_given_list_of_empty_strings(self) -> None:\n expl = [\"\" for x in range(50)]\n result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)\n assert result != expl\n assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG\n assert \"Full output truncated\" in result[-1]\n assert \"43 lines hidden\" in result[-1]\n last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]\n assert last_line_before_trunc_msg.endswith(\"...\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars.assert_last_line_before_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars.assert_last_line_before_t", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1141, "end_line": 1149, "span_ids": ["TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTruncateExplanation:\n # The number of lines in the truncation explanation message. 
Used\n\n def test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars(self) -> None:\n expl = [\"a\" for x in range(100)]\n result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)\n assert result != expl\n assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG\n assert \"Full output truncated\" in result[-1]\n assert \"93 lines hidden\" in result[-1]\n last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]\n assert last_line_before_trunc_msg.endswith(\"...\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars.assert_last_line_before_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars_TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars.assert_last_line_before_t", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1151, "end_line": 1159, "span_ids": ["TestTruncateExplanation.test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTruncateExplanation:\n # The number of lines in the truncation explanation message. 
Used\n\n def test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars(self) -> None:\n expl = [\"a\" * 80 for x in range(16)]\n result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)\n assert result != expl\n assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG\n assert \"Full output truncated\" in result[-1]\n assert \"9 lines hidden\" in result[-1]\n last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]\n assert last_line_before_trunc_msg.endswith(\"...\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars_TestTruncateExplanation.test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars.assert_last_line_before_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars_TestTruncateExplanation.test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars.assert_last_line_before_t", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1161, "end_line": 1169, "span_ids": ["TestTruncateExplanation.test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTruncateExplanation:\n # The number of lines in the truncation explanation message. 
Used\n\n def test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars(self) -> None:\n expl = [\"a\" * 250 for x in range(10)]\n result = truncate._truncate_explanation(expl, max_lines=8, max_chars=999)\n assert result != expl\n assert len(result) == 4 + self.LINES_IN_TRUNCATION_MSG\n assert \"Full output truncated\" in result[-1]\n assert \"7 lines hidden\" in result[-1]\n last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]\n assert last_line_before_trunc_msg.endswith(\"...\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_1_line_when_first_line_is_GT_max_chars_TestTruncateExplanation.test_truncates_at_1_line_when_first_line_is_GT_max_chars.assert_last_line_before_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_truncates_at_1_line_when_first_line_is_GT_max_chars_TestTruncateExplanation.test_truncates_at_1_line_when_first_line_is_GT_max_chars.assert_last_line_before_t", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1171, "end_line": 1179, "span_ids": ["TestTruncateExplanation.test_truncates_at_1_line_when_first_line_is_GT_max_chars"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTruncateExplanation:\n # The number of lines in the truncation explanation message. 
Used\n\n def test_truncates_at_1_line_when_first_line_is_GT_max_chars(self) -> None:\n expl = [\"a\" * 250 for x in range(1000)]\n result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)\n assert result != expl\n assert len(result) == 1 + self.LINES_IN_TRUNCATION_MSG\n assert \"Full output truncated\" in result[-1]\n assert \"1000 lines hidden\" in result[-1]\n last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]\n assert last_line_before_trunc_msg.endswith(\"...\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_full_output_truncated_TestTruncateExplanation.test_full_output_truncated.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestTruncateExplanation.test_full_output_truncated_TestTruncateExplanation.test_full_output_truncated.None_5", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1181, "end_line": 1216, "span_ids": ["TestTruncateExplanation.test_full_output_truncated"], "tokens": 303}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTruncateExplanation:\n # The number of lines in the truncation explanation message. 
Used\n\n def test_full_output_truncated(self, monkeypatch, pytester: Pytester) -> None:\n \"\"\"Test against full runpytest() output.\"\"\"\n\n line_count = 7\n line_len = 100\n expected_truncated_lines = 2\n pytester.makepyfile(\n r\"\"\"\n def test_many_lines():\n a = list([str(i)[0] * %d for i in range(%d)])\n b = a[::2]\n a = '\\n'.join(map(str, a))\n b = '\\n'.join(map(str, b))\n assert a == b\n \"\"\"\n % (line_len, line_count)\n )\n monkeypatch.delenv(\"CI\", raising=False)\n\n result = pytester.runpytest()\n # without -vv, truncate the message showing a few diff lines only\n result.stdout.fnmatch_lines(\n [\n \"*+ 1*\",\n \"*+ 3*\",\n \"*+ 5*\",\n \"*truncated (%d lines hidden)*use*-vv*\" % expected_truncated_lines,\n ]\n )\n\n result = pytester.runpytest(\"-vv\")\n result.stdout.fnmatch_lines([\"* 6*\"])\n\n monkeypatch.setenv(\"CI\", \"1\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 6*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_sequence_comparison_uses_repr_test_sequence_comparison_uses_repr.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_sequence_comparison_uses_repr_test_sequence_comparison_uses_repr.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1288, "end_line": 1307, "span_ids": ["test_sequence_comparison_uses_repr"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_sequence_comparison_uses_repr(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n x = set(\"hello x\")\n y = set(\"hello y\")\n assert x == y\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*def test_hello():*\",\n \"*assert x == y*\",\n \"*E*Extra items*left*\",\n \"*E*'x'*\",\n \"*E*Extra items*right*\",\n \"*E*'y'*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertrepr_loaded_per_dir_test_assertrepr_loaded_per_dir.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertrepr_loaded_per_dir_test_assertrepr_loaded_per_dir.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1310, "end_line": 1333, "span_ids": ["test_assertrepr_loaded_per_dir"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_assertrepr_loaded_per_dir(pytester: Pytester) -> None:\n pytester.makepyfile(test_base=[\"def test_base(): assert 1 == 2\"])\n a = pytester.mkdir(\"a\")\n a.joinpath(\"test_a.py\").write_text(\"def test_a(): assert 1 == 2\")\n a.joinpath(\"conftest.py\").write_text(\n 'def pytest_assertrepr_compare(): return [\"summary a\"]'\n )\n b = pytester.mkdir(\"b\")\n b.joinpath(\"test_b.py\").write_text(\"def test_b(): assert 1 == 2\")\n b.joinpath(\"conftest.py\").write_text(\n 'def pytest_assertrepr_compare(): return [\"summary b\"]'\n )\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*def test_base():*\",\n \"*E*assert 1 == 2*\",\n \"*def test_a():*\",\n \"*E*assert summary a*\",\n \"*def test_b():*\",\n \"*E*assert summary b*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_traceback_failure_test_traceback_failure.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_traceback_failure_test_traceback_failure.None_1", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1362, "end_line": 1414, "span_ids": ["test_traceback_failure"], "tokens": 348}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_traceback_failure(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def g():\n return 2\n def f(x):\n assert x == g()\n def test_onefails():\n f(3)\n \"\"\"\n )\n result = pytester.runpytest(p1, \"--tb=long\")\n result.stdout.fnmatch_lines(\n [\n \"*test_traceback_failure.py F*\",\n \"====* FAILURES *====\",\n \"____*____\",\n \"\",\n \" def test_onefails():\",\n \"> f(3)\",\n \"\",\n \"*test_*.py:6: \",\n \"_ _ _ *\",\n # \"\",\n \" def f(x):\",\n \"> assert x == g()\",\n \"E assert 3 == 2\",\n \"E + where 2 = g()\",\n \"\",\n \"*test_traceback_failure.py:4: AssertionError\",\n ]\n )\n\n result = pytester.runpytest(p1) # \"auto\"\n result.stdout.fnmatch_lines(\n [\n \"*test_traceback_failure.py F*\",\n \"====* FAILURES *====\",\n \"____*____\",\n \"\",\n \" def test_onefails():\",\n \"> f(3)\",\n \"\",\n \"*test_*.py:6: \",\n \"\",\n \" def f(x):\",\n \"> assert x == g()\",\n \"E assert 3 == 2\",\n \"E + where 2 = g()\",\n \"\",\n \"*test_traceback_failure.py:4: AssertionError\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_exception_handling_no_traceback_test_exception_handling_no_traceback.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_exception_handling_no_traceback_test_exception_handling_no_traceback.result_stdout_fnmatch_lin", "embedding": null, "metadata": 
{"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1417, "end_line": 1446, "span_ids": ["test_exception_handling_no_traceback"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_exception_handling_no_traceback(pytester: Pytester) -> None:\n \"\"\"Handle chain exceptions in tasks submitted by the multiprocess module (#1984).\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n from multiprocessing import Pool\n\n def process_task(n):\n assert n == 10\n\n def multitask_job():\n tasks = [1]\n with Pool(processes=1) as pool:\n pool.map(process_task, tasks)\n\n def test_multitask_job():\n multitask_job()\n \"\"\"\n )\n pytester.syspathinsert()\n result = pytester.runpytest(p1, \"--tb=long\")\n result.stdout.fnmatch_lines(\n [\n \"====* FAILURES *====\",\n \"*multiprocessing.pool.RemoteTraceback:*\",\n \"Traceback (most recent call last):\",\n \"*assert n == 10\",\n \"The above exception was the direct cause of the following exception:\",\n \"> * multitask_job()\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite_TestAssertionRewrite.test_place_initial_imports.None_9": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite_TestAssertionRewrite.test_place_initial_imports.None_9", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 67, "end_line": 108, "span_ids": ["TestAssertionRewrite.test_place_initial_imports", "TestAssertionRewrite"], "tokens": 422}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n def test_place_initial_imports(self) -> None:\n s = \"\"\"'Doc string'\\nother = stuff\"\"\"\n m = rewrite(s)\n assert isinstance(m.body[0], ast.Expr)\n for imp in m.body[1:3]:\n assert isinstance(imp, ast.Import)\n assert imp.lineno == 2\n assert imp.col_offset == 0\n assert isinstance(m.body[3], ast.Assign)\n s = \"\"\"from __future__ import division\\nother_stuff\"\"\"\n m = rewrite(s)\n assert isinstance(m.body[0], ast.ImportFrom)\n for imp in m.body[1:3]:\n assert isinstance(imp, ast.Import)\n assert imp.lineno == 2\n assert imp.col_offset == 0\n assert isinstance(m.body[3], ast.Expr)\n s = \"\"\"'doc string'\\nfrom __future__ import division\"\"\"\n m = rewrite(s)\n assert isinstance(m.body[0], ast.Expr)\n assert isinstance(m.body[1], ast.ImportFrom)\n for imp in m.body[2:4]:\n assert isinstance(imp, ast.Import)\n assert imp.lineno == 2\n assert imp.col_offset == 0\n s = \"\"\"'doc string'\\nfrom __future__ import division\\nother\"\"\"\n m = rewrite(s)\n assert 
isinstance(m.body[0], ast.Expr)\n assert isinstance(m.body[1], ast.ImportFrom)\n for imp in m.body[2:4]:\n assert isinstance(imp, ast.Import)\n assert imp.lineno == 3\n assert imp.col_offset == 0\n assert isinstance(m.body[4], ast.Expr)\n s = \"\"\"from . import relative\\nother_stuff\"\"\"\n m = rewrite(s)\n for imp in m.body[:2]:\n assert isinstance(imp, ast.Import)\n assert imp.lineno == 1\n assert imp.col_offset == 0\n assert isinstance(m.body[3], ast.Expr)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assert_already_has_message_TestAssertionRewrite.test_assertion_messages_bytes.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assert_already_has_message_TestAssertionRewrite.test_assertion_messages_bytes.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 263, "end_line": 336, "span_ids": ["TestAssertionRewrite.test_assertion_message_multiline", "TestAssertionRewrite.test_assert_already_has_message", "TestAssertionRewrite.test_assertion_message", "TestAssertionRewrite.test_assertion_message_tuple", "TestAssertionRewrite.test_assertion_message_escape", "TestAssertionRewrite.test_assertion_message_expr", "TestAssertionRewrite.test_assertion_messages_bytes"], "tokens": 611}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_assert_already_has_message(self) -> None:\n def f():\n assert False, \"something bad!\"\n\n assert getmsg(f) == \"AssertionError: something bad!\\nassert False\"\n\n def test_assertion_message(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n assert 1 == 2, \"The failure message\"\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\"*AssertionError*The failure message*\", \"*assert 1 == 2*\"]\n )\n\n def test_assertion_message_multiline(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n assert 1 == 2, \"A multiline\\\\nfailure message\"\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\"*AssertionError*A multiline*\", \"*failure message*\", \"*assert 1 == 2*\"]\n )\n\n def test_assertion_message_tuple(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n assert 1 == 2, (1, 2)\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\"*AssertionError*%s*\" % repr((1, 2)), \"*assert 1 == 2*\"]\n )\n\n def test_assertion_message_expr(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n assert 1 == 2, 1 + 2\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines([\"*AssertionError*3*\", \"*assert 1 == 2*\"])\n\n def 
test_assertion_message_escape(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n assert 1 == 2, 'To be escaped: %'\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\"*AssertionError: To be escaped: %\", \"*assert 1 == 2\"]\n )\n\n def test_assertion_messages_bytes(self, pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_bytes_assertion():\\n assert False, b'ohai!'\\n\")\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines([\"*AssertionError: b'ohai!'\", \"*assert False\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_call_TestAssertionRewrite.test_call.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_call_TestAssertionRewrite.test_call.None_6", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 504, "end_line": 573, "span_ids": ["TestAssertionRewrite.test_call"], "tokens": 401}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_call(self) -> None:\n def g(a=42, *args, **kwargs) -> bool:\n return False\n\n ns = {\"g\": g}\n\n def f1() -> None:\n assert g()\n\n assert (\n getmsg(f1, ns)\n == \"\"\"assert False\n + where False = g()\"\"\"\n )\n\n def f2() -> None:\n assert g(1)\n\n assert (\n getmsg(f2, ns)\n == \"\"\"assert False\n + where False = g(1)\"\"\"\n )\n\n def f3() -> None:\n assert g(1, 2)\n\n assert (\n getmsg(f3, ns)\n == \"\"\"assert False\n + where False = g(1, 2)\"\"\"\n )\n\n def f4() -> None:\n assert g(1, g=42)\n\n assert (\n getmsg(f4, ns)\n == \"\"\"assert False\n + where False = g(1, g=42)\"\"\"\n )\n\n def f5() -> None:\n assert g(1, 3, g=23)\n\n assert (\n getmsg(f5, ns)\n == \"\"\"assert False\n + where False = g(1, 3, g=23)\"\"\"\n )\n\n def f6() -> None:\n seq = [1, 2, 3]\n assert g(*seq)\n\n assert (\n getmsg(f6, ns)\n == \"\"\"assert False\n + where False = g(*[1, 2, 3])\"\"\"\n )\n\n def f7() -> None:\n x = \"a\"\n assert g(**{x: 2})\n\n assert (\n getmsg(f7, ns)\n == \"\"\"assert False\n + where False = g(**{'a': 2})\"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_len_TestAssertionRewrite.test_len.if_request_config_getopti.else_.assert_msg_assert_10_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_len_TestAssertionRewrite.test_len.if_request_config_getopti.else_.assert_msg_assert_10_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": 
"test", "start_line": 632, "end_line": 641, "span_ids": ["TestAssertionRewrite.test_len"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_len(self, request) -> None:\n def f():\n values = list(range(10))\n assert len(values) == 11\n\n msg = getmsg(f)\n if request.config.getoption(\"verbose\") > 0:\n assert msg == \"assert 10 == 11\\n +10\\n -11\"\n else:\n assert msg == \"assert 10 == 11\\n + where 10 = len([0, 1, 2, 3, 4, 5, ...])\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_custom_repr_TestAssertionRewrite.test_custom_repr_non_ascii.assert_UnicodeEncodeErro": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_custom_repr_TestAssertionRewrite.test_custom_repr_non_ascii.assert_UnicodeEncodeErro", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 693, "end_line": 726, "span_ids": ["TestAssertionRewrite.test_custom_repr.f.Foo:2", "TestAssertionRewrite.test_custom_repr_non_ascii.f.A", "TestAssertionRewrite.test_custom_repr_non_ascii", "TestAssertionRewrite.test_custom_repr", "TestAssertionRewrite.test_custom_repr_non_ascii.f.A:2", "TestAssertionRewrite.test_custom_repr.f.Foo"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_custom_repr(self, request) -> None:\n def f() -> None:\n class Foo:\n a = 1\n\n def __repr__(self):\n return \"\\n{ \\n~ \\n}\"\n\n f = Foo()\n assert 0 == f.a\n\n msg = getmsg(f)\n assert msg is not None\n lines = util._format_lines([msg])\n if request.config.getoption(\"verbose\") > 0:\n assert lines == [\"assert 0 == 1\\n +0\\n -1\"]\n else:\n assert lines == [\"assert 0 == 1\\n + where 1 = \\\\n{ \\\\n~ \\\\n}.a\"]\n\n def test_custom_repr_non_ascii(self) -> None:\n def f() -> None:\n class A:\n name = \"\u00e4\"\n\n def __repr__(self):\n return self.name.encode(\"UTF-8\") # only legal in python2\n\n a = A()\n assert not a.name\n\n msg = getmsg(f)\n assert msg is not None\n assert \"UnicodeDecodeError\" not in msg\n assert \"UnicodeEncodeError\" not in msg", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport_TestRewriteOnImport.test_pycache_is_readonly.try_.finally_.cache_chmod_old_mode_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport_TestRewriteOnImport.test_pycache_is_readonly.try_.finally_.cache_chmod_old_mode_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 729, "end_line": 751, "span_ids": ["TestRewriteOnImport", "TestRewriteOnImport.test_pycache_is_a_file", "TestRewriteOnImport.test_pycache_is_readonly"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n def test_pycache_is_a_file(self, pytester: Pytester) -> None:\n pytester.path.joinpath(\"__pycache__\").write_text(\"Hello\")\n pytester.makepyfile(\n \"\"\"\n def test_rewritten():\n assert \"@py_builtins\" in globals()\"\"\"\n )\n assert pytester.runpytest().ret == 0\n\n def test_pycache_is_readonly(self, pytester: Pytester) -> None:\n cache = pytester.mkdir(\"__pycache__\")\n old_mode = cache.stat().st_mode\n cache.chmod(old_mode ^ stat.S_IWRITE)\n pytester.makepyfile(\n \"\"\"\n def test_rewritten():\n assert \"@py_builtins\" in globals()\"\"\"\n )\n try:\n assert pytester.runpytest().ret == 0\n finally:\n cache.chmod(old_mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_pyc_vs_pyo_TestRewriteOnImport.test_pyc_vs_pyo.assert_tagged_pyc_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_pyc_vs_pyo_TestRewriteOnImport.test_pyc_vs_pyo.assert_tagged_pyc_in", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 849, "end_line": 870, "span_ids": ["TestRewriteOnImport.test_pyc_vs_pyo"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n @pytest.mark.skipif('\"__pypy__\" in sys.modules')\n def test_pyc_vs_pyo(self, pytester: Pytester, monkeypatch) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_optimized():\n \"hello\"\n assert test_optimized.__doc__ is None\"\"\"\n )\n p = make_numbered_dir(root=Path(pytester.path), prefix=\"runpytest-\")\n tmp = \"--basetemp=%s\" % p\n monkeypatch.setenv(\"PYTHONOPTIMIZE\", \"2\")\n monkeypatch.delenv(\"PYTHONDONTWRITEBYTECODE\", raising=False)\n monkeypatch.delenv(\"PYTHONPYCACHEPREFIX\", raising=False)\n assert pytester.runpytest_subprocess(tmp).ret == 0\n tagged = \"test_pyc_vs_pyo.\" + PYTEST_TAG\n assert tagged + \".pyo\" in os.listdir(\"__pycache__\")\n monkeypatch.undo()\n monkeypatch.delenv(\"PYTHONDONTWRITEBYTECODE\", raising=False)\n monkeypatch.delenv(\"PYTHONPYCACHEPREFIX\", 
raising=False)\n assert pytester.runpytest_subprocess(tmp).ret == 1\n assert tagged + \".pyc\" in os.listdir(\"__pycache__\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_remember_rewritten_modules_TestRewriteOnImport.test_remember_rewritten_modules.assert_warnings_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_remember_rewritten_modules_TestRewriteOnImport.test_remember_rewritten_modules.assert_warnings_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 920, "end_line": 938, "span_ids": ["TestRewriteOnImport.test_remember_rewritten_modules"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_remember_rewritten_modules(\n self, pytestconfig, pytester: Pytester, monkeypatch\n ) -> None:\n \"\"\"`AssertionRewriteHook` should remember rewritten modules so it\n doesn't give false positives (#2005).\"\"\"\n monkeypatch.syspath_prepend(pytester.path)\n pytester.makepyfile(test_remember_rewritten_modules=\"\")\n warnings = []\n hook = AssertionRewritingHook(pytestconfig)\n monkeypatch.setattr(\n hook, \"_warn_already_imported\", lambda code, msg: warnings.append(msg)\n )\n spec = hook.find_spec(\"test_remember_rewritten_modules\")\n assert spec is not None\n module = importlib.util.module_from_spec(spec)\n hook.exec_module(module)\n hook.mark_rewrite(\"test_remember_rewritten_modules\")\n hook.mark_rewrite(\"test_remember_rewritten_modules\")\n assert warnings == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_resources_provider_for_loader_TestAssertionRewriteHookDetails.test_resources_provider_for_loader.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_resources_provider_for_loader_TestAssertionRewriteHookDetails.test_resources_provider_for_loader.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1019, "end_line": 1047, "span_ids": ["TestAssertionRewriteHookDetails.test_resources_provider_for_loader"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestAssertionRewriteHookDetails:\n\n def test_resources_provider_for_loader(self, pytester: Pytester) -> None:\n \"\"\"\n Attempts to load resources from a package should succeed normally,\n even when the AssertionRewriteHook is used to load the modules.\n\n See #366 for details.\n \"\"\"\n pytest.importorskip(\"pkg_resources\")\n\n pytester.mkpydir(\"testpkg\")\n contents = {\n \"testpkg/test_pkg\": \"\"\"\n import pkg_resources\n\n import pytest\n from _pytest.assertion.rewrite import AssertionRewritingHook\n\n def test_load_resource():\n assert isinstance(__loader__, AssertionRewritingHook)\n res = pkg_resources.resource_string(__name__, 'resource.txt')\n res = res.decode('ascii')\n assert res == 'Load me please.'\n \"\"\"\n }\n pytester.makepyfile(**contents)\n pytester.maketxtfile(**{\"testpkg/resource\": \"Load me please.\"})\n\n result = pytester.runpytest_subprocess()\n result.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_read_pyc_TestAssertionRewriteHookDetails.test_read_pyc._no_error": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_read_pyc_TestAssertionRewriteHookDetails.test_read_pyc._no_error", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1049, "end_line": 1069, "span_ids": ["TestAssertionRewriteHookDetails.test_read_pyc"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewriteHookDetails:\n\n def test_read_pyc(self, tmp_path: Path) -> None:\n \"\"\"\n Ensure that the `_read_pyc` can properly deal with corrupted pyc files.\n In those circumstances it should just give up instead of generating\n an exception that is propagated to the caller.\n \"\"\"\n import py_compile\n from _pytest.assertion.rewrite import _read_pyc\n\n source = tmp_path / \"source.py\"\n pyc = Path(str(source) + \"c\")\n\n source.write_text(\"def test(): pass\")\n py_compile.compile(str(source), str(pyc))\n\n contents = pyc.read_bytes()\n strip_bytes = 20 # header is around 16 bytes, strip a little more\n assert len(contents) > strip_bytes\n pyc.write_bytes(contents[:strip_bytes])\n\n assert _read_pyc(source, pyc) is None # no error", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_get_data_support_TestAssertionRewriteHookDetails.test_get_data_support.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_get_data_support_TestAssertionRewriteHookDetails.test_get_data_support.result_stdout_fnmatch_lin", "embedding": null, 
"metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1151, "end_line": 1167, "span_ids": ["TestAssertionRewriteHookDetails.test_get_data_support"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewriteHookDetails:\n\n def test_get_data_support(self, pytester: Pytester) -> None:\n \"\"\"Implement optional PEP302 api (#808).\"\"\"\n path = pytester.mkpydir(\"foo\")\n path.joinpath(\"test_foo.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n class Test(object):\n def test_foo(self):\n import pkgutil\n data = pkgutil.get_data('foo.test_foo', 'data.txt')\n assert data == b'Hey'\n \"\"\"\n )\n )\n path.joinpath(\"data.txt\").write_text(\"Hey\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestIssue925_TestIssue925.test_many_brackets.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestIssue925_TestIssue925.test_many_brackets.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1189, "end_line": 1218, "span_ids": ["TestIssue925.test_many_brackets", "TestIssue925.test_simple_case", "TestIssue925", "TestIssue925.test_long_case"], "tokens": 235}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestIssue925:\n def test_simple_case(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_ternary_display():\n assert (False == False) == False\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*E*assert (False == False) == False\"])\n\n def test_long_case(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_ternary_display():\n assert False == (False == True) == True\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*E*assert (False == True) == True\"])\n\n def test_many_brackets(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_ternary_display():\n assert True == ((False == True) == True)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*E*assert True == ((False == True) == True)\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestIssue2121_TestIssue2121.test_rewrite_python_files_contain_subdirs.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestIssue2121_TestIssue2121.test_rewrite_python_files_contain_subdirs.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1221, "end_line": 1238, "span_ids": ["TestIssue2121", "TestIssue2121.test_rewrite_python_files_contain_subdirs"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestIssue2121:\n def test_rewrite_python_files_contain_subdirs(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"tests/file.py\": \"\"\"\n def test_simple_failure():\n assert 1 + 1 == 3\n \"\"\"\n }\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n python_files = tests/**.py\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*E*assert (1 + 1) == 3\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_source_mtime_long_long_test_source_mtime_long_long.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_source_mtime_long_long_test_source_mtime_long_long.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1241, "end_line": 1264, "span_ids": ["test_source_mtime_long_long"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(\n sys.maxsize <= (2 ** 31 - 1), reason=\"Causes OverflowError on 32bit systems\"\n)\n@pytest.mark.parametrize(\"offset\", [-1, +1])\ndef test_source_mtime_long_long(pytester: Pytester, offset) -> None:\n \"\"\"Support modification dates after 2038 in rewritten files (#4903).\n\n pytest would crash with:\n\n fp.write(struct.pack(\" None:\n \"\"\"Fix infinite recursion when writing pyc files: if an import happens to be triggered when writing the pyc\n file, this would cause another call to the hook, which would trigger another pyc writing, which could\n trigger another import, and so on. 
(#3506)\"\"\"\n from _pytest.assertion import rewrite as rewritemod\n\n pytester.syspathinsert()\n pytester.makepyfile(test_foo=\"def test_foo(): pass\")\n pytester.makepyfile(test_bar=\"def test_bar(): pass\")\n\n original_write_pyc = rewritemod._write_pyc\n\n write_pyc_called = []\n\n def spy_write_pyc(*args, **kwargs):\n # make a note that we have called _write_pyc\n write_pyc_called.append(True)\n # try to import a module at this point: we should not try to rewrite this module\n assert hook.find_spec(\"test_bar\") is None\n return original_write_pyc(*args, **kwargs)\n\n monkeypatch.setattr(rewritemod, \"_write_pyc\", spy_write_pyc)\n monkeypatch.setattr(sys, \"dont_write_bytecode\", False)\n\n hook = AssertionRewritingHook(pytestconfig)\n spec = hook.find_spec(\"test_foo\")\n assert spec is not None\n module = importlib.util.module_from_spec(spec)\n hook.exec_module(module)\n assert len(write_pyc_called) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout_TestEarlyRewriteBailout.hook.return.hook": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout_TestEarlyRewriteBailout.hook.return.hook", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1301, "end_line": 1330, "span_ids": ["TestEarlyRewriteBailout.hook", "TestEarlyRewriteBailout", "TestEarlyRewriteBailout.hook.StubSession:2", "TestEarlyRewriteBailout.hook.StubSession"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEarlyRewriteBailout:\n @pytest.fixture\n def hook(\n self, pytestconfig, monkeypatch, pytester: Pytester\n ) -> AssertionRewritingHook:\n \"\"\"Returns a patched AssertionRewritingHook instance so we can configure its initial paths and track\n if PathFinder.find_spec has been called.\n \"\"\"\n import importlib.machinery\n\n self.find_spec_calls: List[str] = []\n self.initial_paths: Set[Path] = set()\n\n class StubSession:\n _initialpaths = self.initial_paths\n\n def isinitpath(self, p):\n return p in self._initialpaths\n\n def spy_find_spec(name, path):\n self.find_spec_calls.append(name)\n return importlib.machinery.PathFinder.find_spec(name, path)\n\n hook = AssertionRewritingHook(pytestconfig)\n # use default patterns, otherwise we inherit pytest's testing config\n hook.fnpats[:] = [\"test_*.py\", \"*_test.py\"]\n monkeypatch.setattr(hook, \"_find_spec\", spy_find_spec)\n hook.set_session(StubSession()) # type: ignore[arg-type]\n pytester.syspathinsert()\n return hook", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_basic_TestEarlyRewriteBailout.test_basic.None_7": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_basic_TestEarlyRewriteBailout.test_basic.None_7", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1332, "end_line": 1363, "span_ids": ["TestEarlyRewriteBailout.test_basic"], "tokens": 328}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEarlyRewriteBailout:\n\n def test_basic(self, pytester: Pytester, hook: AssertionRewritingHook) -> None:\n \"\"\"\n Ensure we avoid calling PathFinder.find_spec when we know for sure a certain\n module will not be rewritten to optimize assertion rewriting (#3918).\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture\n def fix(): return 1\n \"\"\"\n )\n pytester.makepyfile(test_foo=\"def test_foo(): pass\")\n pytester.makepyfile(bar=\"def bar(): pass\")\n foobar_path = pytester.makepyfile(foobar=\"def foobar(): pass\")\n self.initial_paths.add(foobar_path)\n\n # conftest files should always be rewritten\n assert hook.find_spec(\"conftest\") is not None\n assert self.find_spec_calls == [\"conftest\"]\n\n # files matching \"python_files\" mask should always be rewritten\n assert hook.find_spec(\"test_foo\") is not None\n assert self.find_spec_calls == [\"conftest\", \"test_foo\"]\n\n # file does not match \"python_files\": early bailout\n assert hook.find_spec(\"bar\") is None\n assert self.find_spec_calls == [\"conftest\", \"test_foo\"]\n\n # file is an initial path (passed on the command-line): should be rewritten\n assert hook.find_spec(\"foobar\") is not None\n assert self.find_spec_calls == [\"conftest\", \"test_foo\", \"foobar\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_config_cache_TestNewAPI.test_config_cache.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_config_cache_TestNewAPI.test_config_cache.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 94, "end_line": 110, "span_ids": ["TestNewAPI.test_config_cache"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewAPI:\n\n def test_config_cache(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_configure(config):\n # see that we get cache information early on\n assert hasattr(config, \"cache\")\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_session(pytestconfig):\n assert hasattr(pytestconfig, \"cache\")\n \"\"\"\n )\n result = 
pytester.runpytest()\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cachefuncarg_TestNewAPI.test_cachefuncarg.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cachefuncarg_TestNewAPI.test_cachefuncarg.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 112, "end_line": 127, "span_ids": ["TestNewAPI.test_cachefuncarg"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewAPI:\n\n def test_cachefuncarg(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_cachefuncarg(cache):\n val = cache.get(\"some/thing\", None)\n assert val is None\n cache.set(\"some/thing\", [1])\n pytest.raises(TypeError, lambda: cache.get(\"some/thing\"))\n val = cache.get(\"some/thing\", [])\n assert val == [1]\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_reportheader_test_cache_reportheader.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_reportheader_test_cache_reportheader.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 177, "end_line": 187, "span_ids": ["test_cache_reportheader"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"env\", ((), (\"TOX_ENV_DIR\", \"/tox_env_dir\")))\ndef test_cache_reportheader(env, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n pytester.makepyfile(\"\"\"def test_foo(): pass\"\"\")\n if env:\n monkeypatch.setenv(*env)\n expected = os.path.join(env[1], \".pytest_cache\")\n else:\n monkeypatch.delenv(\"TOX_ENV_DIR\", raising=False)\n expected = \".pytest_cache\"\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines([\"cachedir: %s\" % expected])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_reportheader_external_abspath_test_cache_reportheader_external_abspath.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_reportheader_external_abspath_test_cache_reportheader_external_abspath.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 190, "end_line": 207, "span_ids": ["test_cache_reportheader_external_abspath"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_cache_reportheader_external_abspath(\n pytester: Pytester, tmp_path_factory: TempPathFactory\n) -> None:\n external_cache = tmp_path_factory.mktemp(\n \"test_cache_reportheader_external_abspath_abs\"\n )\n\n pytester.makepyfile(\"def test_hello(): pass\")\n pytester.makeini(\n \"\"\"\n [pytest]\n cache_dir = {abscache}\n \"\"\".format(\n abscache=external_cache\n )\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines([f\"cachedir: {external_cache}\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_show_test_cache_show.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cache_show_test_cache_show.None_5", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 210, "end_line": 259, "span_ids": ["test_cache_show"], "tokens": 400}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_cache_show(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--cache-show\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*cache is empty*\"])\n pytester.makeconftest(\n \"\"\"\n def pytest_configure(config):\n config.cache.set(\"my/name\", [1,2,3])\n config.cache.set(\"my/hello\", \"world\")\n config.cache.set(\"other/some\", {1:2})\n dp = config.cache.makedir(\"mydb\")\n dp.ensure(\"hello\")\n dp.ensure(\"world\")\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 5 # no tests executed\n\n result = pytester.runpytest(\"--cache-show\")\n result.stdout.fnmatch_lines(\n [\n \"*cachedir:*\",\n \"*- cache values for '[*]' -*\",\n \"cache/nodeids contains:\",\n \"my/name contains:\",\n \" [1, 2, 3]\",\n \"other/some contains:\",\n \" {*'1': 2}\",\n \"*- cache directories for '[*]' -*\",\n \"*mydb/hello*length 0*\",\n \"*mydb/world*length 0*\",\n ]\n )\n assert result.ret == 0\n\n result = pytester.runpytest(\"--cache-show\", \"*/hello\")\n result.stdout.fnmatch_lines(\n [\n 
\"*cachedir:*\",\n \"*- cache values for '[*]/hello' -*\",\n \"my/hello contains:\",\n \" *'world'\",\n \"*- cache directories for '[*]/hello' -*\",\n \"d/mydb/hello*length 0*\",\n ]\n )\n stdout = result.stdout.str()\n assert \"other/some\" not in stdout\n assert \"d/mydb/world\" not in stdout\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed_TestLastFailed.test_lastfailed_usecase.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed_TestLastFailed.test_lastfailed_usecase.None_6", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 262, "end_line": 309, "span_ids": ["TestLastFailed", "TestLastFailed.test_lastfailed_usecase"], "tokens": 434}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n def test_lastfailed_usecase(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.setattr(\"sys.dont_write_bytecode\", True)\n p = pytester.makepyfile(\n \"\"\"\n def test_1(): assert 0\n def test_2(): assert 0\n def test_3(): assert 1\n \"\"\"\n )\n result = pytester.runpytest(str(p))\n result.stdout.fnmatch_lines([\"*2 failed*\"])\n p = pytester.makepyfile(\n \"\"\"\n def test_1(): assert 1\n def test_2(): assert 1\n def test_3(): assert 0\n \"\"\"\n )\n result = pytester.runpytest(str(p), \"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 3 items / 1 deselected / 2 selected\",\n \"run-last-failure: rerun previous 2 failures\",\n \"*= 2 passed, 1 deselected in *\",\n ]\n )\n result = pytester.runpytest(str(p), \"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 3 items\",\n \"run-last-failure: no previously failed tests, not deselecting items.\",\n \"*1 failed*2 passed*\",\n ]\n )\n pytester.path.joinpath(\".pytest_cache\", \".git\").mkdir(parents=True)\n result = pytester.runpytest(str(p), \"--lf\", \"--cache-clear\")\n result.stdout.fnmatch_lines([\"*1 failed*2 passed*\"])\n assert pytester.path.joinpath(\".pytest_cache\", \"README.md\").is_file()\n assert pytester.path.joinpath(\".pytest_cache\", \".git\").is_dir()\n\n # Run this again to make sure clear-cache is robust\n if os.path.isdir(\".pytest_cache\"):\n shutil.rmtree(\".pytest_cache\")\n result = pytester.runpytest(\"--lf\", \"--cache-clear\")\n result.stdout.fnmatch_lines([\"*1 failed*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_difference_invocations_TestLastFailed.test_lastfailed_difference_invocations.None_6": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_difference_invocations_TestLastFailed.test_lastfailed_difference_invocations.None_6", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 343, "end_line": 372, "span_ids": ["TestLastFailed.test_lastfailed_difference_invocations"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_difference_invocations(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.setattr(\"sys.dont_write_bytecode\", True)\n pytester.makepyfile(\n test_a=\"\"\"\n def test_a1(): assert 0\n def test_a2(): assert 1\n \"\"\",\n test_b=\"def test_b1(): assert 0\",\n )\n p = pytester.path.joinpath(\"test_a.py\")\n p2 = pytester.path.joinpath(\"test_b.py\")\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 failed*\"])\n result = pytester.runpytest(\"--lf\", p2)\n result.stdout.fnmatch_lines([\"*1 failed*\"])\n\n pytester.makepyfile(test_b=\"def test_b1(): assert 1\")\n result = pytester.runpytest(\"--lf\", p2)\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result = pytester.runpytest(\"--lf\", p)\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items / 1 deselected / 1 selected\",\n \"run-last-failure: rerun previous 1 failure\",\n \"*= 1 failed, 1 deselected in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_xpass_TestLastFailed.test_non_serializable_parametrize.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_xpass_TestLastFailed.test_non_serializable_parametrize.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 389, "end_line": 419, "span_ids": ["TestLastFailed.test_lastfailed_xpass", "TestLastFailed.test_non_serializable_parametrize"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_xpass(self, pytester: Pytester) -> None:\n pytester.inline_runsource(\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test_hello():\n assert 1\n \"\"\"\n )\n config = pytester.parseconfigure()\n assert config.cache is not None\n lastfailed = config.cache.get(\"cache/lastfailed\", -1)\n assert lastfailed == -1\n\n def test_non_serializable_parametrize(self, pytester: Pytester) -> None:\n \"\"\"Test that failed parametrized tests with unmarshable parameters\n 
don't break pytest-cache.\n \"\"\"\n pytester.makepyfile(\n r\"\"\"\n import pytest\n\n @pytest.mark.parametrize('val', [\n b'\\xac\\x10\\x02G',\n ])\n def test_fail(val):\n assert False\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 failed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_terminal_report_lastfailed_TestLastFailed.test_terminal_report_lastfailed.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_terminal_report_lastfailed_TestLastFailed.test_terminal_report_lastfailed.None_4", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 421, "end_line": 471, "span_ids": ["TestLastFailed.test_terminal_report_lastfailed"], "tokens": 359}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_terminal_report_lastfailed(self, pytester: Pytester) -> None:\n test_a = pytester.makepyfile(\n test_a=\"\"\"\n def test_a1(): pass\n def test_a2(): pass\n \"\"\"\n )\n test_b = pytester.makepyfile(\n test_b=\"\"\"\n def test_b1(): assert 0\n def test_b2(): assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 4 items\", \"*2 failed, 2 passed in*\"])\n\n result = pytester.runpytest(\"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: rerun previous 2 failures (skipped 1 file)\",\n \"*2 failed in*\",\n ]\n )\n\n result = pytester.runpytest(test_a, \"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: 2 known failures not in selected tests\",\n \"*2 passed in*\",\n ]\n )\n\n result = pytester.runpytest(test_b, \"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: rerun previous 2 failures\",\n \"*2 failed in*\",\n ]\n )\n\n result = pytester.runpytest(\"test_b.py::test_b1\", \"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 1 item\",\n \"run-last-failure: rerun previous 1 failure\",\n \"*1 failed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_terminal_report_failedfirst_TestLastFailed.test_terminal_report_failedfirst.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_terminal_report_failedfirst_TestLastFailed.test_terminal_report_failedfirst.None_2", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 473, "end_line": 490, "span_ids": ["TestLastFailed.test_terminal_report_failedfirst"], 
"tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_terminal_report_failedfirst(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_a=\"\"\"\n def test_a1(): assert 0\n def test_a2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 2 items\", \"*1 failed, 1 passed in*\"])\n\n result = pytester.runpytest(\"--ff\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: rerun previous 1 failure first\",\n \"*1 failed, 1 passed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_collectfailure_TestLastFailed.test_lastfailed_collectfailure.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_collectfailure_TestLastFailed.test_lastfailed_collectfailure.None_2", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 492, "end_line": 524, "span_ids": ["TestLastFailed.test_lastfailed_collectfailure"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_collectfailure(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n\n pytester.makepyfile(\n test_maybe=\"\"\"\n import os\n env = os.environ\n if '1' == env['FAILIMPORT']:\n raise ImportError('fail')\n def test_hello():\n assert '0' == env['FAILTEST']\n \"\"\"\n )\n\n def rlf(fail_import, fail_run):\n monkeypatch.setenv(\"FAILIMPORT\", str(fail_import))\n monkeypatch.setenv(\"FAILTEST\", str(fail_run))\n\n pytester.runpytest(\"-q\")\n config = pytester.parseconfigure()\n assert config.cache is not None\n lastfailed = config.cache.get(\"cache/lastfailed\", -1)\n return lastfailed\n\n lastfailed = rlf(fail_import=0, fail_run=0)\n assert lastfailed == -1\n\n lastfailed = rlf(fail_import=1, fail_run=0)\n assert list(lastfailed) == [\"test_maybe.py\"]\n\n lastfailed = rlf(fail_import=0, fail_run=1)\n assert list(lastfailed) == [\"test_maybe.py::test_hello\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_failure_subset_TestLastFailed.test_lastfailed_failure_subset.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_failure_subset_TestLastFailed.test_lastfailed_failure_subset.None_3", 
"embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 526, "end_line": 580, "span_ids": ["TestLastFailed.test_lastfailed_failure_subset"], "tokens": 468}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_failure_subset(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n pytester.makepyfile(\n test_maybe=\"\"\"\n import os\n env = os.environ\n if '1' == env['FAILIMPORT']:\n raise ImportError('fail')\n def test_hello():\n assert '0' == env['FAILTEST']\n \"\"\"\n )\n\n pytester.makepyfile(\n test_maybe2=\"\"\"\n import os\n env = os.environ\n if '1' == env['FAILIMPORT']:\n raise ImportError('fail')\n\n def test_hello():\n assert '0' == env['FAILTEST']\n\n def test_pass():\n pass\n \"\"\"\n )\n\n def rlf(fail_import, fail_run, args=()):\n monkeypatch.setenv(\"FAILIMPORT\", str(fail_import))\n monkeypatch.setenv(\"FAILTEST\", str(fail_run))\n\n result = pytester.runpytest(\"-q\", \"--lf\", *args)\n config = pytester.parseconfigure()\n assert config.cache is not None\n lastfailed = config.cache.get(\"cache/lastfailed\", -1)\n return result, lastfailed\n\n result, lastfailed = rlf(fail_import=0, fail_run=0)\n assert lastfailed == -1\n result.stdout.fnmatch_lines([\"*3 passed*\"])\n\n result, lastfailed = rlf(fail_import=1, fail_run=0)\n assert sorted(list(lastfailed)) == [\"test_maybe.py\", \"test_maybe2.py\"]\n\n result, lastfailed = rlf(fail_import=0, fail_run=0, args=(\"test_maybe2.py\",))\n assert list(lastfailed) == [\"test_maybe.py\"]\n\n # edge case of test selection - even if we remember failures\n # from other tests we still need to run all tests if no test\n # matches the failures\n result, lastfailed = rlf(fail_import=0, fail_run=0, args=(\"test_maybe2.py\",))\n assert list(lastfailed) == [\"test_maybe.py\"]\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_creates_cache_when_needed_TestLastFailed.test_lastfailed_creates_cache_when_needed.assert_os_path_exists_p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_creates_cache_when_needed_TestLastFailed.test_lastfailed_creates_cache_when_needed.assert_os_path_exists_p", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 582, "end_line": 594, "span_ids": ["TestLastFailed.test_lastfailed_creates_cache_when_needed"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestLastFailed:\n\n def test_lastfailed_creates_cache_when_needed(self, pytester: Pytester) -> None:\n # Issue #1342\n pytester.makepyfile(test_empty=\"\")\n pytester.runpytest(\"-q\", \"--lf\")\n assert not os.path.exists(\".pytest_cache/v/cache/lastfailed\")\n\n pytester.makepyfile(test_successful=\"def test_success():\\n assert True\")\n pytester.runpytest(\"-q\", \"--lf\")\n assert not os.path.exists(\".pytest_cache/v/cache/lastfailed\")\n\n pytester.makepyfile(test_errored=\"def test_error():\\n assert False\")\n pytester.runpytest(\"-q\", \"--lf\")\n assert os.path.exists(\".pytest_cache/v/cache/lastfailed\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_xfail_not_considered_failure_TestLastFailed.test_xfail_strict_considered_failure.assert_self_get_cached_la": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_xfail_not_considered_failure_TestLastFailed.test_xfail_strict_considered_failure.assert_self_get_cached_la", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 596, "end_line": 620, "span_ids": ["TestLastFailed.test_xfail_strict_considered_failure", "TestLastFailed.test_xfail_not_considered_failure"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_xfail_not_considered_failure(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test(): assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 xfailed*\"])\n assert self.get_cached_last_failed(pytester) == []\n\n def test_xfail_strict_considered_failure(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail(strict=True)\n def test(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 failed*\"])\n assert self.get_cached_last_failed(pytester) == [\n \"test_xfail_strict_considered_failure.py::test\"\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_failed_changed_to_xfail_or_skip_TestLastFailed.test_failed_changed_to_xfail_or_skip.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_failed_changed_to_xfail_or_skip_TestLastFailed.test_failed_changed_to_xfail_or_skip.None_4", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 622, "end_line": 650, "span_ids": ["TestLastFailed.test_failed_changed_to_xfail_or_skip"], "tokens": 195}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n @pytest.mark.parametrize(\"mark\", [\"mark.xfail\", \"mark.skip\"])\n def test_failed_changed_to_xfail_or_skip(\n self, pytester: Pytester, mark: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test(): assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n assert self.get_cached_last_failed(pytester) == [\n \"test_failed_changed_to_xfail_or_skip.py::test\"\n ]\n assert result.ret == 1\n\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.{mark}\n def test(): assert 0\n \"\"\".format(\n mark=mark\n )\n )\n result = pytester.runpytest()\n assert result.ret == 0\n assert self.get_cached_last_failed(pytester) == []\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lf_and_ff_prints_no_needless_message_TestLastFailed.get_cached_last_failed.return.sorted_config_cache_get_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lf_and_ff_prints_no_needless_message_TestLastFailed.get_cached_last_failed.return.sorted_config_cache_get_", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 652, "end_line": 674, "span_ids": ["TestLastFailed.test_lf_and_ff_prints_no_needless_message", "TestLastFailed.get_cached_last_failed"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n @pytest.mark.parametrize(\"quiet\", [True, False])\n @pytest.mark.parametrize(\"opt\", [\"--ff\", \"--lf\"])\n def test_lf_and_ff_prints_no_needless_message(\n self, quiet: bool, opt: str, pytester: Pytester\n ) -> None:\n # Issue 3853\n pytester.makepyfile(\"def test(): assert 0\")\n args = [opt]\n if quiet:\n args.append(\"-q\")\n result = pytester.runpytest(*args)\n result.stdout.no_fnmatch_line(\"*run all*\")\n\n result = pytester.runpytest(*args)\n if quiet:\n result.stdout.no_fnmatch_line(\"*run all*\")\n else:\n assert \"rerun previous\" in result.stdout.str()\n\n def get_cached_last_failed(self, pytester: Pytester) -> List[str]:\n config = pytester.parseconfigure()\n assert config.cache is not None\n return sorted(config.cache.get(\"cache/lastfailed\", {}))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_cache_cumulative_TestLastFailed.test_cache_cumulative.None_4": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_cache_cumulative_TestLastFailed.test_cache_cumulative.None_4", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 676, "end_line": 738, "span_ids": ["TestLastFailed.test_cache_cumulative"], "tokens": 552}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_cache_cumulative(self, pytester: Pytester) -> None:\n \"\"\"Test workflow where user fixes errors gradually file by file using --lf.\"\"\"\n # 1. initial run\n test_bar = pytester.makepyfile(\n test_bar=\"\"\"\n def test_bar_1(): pass\n def test_bar_2(): assert 0\n \"\"\"\n )\n test_foo = pytester.makepyfile(\n test_foo=\"\"\"\n def test_foo_3(): pass\n def test_foo_4(): assert 0\n \"\"\"\n )\n pytester.runpytest()\n assert self.get_cached_last_failed(pytester) == [\n \"test_bar.py::test_bar_2\",\n \"test_foo.py::test_foo_4\",\n ]\n\n # 2. fix test_bar_2, run only test_bar.py\n pytester.makepyfile(\n test_bar=\"\"\"\n def test_bar_1(): pass\n def test_bar_2(): pass\n \"\"\"\n )\n result = pytester.runpytest(test_bar)\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n # ensure cache does not forget that test_foo_4 failed once before\n assert self.get_cached_last_failed(pytester) == [\"test_foo.py::test_foo_4\"]\n\n result = pytester.runpytest(\"--last-failed\")\n result.stdout.fnmatch_lines(\n [\n \"collected 1 item\",\n \"run-last-failure: rerun previous 1 failure (skipped 1 file)\",\n \"*= 1 failed in *\",\n ]\n )\n assert self.get_cached_last_failed(pytester) == [\"test_foo.py::test_foo_4\"]\n\n # 3. 
fix test_foo_4, run only test_foo.py\n test_foo = pytester.makepyfile(\n test_foo=\"\"\"\n def test_foo_3(): pass\n def test_foo_4(): pass\n \"\"\"\n )\n result = pytester.runpytest(test_foo, \"--last-failed\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items / 1 deselected / 1 selected\",\n \"run-last-failure: rerun previous 1 failure\",\n \"*= 1 passed, 1 deselected in *\",\n ]\n )\n assert self.get_cached_last_failed(pytester) == []\n\n result = pytester.runpytest(\"--last-failed\")\n result.stdout.fnmatch_lines([\"*4 passed*\"])\n assert self.get_cached_last_failed(pytester) == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_no_failures_behavior_empty_cache_TestLastFailed.test_lastfailed_no_failures_behavior_empty_cache.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_no_failures_behavior_empty_cache_TestLastFailed.test_lastfailed_no_failures_behavior_empty_cache.None_3", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 782, "end_line": 796, "span_ids": ["TestLastFailed.test_lastfailed_no_failures_behavior_empty_cache"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_no_failures_behavior_empty_cache(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_1(): pass\n def test_2(): assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"--lf\", \"--cache-clear\")\n result.stdout.fnmatch_lines([\"*1 failed*1 passed*\"])\n result = pytester.runpytest(\"--lf\", \"--cache-clear\", \"--lfnf\", \"all\")\n result.stdout.fnmatch_lines([\"*1 failed*1 passed*\"])\n result = pytester.runpytest(\"--lf\", \"--cache-clear\", \"--lfnf\", \"none\")\n result.stdout.fnmatch_lines([\"*2 desel*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_skip_collection_TestLastFailed.test_lastfailed_skip_collection.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_skip_collection_TestLastFailed.test_lastfailed_skip_collection.None_4", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 798, "end_line": 848, "span_ids": ["TestLastFailed.test_lastfailed_skip_collection"], "tokens": 402}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_skip_collection(self, pytester: Pytester) -> None:\n \"\"\"\n Test --lf behavior regarding skipping collection of files that are not marked as\n failed in the cache (#5172).\n \"\"\"\n pytester.makepyfile(\n **{\n \"pkg1/test_1.py\": \"\"\"\n import pytest\n\n @pytest.mark.parametrize('i', range(3))\n def test_1(i): pass\n \"\"\",\n \"pkg2/test_2.py\": \"\"\"\n import pytest\n\n @pytest.mark.parametrize('i', range(5))\n def test_1(i):\n assert i not in (1, 3)\n \"\"\",\n }\n )\n # first run: collects 8 items (test_1: 3, test_2: 5)\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 8 items\", \"*2 failed*6 passed*\"])\n # second run: collects only 5 items from test_2, because all tests from test_1 have passed\n result = pytester.runpytest(\"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: rerun previous 2 failures (skipped 1 file)\",\n \"*= 2 failed in *\",\n ]\n )\n\n # add another file and check if message is correct when skipping more than 1 file\n pytester.makepyfile(\n **{\n \"pkg1/test_3.py\": \"\"\"\n def test_3(): pass\n \"\"\"\n }\n )\n result = pytester.runpytest(\"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: rerun previous 2 failures (skipped 2 files)\",\n \"*= 2 failed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestReadme_TestReadme.test_readme_failed.assert_self_check_readme_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestReadme_TestReadme.test_readme_failed.assert_self_check_readme_", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 1176, "end_line": 1191, "span_ids": ["TestReadme.check_readme", "TestReadme", "TestReadme.test_readme_failed", "TestReadme.test_readme_passed"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReadme:\n def check_readme(self, pytester: Pytester) -> bool:\n config = pytester.parseconfigure()\n assert config.cache is not None\n readme = config.cache._cachedir.joinpath(\"README.md\")\n return readme.is_file()\n\n def test_readme_passed(self, pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_always_passes(): pass\")\n pytester.runpytest()\n assert self.check_readme(pytester) is True\n\n def test_readme_failed(self, pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_always_fails(): assert 0\")\n pytester.runpytest()\n assert self.check_readme(pytester) is True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_gitignore_test_gitignore.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_gitignore_test_gitignore.None_1", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 1194, "end_line": 1208, "span_ids": ["test_gitignore"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_gitignore(pytester: Pytester) -> None:\n \"\"\"Ensure we automatically create .gitignore file in the pytest_cache directory (#3286).\"\"\"\n from _pytest.cacheprovider import Cache\n\n config = pytester.parseconfig()\n cache = Cache.for_config(config, _ispytest=True)\n cache.set(\"foo\", \"bar\")\n msg = \"# Created by pytest automatically.\\n*\\n\"\n gitignore_path = cache._cachedir.joinpath(\".gitignore\")\n assert gitignore_path.read_text(encoding=\"UTF-8\") == msg\n\n # Does not overwrite existing/custom one.\n gitignore_path.write_text(\"custom\")\n cache.set(\"something\", \"else\")\n assert gitignore_path.read_text(encoding=\"UTF-8\") == \"custom\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing_TestPerTestCapturing.test_capture_and_fixtures.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing_TestPerTestCapturing.test_capture_and_fixtures.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 150, "end_line": 175, "span_ids": ["TestPerTestCapturing.test_capture_and_fixtures", "TestPerTestCapturing"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPerTestCapturing:\n def test_capture_and_fixtures(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def setup_module(mod):\n print(\"setup module\")\n def setup_function(function):\n print(\"setup \" + function.__name__)\n def test_func1():\n print(\"in func1\")\n assert 0\n def test_func2():\n print(\"in func2\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"setup module*\",\n \"setup test_func1*\",\n \"in func1*\",\n \"setup test_func2*\",\n \"in func2*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_capture_scope_cache_TestPerTestCapturing.test_capture_scope_cache.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_capture_scope_cache_TestPerTestCapturing.test_capture_scope_cache.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 177, "end_line": 203, "span_ids": ["TestPerTestCapturing.test_capture_scope_cache"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPerTestCapturing:\n\n @pytest.mark.xfail(reason=\"unimplemented feature\")\n def test_capture_scope_cache(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import sys\n def setup_module(func):\n print(\"module-setup\")\n def setup_function(func):\n print(\"function-setup\")\n def test_func():\n print(\"in function\")\n assert 0\n def teardown_function(func):\n print(\"in teardown\")\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"*test_func():*\",\n \"*Captured stdout during setup*\",\n \"module-setup*\",\n \"function-setup*\",\n \"*Captured stdout*\",\n \"in teardown*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_no_carry_over_TestPerTestCapturing.test_teardown_capturing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_no_carry_over_TestPerTestCapturing.test_teardown_capturing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 205, "end_line": 243, "span_ids": ["TestPerTestCapturing.test_no_carry_over", "TestPerTestCapturing.test_teardown_capturing"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPerTestCapturing:\n\n def test_no_carry_over(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test_func1():\n print(\"in func1\")\n def test_func2():\n print(\"in func2\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p)\n s = result.stdout.str()\n assert \"in func1\" not in s\n assert \"in func2\" in s\n\n def test_teardown_capturing(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def setup_function(function):\n print(\"setup func1\")\n def teardown_function(function):\n print(\"teardown func1\")\n assert 0\n def test_func1():\n print(\"in func1\")\n pass\n \"\"\"\n )\n result = 
pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"*teardown_function*\",\n \"*Captured stdout*\",\n \"setup func1*\",\n \"in func1*\",\n \"teardown func1*\",\n # \"*1 fixture failure*\"\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_teardown_capturing_final_TestPerTestCapturing.test_teardown_capturing_final.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_teardown_capturing_final_TestPerTestCapturing.test_teardown_capturing_final.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 245, "end_line": 263, "span_ids": ["TestPerTestCapturing.test_teardown_capturing_final"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPerTestCapturing:\n\n def test_teardown_capturing_final(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def teardown_module(mod):\n print(\"teardown module\")\n assert 0\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"*def teardown_module(mod):*\",\n \"*Captured stdout*\",\n \"*teardown module*\",\n \"*1 error*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_capturing_outerr_TestPerTestCapturing.test_capturing_outerr.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestPerTestCapturing.test_capturing_outerr_TestPerTestCapturing.test_capturing_outerr.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 265, "end_line": 290, "span_ids": ["TestPerTestCapturing.test_capturing_outerr"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPerTestCapturing:\n\n def test_capturing_outerr(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\\\n import sys\n def test_capturing():\n print(42)\n sys.stderr.write(str(23))\n def test_capturing_error():\n print(1)\n sys.stderr.write(str(2))\n raise ValueError\n \"\"\"\n )\n result = pytester.runpytest(p1)\n result.stdout.fnmatch_lines(\n [\n \"*test_capturing_outerr.py .F*\",\n \"====* FAILURES *====\",\n \"____*____\",\n \"*test_capturing_outerr.py:8: ValueError\",\n \"*--- 
Captured stdout *call*\",\n \"1\",\n \"*--- Captured stderr *call*\",\n \"2\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction_TestLoggingInteraction.test_logging_and_immediate_setupteardown.for_optargs_in_captu.assert_closed_not_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction_TestLoggingInteraction.test_logging_and_immediate_setupteardown.for_optargs_in_captu.assert_closed_not_in_s", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 293, "end_line": 332, "span_ids": ["TestLoggingInteraction.test_logging_and_immediate_setupteardown", "TestLoggingInteraction", "TestLoggingInteraction.test_logging_stream_ownership"], "tokens": 288}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoggingInteraction:\n def test_logging_stream_ownership(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n def test_logging():\n import logging\n import pytest\n stream = capture.CaptureIO()\n logging.basicConfig(stream=stream)\n stream.close() # to free memory/release resources\n \"\"\"\n )\n result = pytester.runpytest_subprocess(p)\n assert result.stderr.str().find(\"atexit\") == -1\n\n def test_logging_and_immediate_setupteardown(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n import logging\n def setup_function(function):\n logging.warning(\"hello1\")\n\n def test_logging():\n logging.warning(\"hello2\")\n assert 0\n\n def teardown_function(function):\n logging.warning(\"hello3\")\n assert 0\n \"\"\"\n )\n for optargs in ((\"--capture=sys\",), (\"--capture=fd\",)):\n print(optargs)\n result = pytester.runpytest_subprocess(p, *optargs)\n s = result.stdout.str()\n result.stdout.fnmatch_lines(\n [\"*WARN*hello3\", \"*WARN*hello1\", \"*WARN*hello2\"] # errors show first!\n )\n # verify proper termination\n assert \"closed\" not in s", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_logging_and_crossscope_fixtures_TestLoggingInteraction.test_logging_and_crossscope_fixtures.for_optargs_in_captu.assert_closed_not_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_logging_and_crossscope_fixtures_TestLoggingInteraction.test_logging_and_crossscope_fixtures.for_optargs_in_captu.assert_closed_not_in_s", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 334, "end_line": 358, "span_ids": ["TestLoggingInteraction.test_logging_and_crossscope_fixtures"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoggingInteraction:\n\n def test_logging_and_crossscope_fixtures(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n import logging\n def setup_module(function):\n logging.warning(\"hello1\")\n\n def test_logging():\n logging.warning(\"hello2\")\n assert 0\n\n def teardown_module(function):\n logging.warning(\"hello3\")\n assert 0\n \"\"\"\n )\n for optargs in ((\"--capture=sys\",), (\"--capture=fd\",)):\n print(optargs)\n result = pytester.runpytest_subprocess(p, *optargs)\n s = result.stdout.str()\n result.stdout.fnmatch_lines(\n [\"*WARN*hello3\", \"*WARN*hello1\", \"*WARN*hello2\"] # errors come first\n )\n # verify proper termination\n assert \"closed\" not in s", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_conftestlogging_is_shown_TestLoggingInteraction.test_conftestlogging_is_shown.assert_operation_on_clos": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_conftestlogging_is_shown_TestLoggingInteraction.test_conftestlogging_is_shown.assert_operation_on_clos", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 360, "end_line": 372, "span_ids": ["TestLoggingInteraction.test_conftestlogging_is_shown"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoggingInteraction:\n\n def test_conftestlogging_is_shown(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\\\n import logging\n logging.basicConfig()\n logging.warning(\"hello435\")\n \"\"\"\n )\n # make sure that logging is still captured in tests\n result = pytester.runpytest_subprocess(\"-s\", \"-p\", \"no:capturelog\")\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stderr.fnmatch_lines([\"WARNING*hello435*\"])\n assert \"operation on closed file\" not in result.stderr.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_conftestlogging_and_test_logging_TestLoggingInteraction.test_conftestlogging_and_test_logging.assert_operation_on_clos": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_conftestlogging_and_test_logging_TestLoggingInteraction.test_conftestlogging_and_test_logging.assert_operation_on_clos", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", 
"category": "test", "start_line": 374, "end_line": 394, "span_ids": ["TestLoggingInteraction.test_conftestlogging_and_test_logging"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoggingInteraction:\n\n def test_conftestlogging_and_test_logging(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\\\n import logging\n logging.basicConfig()\n \"\"\"\n )\n # make sure that logging is still captured in tests\n p = pytester.makepyfile(\n \"\"\"\\\n def test_hello():\n import logging\n logging.warning(\"hello433\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest_subprocess(p, \"-p\", \"no:capturelog\")\n assert result.ret != 0\n result.stdout.fnmatch_lines([\"WARNING*hello433*\"])\n assert \"something\" not in result.stderr.str()\n assert \"operation on closed file\" not in result.stderr.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_logging_after_cap_stopped_TestLoggingInteraction.test_logging_after_cap_stopped.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestLoggingInteraction.test_logging_after_cap_stopped_TestLoggingInteraction.test_logging_after_cap_stopped.assert_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 396, "end_line": 428, "span_ids": ["TestLoggingInteraction.test_logging_after_cap_stopped"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoggingInteraction:\n\n def test_logging_after_cap_stopped(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\\\n import pytest\n import logging\n\n log = logging.getLogger(__name__)\n\n @pytest.fixture\n def log_on_teardown():\n yield\n log.warning('Logging on teardown')\n \"\"\"\n )\n # make sure that logging is still captured in tests\n p = pytester.makepyfile(\n \"\"\"\\\n def test_hello(log_on_teardown):\n import logging\n logging.warning(\"hello433\")\n assert 1\n raise KeyboardInterrupt()\n \"\"\"\n )\n result = pytester.runpytest_subprocess(p, \"--log-cli-level\", \"info\")\n assert result.ret != 0\n result.stdout.fnmatch_lines(\n [\"*WARNING*hello433*\", \"*WARNING*Logging on teardown*\"]\n )\n assert (\n \"AttributeError: 'NoneType' object has no attribute 'resume_capturing'\"\n not in result.stderr.str()\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture_TestCaptureFixture.test_capsyscapfd.result_stdout_fnmatch_lin": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture_TestCaptureFixture.test_capsyscapfd.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 431, "end_line": 463, "span_ids": ["TestCaptureFixture.test_capsyscapfd", "TestCaptureFixture.test_std_functional", "TestCaptureFixture"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n @pytest.mark.parametrize(\"opt\", [[], [\"-s\"]])\n def test_std_functional(self, pytester: Pytester, opt) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\\\n def test_hello(capsys):\n print(42)\n out, err = capsys.readouterr()\n assert out.startswith(\"42\")\n \"\"\",\n *opt,\n )\n reprec.assertoutcome(passed=1)\n\n def test_capsyscapfd(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n def test_one(capsys, capfd):\n pass\n def test_two(capfd, capsys):\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"*ERROR*setup*test_one*\",\n \"E*capfd*capsys*same*time*\",\n \"*ERROR*setup*test_two*\",\n \"E*capsys*capfd*same*time*\",\n \"*2 errors*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capturing_getfixturevalue_TestCaptureFixture.test_capturing_getfixturevalue.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capturing_getfixturevalue_TestCaptureFixture.test_capturing_getfixturevalue.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 465, "end_line": 486, "span_ids": ["TestCaptureFixture.test_capturing_getfixturevalue"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n def test_capturing_getfixturevalue(self, pytester: Pytester) -> None:\n \"\"\"Test that asking for \"capfd\" and \"capsys\" using request.getfixturevalue\n in the same test is an error.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\\\n def test_one(capsys, request):\n request.getfixturevalue(\"capfd\")\n def test_two(capfd, request):\n request.getfixturevalue(\"capsys\")\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*test_one*\",\n \"E * cannot use capfd and capsys at the same time\",\n \"*test_two*\",\n \"E * cannot use capsys and capfd at the same time\",\n \"*2 failed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capfdbinary_TestCaptureFixture.test_capfdbinary.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capfdbinary_TestCaptureFixture.test_capfdbinary.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 535, "end_line": 547, "span_ids": ["TestCaptureFixture.test_capfdbinary"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n def test_capfdbinary(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\\\n def test_hello(capfdbinary):\n import os\n # some likely un-decodable bytes\n os.write(1, b'\\\\xfe\\\\x98\\\\x20')\n out, err = capfdbinary.readouterr()\n assert out == b'\\\\xfe\\\\x98\\\\x20'\n assert err == b''\n \"\"\"\n )\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_partial_setup_failure_TestCaptureFixture.test_capture_and_logging.assert_closed_not_in_re": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_partial_setup_failure_TestCaptureFixture.test_capture_and_logging.assert_closed_not_in_re", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 585, "end_line": 618, "span_ids": ["TestCaptureFixture.test_capture_and_logging", "TestCaptureFixture.test_partial_setup_failure", "TestCaptureFixture.test_keyboardinterrupt_disables_capturing"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n def test_partial_setup_failure(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n def test_hello(capsys, missingarg):\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*test_partial_setup_failure*\", \"*1 error*\"])\n\n def test_keyboardinterrupt_disables_capturing(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n def test_hello(capfd):\n import os\n os.write(1, b'42')\n raise KeyboardInterrupt()\n \"\"\"\n )\n result = pytester.runpytest_subprocess(p)\n result.stdout.fnmatch_lines([\"*KeyboardInterrupt*\"])\n assert result.ret == 2\n\n def test_capture_and_logging(self, pytester: Pytester) -> None:\n \"\"\"#14\"\"\"\n p = pytester.makepyfile(\n \"\"\"\\\n import logging\n 
def test_log(capsys):\n logging.error('x')\n \"\"\"\n )\n result = pytester.runpytest_subprocess(p)\n assert \"closed\" not in result.stderr.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_fixture_use_by_other_fixtures_teardown_TestCaptureFixture.test_fixture_use_by_other_fixtures_teardown.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_fixture_use_by_other_fixtures_teardown_TestCaptureFixture.test_fixture_use_by_other_fixtures_teardown.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 714, "end_line": 742, "span_ids": ["TestCaptureFixture.test_fixture_use_by_other_fixtures_teardown"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n @pytest.mark.parametrize(\"cap\", [\"capsys\", \"capfd\"])\n def test_fixture_use_by_other_fixtures_teardown(\n self, pytester: Pytester, cap\n ) -> None:\n \"\"\"Ensure we can access setup and teardown buffers from teardown when using capsys/capfd (##3033)\"\"\"\n pytester.makepyfile(\n \"\"\"\\\n import sys\n import pytest\n import os\n\n @pytest.fixture()\n def fix({cap}):\n print(\"setup out\")\n sys.stderr.write(\"setup err\\\\n\")\n yield\n out, err = {cap}.readouterr()\n assert out == 'setup out\\\\ncall out\\\\n'\n assert err == 'setup err\\\\ncall err\\\\n'\n\n def test_a(fix):\n print(\"call out\")\n sys.stderr.write(\"call err\\\\n\")\n \"\"\".format(\n cap=cap\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_setup_failure_does_not_kill_capturing_test_setup_failure_does_not_kill_capturing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_setup_failure_does_not_kill_capturing_test_setup_failure_does_not_kill_capturing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 745, "end_line": 757, "span_ids": ["test_setup_failure_does_not_kill_capturing"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_failure_does_not_kill_capturing(pytester: Pytester) -> None:\n sub1 = pytester.mkpydir(\"sub1\")\n 
sub1.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def pytest_runtest_setup(item):\n raise ValueError(42)\n \"\"\"\n )\n )\n sub1.joinpath(\"test_mod.py\").write_text(\"def test_func1(): pass\")\n result = pytester.runpytest(pytester.path, \"--traceconfig\")\n result.stdout.fnmatch_lines([\"*ValueError(42)*\", \"*1 error*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_error_during_readouterr_test_error_during_readouterr.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_error_during_readouterr_test_error_during_readouterr.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 829, "end_line": 845, "span_ids": ["test_error_during_readouterr"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_error_during_readouterr(pytester: Pytester) -> None:\n \"\"\"Make sure we suspend capturing if errors occur during readouterr\"\"\"\n pytester.makepyfile(\n pytest_xyz=\"\"\"\n from _pytest.capture import FDCapture\n\n def bad_snap(self):\n raise Exception('boom')\n\n assert FDCapture.snap\n FDCapture.snap = bad_snap\n \"\"\"\n )\n result = pytester.runpytest_subprocess(\"-p\", \"pytest_xyz\", \"--version\")\n result.stderr.fnmatch_lines(\n [\"*in bad_snap\", \" raise Exception('boom')\", \"Exception: boom\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureIO_TestCaptureIO.test_write_bytes_to_buffer.assert_f_getvalue_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureIO_TestCaptureIO.test_write_bytes_to_buffer.assert_f_getvalue_f", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 848, "end_line": 867, "span_ids": ["TestCaptureIO.test_write_bytes_to_buffer", "TestCaptureIO", "TestCaptureIO.test_unicode_and_str_mixture", "TestCaptureIO.test_text"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureIO:\n def test_text(self) -> None:\n f = capture.CaptureIO()\n f.write(\"hello\")\n s = f.getvalue()\n assert s == \"hello\"\n f.close()\n\n def test_unicode_and_str_mixture(self) -> None:\n f = capture.CaptureIO()\n f.write(\"\\u00f6\")\n pytest.raises(TypeError, f.write, b\"hello\")\n\n def test_write_bytes_to_buffer(self) -> None:\n 
\"\"\"In python3, stdout / stderr are text io wrappers (exposing a buffer\n property of the underlying bytestream). See issue #1407\n \"\"\"\n f = capture.CaptureIO()\n f.buffer.write(b\"foo\\r\\n\")\n assert f.getvalue() == \"foo\\r\\n\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture.test_simple_resume_suspend_saved_fd.try_.finally_.os_close_new_fd_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture.test_simple_resume_suspend_saved_fd.try_.finally_.os_close_new_fd_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1015, "end_line": 1060, "span_ids": ["saved_fd", "TestFDCapture.test_capfd_sys_stdout_mode", "TestFDCapture.test_simple_resume_suspend"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFDCapture:\n\n def test_simple_resume_suspend(self) -> None:\n with saved_fd(1):\n cap = capture.FDCapture(1)\n cap.start()\n data = b\"hello\"\n os.write(1, data)\n sys.stdout.write(\"whatever\")\n s = cap.snap()\n assert s == \"hellowhatever\"\n cap.suspend()\n os.write(1, b\"world\")\n sys.stdout.write(\"qlwkej\")\n assert not cap.snap()\n cap.resume()\n os.write(1, b\"but now\")\n sys.stdout.write(\" yes\\n\")\n s = cap.snap()\n assert s == \"but now yes\\n\"\n cap.suspend()\n cap.done()\n pytest.raises(AssertionError, cap.suspend)\n\n assert repr(cap) == (\n \"\".format(\n cap.targetfd_save, cap.tmpfile\n )\n )\n # Should not crash with missing \"_old\".\n assert repr(cap.syscapture) == (\n \" _state='done' tmpfile={!r}>\".format(\n cap.syscapture.tmpfile\n )\n )\n\n def test_capfd_sys_stdout_mode(self, capfd) -> None:\n assert \"b\" not in sys.stdout.mode\n\n\n@contextlib.contextmanager\ndef saved_fd(fd):\n new_fd = os.dup(fd)\n try:\n yield\n finally:\n os.dup2(new_fd, fd)\n os.close(new_fd)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture_TestStdCapture.test_reset_twice_error.assert_not_err": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture_TestStdCapture.test_reset_twice_error.assert_not_err", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1063, "end_line": 1122, "span_ids": ["TestStdCapture.test_capturing_done_simple", "TestStdCapture.test_capture_results_accessible_by_attribute", "TestStdCapture.test_capturing_readouterr", "TestStdCapture.test_reset_twice_error", "TestStdCapture.test_capturing_readouterr_unicode", "TestStdCapture.getcapture", "TestStdCapture", "TestStdCapture.test_capturing_reset_simple"], "tokens": 457}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStdCapture:\n captureclass = staticmethod(StdCapture)\n\n @contextlib.contextmanager\n def getcapture(self, **kw):\n cap = self.__class__.captureclass(**kw)\n cap.start_capturing()\n try:\n yield cap\n finally:\n cap.stop_capturing()\n\n def test_capturing_done_simple(self) -> None:\n with self.getcapture() as cap:\n sys.stdout.write(\"hello\")\n sys.stderr.write(\"world\")\n out, err = cap.readouterr()\n assert out == \"hello\"\n assert err == \"world\"\n\n def test_capturing_reset_simple(self) -> None:\n with self.getcapture() as cap:\n print(\"hello world\")\n sys.stderr.write(\"hello error\\n\")\n out, err = cap.readouterr()\n assert out == \"hello world\\n\"\n assert err == \"hello error\\n\"\n\n def test_capturing_readouterr(self) -> None:\n with self.getcapture() as cap:\n print(\"hello world\")\n sys.stderr.write(\"hello error\\n\")\n out, err = cap.readouterr()\n assert out == \"hello world\\n\"\n assert err == \"hello error\\n\"\n sys.stderr.write(\"error2\")\n out, err = cap.readouterr()\n assert err == \"error2\"\n\n def test_capture_results_accessible_by_attribute(self) -> None:\n with self.getcapture() as cap:\n sys.stdout.write(\"hello\")\n sys.stderr.write(\"world\")\n capture_result = cap.readouterr()\n assert capture_result.out == \"hello\"\n assert capture_result.err == \"world\"\n\n def test_capturing_readouterr_unicode(self) -> None:\n with self.getcapture() as cap:\n print(\"hx\u0105\u0107\")\n out, err = cap.readouterr()\n assert out == \"hx\u0105\u0107\\n\"\n\n def test_reset_twice_error(self) -> None:\n with self.getcapture() as cap:\n print(\"hello\")\n out, err = cap.readouterr()\n pytest.raises(ValueError, cap.stop_capturing)\n assert out == \"hello\\n\"\n assert not err", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture.test_capturing_modify_sysouterr_in_between_TestStdCapture.test_capturing_modify_sysouterr_in_between.assert_sys_stderr_olde": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture.test_capturing_modify_sysouterr_in_between_TestStdCapture.test_capturing_modify_sysouterr_in_between.assert_sys_stderr_olde", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1124, "end_line": 1138, "span_ids": ["TestStdCapture.test_capturing_modify_sysouterr_in_between"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStdCapture:\n\n def test_capturing_modify_sysouterr_in_between(self) -> None:\n oldout = sys.stdout\n olderr = sys.stderr\n with self.getcapture() as cap:\n sys.stdout.write(\"hello\")\n sys.stderr.write(\"world\")\n sys.stdout = capture.CaptureIO()\n sys.stderr = 
capture.CaptureIO()\n print(\"not seen\")\n sys.stderr.write(\"not seen\\n\")\n out, err = cap.readouterr()\n assert out == \"hello\"\n assert err == \"world\"\n assert sys.stdout == oldout\n assert sys.stderr == olderr", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFD_TestStdCaptureFD.test_many.with_lsof_check_.for_i_in_range_10_.cap_stop_capturing_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFD_TestStdCaptureFD.test_many.with_lsof_check_.for_i_in_range_10_.cap_stop_capturing_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1198, "end_line": 1238, "span_ids": ["TestStdCaptureFD.test_simple_only_fd", "TestStdCaptureFD.test_many", "TestStdCaptureFD.test_intermingling", "TestStdCaptureFD"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStdCaptureFD(TestStdCapture):\n captureclass = staticmethod(StdCaptureFD)\n\n def test_simple_only_fd(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\\\n import os\n def test_x():\n os.write(1, b\"hello\\\\n\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines(\n \"\"\"\n *test_x*\n *assert 0*\n *Captured stdout*\n \"\"\"\n )\n\n def test_intermingling(self):\n with self.getcapture() as cap:\n os.write(1, b\"1\")\n sys.stdout.write(str(2))\n sys.stdout.flush()\n os.write(1, b\"3\")\n os.write(2, b\"a\")\n sys.stderr.write(\"b\")\n sys.stderr.flush()\n os.write(2, b\"c\")\n out, err = cap.readouterr()\n assert out == \"123\"\n assert err == \"abc\"\n\n def test_many(self, capfd):\n with lsof_check():\n for i in range(10):\n cap = StdCaptureFD()\n cap.start_capturing()\n cap.stop_capturing()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD_TestStdCaptureFDinvalidFD.test_stdcapture_fd_invalid_fd.assert_result_parseoutcom": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD_TestStdCaptureFDinvalidFD.test_stdcapture_fd_invalid_fd.assert_result_parseoutcom", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1241, "end_line": 1283, "span_ids": ["TestStdCaptureFDinvalidFD.test_stdcapture_fd_invalid_fd", "TestStdCaptureFDinvalidFD"], "tokens": 409}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "class TestStdCaptureFDinvalidFD:\n def test_stdcapture_fd_invalid_fd(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import os\n from fnmatch import fnmatch\n from _pytest import capture\n\n def StdCaptureFD(out=True, err=True, in_=True):\n return capture.MultiCapture(\n in_=capture.FDCapture(0) if in_ else None,\n out=capture.FDCapture(1) if out else None,\n err=capture.FDCapture(2) if err else None,\n )\n\n def test_stdout():\n os.close(1)\n cap = StdCaptureFD(out=True, err=False, in_=False)\n assert fnmatch(repr(cap.out), \"\")\n cap.start_capturing()\n os.write(1, b\"stdout\")\n assert cap.readouterr() == (\"stdout\", \"\")\n cap.stop_capturing()\n\n def test_stderr():\n os.close(2)\n cap = StdCaptureFD(out=False, err=True, in_=False)\n assert fnmatch(repr(cap.err), \"\")\n cap.start_capturing()\n os.write(2, b\"stderr\")\n assert cap.readouterr() == (\"\", \"stderr\")\n cap.stop_capturing()\n\n def test_stdin():\n os.close(0)\n cap = StdCaptureFD(out=False, err=False, in_=True)\n assert fnmatch(repr(cap.in_), \"\")\n cap.stop_capturing()\n \"\"\"\n )\n result = pytester.runpytest_subprocess(\"--capture=fd\")\n assert result.ret == 0\n assert result.parseoutcomes()[\"passed\"] == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capturing_and_logging_fundamentals_test_capturing_and_logging_fundamentals.assert_atexit_not_in_re": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capturing_and_logging_fundamentals_test_capturing_and_logging_fundamentals.assert_atexit_not_in_re", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1375, "end_line": 1417, "span_ids": ["test_capturing_and_logging_fundamentals"], "tokens": 287}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"method\", [\"SysCapture(2)\", \"SysCapture(2, tee=True)\", \"FDCapture(2)\"]\n)\ndef test_capturing_and_logging_fundamentals(pytester: Pytester, method: str) -> None:\n # here we check a fundamental feature\n p = pytester.makepyfile(\n \"\"\"\n import sys, os\n import py, logging\n from _pytest import capture\n cap = capture.MultiCapture(\n in_=None,\n out=None,\n err=capture.%s,\n )\n cap.start_capturing()\n\n logging.warning(\"hello1\")\n outerr = cap.readouterr()\n print(\"suspend, captured %%s\" %%(outerr,))\n logging.warning(\"hello2\")\n\n cap.pop_outerr_to_orig()\n logging.warning(\"hello3\")\n\n outerr = cap.readouterr()\n print(\"suspend2, captured %%s\" %% (outerr,))\n \"\"\"\n % (method,)\n )\n result = pytester.runpython(p)\n result.stdout.fnmatch_lines(\n \"\"\"\n suspend, captured*hello1*\n suspend2, captured*WARNING:root:hello3*\n \"\"\"\n )\n result.stderr.fnmatch_lines(\n \"\"\"\n WARNING:root:hello2\n \"\"\"\n )\n assert \"atexit\" not in result.stderr.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_global_capture_with_live_logging_test_global_capture_with_live_logging.None_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_global_capture_with_live_logging_test_global_capture_with_live_logging.None_7", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1503, "end_line": 1558, "span_ids": ["test_global_capture_with_live_logging"], "tokens": 365}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_global_capture_with_live_logging(pytester: Pytester) -> None:\n # Issue 3819\n # capture should work with live cli logging\n\n # Teardown report seems to have the capture for the whole process (setup, capture, teardown)\n pytester.makeconftest(\n \"\"\"\n def pytest_runtest_logreport(report):\n if \"test_global\" in report.nodeid:\n if report.when == \"teardown\":\n with open(\"caplog\", \"w\") as f:\n f.write(report.caplog)\n with open(\"capstdout\", \"w\") as f:\n f.write(report.capstdout)\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n import logging\n import sys\n import pytest\n\n logger = logging.getLogger(__name__)\n\n @pytest.fixture\n def fix1():\n print(\"fix setup\")\n logging.info(\"fix setup\")\n yield\n logging.info(\"fix teardown\")\n print(\"fix teardown\")\n\n def test_global(fix1):\n print(\"begin test\")\n logging.info(\"something in test\")\n print(\"end test\")\n \"\"\"\n )\n result = pytester.runpytest_subprocess(\"--log-cli-level=INFO\")\n assert result.ret == 0\n\n with open(\"caplog\") as f:\n caplog = f.read()\n\n assert \"fix setup\" in caplog\n assert \"something in test\" in caplog\n assert \"fix teardown\" in caplog\n\n with open(\"capstdout\") as f:\n capstdout = f.read()\n\n assert \"fix setup\" in capstdout\n assert \"begin test\" in capstdout\n assert \"end test\" in capstdout\n assert \"fix teardown\" in capstdout", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_with_live_logging_test_capture_with_live_logging.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_with_live_logging_test_capture_with_live_logging.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1561, "end_line": 1594, "span_ids": ["test_capture_with_live_logging"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"capture_fixture\", [\"capsys\", 
\"capfd\"])\ndef test_capture_with_live_logging(\n pytester: Pytester, capture_fixture: CaptureFixture[str]\n) -> None:\n # Issue 3819\n # capture should work with live cli logging\n\n pytester.makepyfile(\n \"\"\"\n import logging\n import sys\n\n logger = logging.getLogger(__name__)\n\n def test_capture({0}):\n print(\"hello\")\n sys.stderr.write(\"world\\\\n\")\n captured = {0}.readouterr()\n assert captured.out == \"hello\\\\n\"\n assert captured.err == \"world\\\\n\"\n\n logging.info(\"something\")\n print(\"next\")\n logging.info(\"something\")\n\n captured = {0}.readouterr()\n assert captured.out == \"next\\\\n\"\n \"\"\".format(\n capture_fixture\n )\n )\n\n result = pytester.runpytest_subprocess(\"--log-cli-level=INFO\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_getcustomfile_roundtrip_TestCollector.test_getcustomfile_roundtrip.assert_isinstance_nodes_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_getcustomfile_roundtrip_TestCollector.test_getcustomfile_roundtrip.assert_isinstance_nodes_0", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 93, "end_line": 110, "span_ids": ["TestCollector.test_getcustomfile_roundtrip"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollector:\n\n def test_getcustomfile_roundtrip(self, pytester: Pytester) -> None:\n hello = pytester.makefile(\".xxx\", hello=\"world\")\n pytester.makepyfile(\n conftest=\"\"\"\n import pytest\n class CustomFile(pytest.File):\n pass\n def pytest_collect_file(path, parent):\n if path.ext == \".xxx\":\n return CustomFile.from_parent(fspath=path, parent=parent)\n \"\"\"\n )\n node = pytester.getpathnode(hello)\n assert isinstance(node, pytest.File)\n assert node.name == \"hello.xxx\"\n nodes = node.session.perform_collect([node.nodeid], genitems=False)\n assert len(nodes) == 1\n assert isinstance(nodes[0], pytest.File)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_can_skip_class_with_test_attr_TestCollector.test_can_skip_class_with_test_attr.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_can_skip_class_with_test_attr_TestCollector.test_can_skip_class_with_test_attr.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 112, "end_line": 125, "span_ids": ["TestCollector.test_can_skip_class_with_test_attr"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollector:\n\n def test_can_skip_class_with_test_attr(self, pytester: Pytester) -> None:\n \"\"\"Assure test class is skipped when using `__test__=False` (See #2007).\"\"\"\n pytester.makepyfile(\n \"\"\"\n class TestFoo(object):\n __test__ = False\n def __init__(self):\n pass\n def test_foo():\n assert True\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 0 items\", \"*no tests ran in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS_TestCollectFS.test_ignored_certain_directories.assert_test_found_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS_TestCollectFS.test_ignored_certain_directories.assert_test_found_in_s", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 128, "end_line": 145, "span_ids": ["TestCollectFS", "TestCollectFS.test_ignored_certain_directories"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectFS:\n def test_ignored_certain_directories(self, pytester: Pytester) -> None:\n tmpdir = pytester.path\n ensure_file(tmpdir / \"build\" / \"test_notfound.py\")\n ensure_file(tmpdir / \"dist\" / \"test_notfound.py\")\n ensure_file(tmpdir / \"_darcs\" / \"test_notfound.py\")\n ensure_file(tmpdir / \"CVS\" / \"test_notfound.py\")\n ensure_file(tmpdir / \"{arch}\" / \"test_notfound.py\")\n ensure_file(tmpdir / \".whatever\" / \"test_notfound.py\")\n ensure_file(tmpdir / \".bzr\" / \"test_notfound.py\")\n ensure_file(tmpdir / \"normal\" / \"test_found.py\")\n for x in Path(str(tmpdir)).rglob(\"test_*.py\"):\n x.write_text(\"def test_hello(): pass\", \"utf-8\")\n\n result = pytester.runpytest(\"--collect-only\")\n s = result.stdout.str()\n assert \"test_notfound\" not in s\n assert \"test_found\" in s", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_ignored_virtualenvs_norecursedirs_precedence_TestCollectFS.test_ignored_virtualenvs_norecursedirs_precedence.assert_test_invenv_in_r": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_ignored_virtualenvs_norecursedirs_precedence_TestCollectFS.test_ignored_virtualenvs_norecursedirs_precedence.assert_test_invenv_in_r", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 174, 
"end_line": 197, "span_ids": ["TestCollectFS.test_ignored_virtualenvs_norecursedirs_precedence"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectFS:\n\n @pytest.mark.parametrize(\n \"fname\",\n (\n \"activate\",\n \"activate.csh\",\n \"activate.fish\",\n \"Activate\",\n \"Activate.bat\",\n \"Activate.ps1\",\n ),\n )\n def test_ignored_virtualenvs_norecursedirs_precedence(\n self, pytester: Pytester, fname: str\n ) -> None:\n bindir = \"Scripts\" if sys.platform.startswith(\"win\") else \"bin\"\n # norecursedirs takes priority\n ensure_file(pytester.path / \".virtual\" / bindir / fname)\n testfile = ensure_file(pytester.path / \".virtual\" / \"test_invenv.py\")\n testfile.write_text(\"def test_hello(): pass\")\n result = pytester.runpytest(\"--collect-in-virtualenv\")\n result.stdout.no_fnmatch_line(\"*test_invenv*\")\n # ...unless the virtualenv is explicitly given on the CLI\n result = pytester.runpytest(\"--collect-in-virtualenv\", \".virtual\")\n assert \"test_invenv\" in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test__in_venv_TestCollectFS.test__in_venv.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test__in_venv_TestCollectFS.test__in_venv.None_1", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 199, "end_line": 220, "span_ids": ["TestCollectFS.test__in_venv"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectFS:\n\n @pytest.mark.parametrize(\n \"fname\",\n (\n \"activate\",\n \"activate.csh\",\n \"activate.fish\",\n \"Activate\",\n \"Activate.bat\",\n \"Activate.ps1\",\n ),\n )\n def test__in_venv(self, pytester: Pytester, fname: str) -> None:\n \"\"\"Directly test the virtual env detection function\"\"\"\n bindir = \"Scripts\" if sys.platform.startswith(\"win\") else \"bin\"\n # no bin/activate, not a virtualenv\n base_path = pytester.mkdir(\"venv\")\n assert _in_venv(base_path) is False\n # with bin/activate, totally a virtualenv\n bin_path = base_path.joinpath(bindir)\n bin_path.mkdir()\n bin_path.joinpath(fname).touch()\n assert _in_venv(base_path) is True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_custom_norecursedirs_TestCollectFS.test_custom_norecursedirs.rec_assertoutcome_failed_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_custom_norecursedirs_TestCollectFS.test_custom_norecursedirs.rec_assertoutcome_failed_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 222, "end_line": 236, "span_ids": ["TestCollectFS.test_custom_norecursedirs"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectFS:\n\n def test_custom_norecursedirs(self, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n norecursedirs = mydir xyz*\n \"\"\"\n )\n tmpdir = pytester.path\n ensure_file(tmpdir / \"mydir\" / \"test_hello.py\").write_text(\"def test_1(): pass\")\n ensure_file(tmpdir / \"xyz123\" / \"test_2.py\").write_text(\"def test_2(): 0/0\")\n ensure_file(tmpdir / \"xy\" / \"test_ok.py\").write_text(\"def test_3(): pass\")\n rec = pytester.inline_run()\n rec.assertoutcome(passed=1)\n rec = pytester.inline_run(\"xyz123/test_2.py\")\n rec.assertoutcome(failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_testpaths_ini_TestCollectFS.test_testpaths_ini.None_1.assert_x_name_for_x_in_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_testpaths_ini_TestCollectFS.test_testpaths_ini.None_1.assert_x_name_for_x_in_i", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 238, "end_line": 266, "span_ids": ["TestCollectFS.test_testpaths_ini"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectFS:\n\n def test_testpaths_ini(self, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n testpaths = gui uts\n \"\"\"\n )\n tmpdir = pytester.path\n ensure_file(tmpdir / \"env\" / \"test_1.py\").write_text(\"def test_env(): pass\")\n ensure_file(tmpdir / \"gui\" / \"test_2.py\").write_text(\"def test_gui(): pass\")\n ensure_file(tmpdir / \"uts\" / \"test_3.py\").write_text(\"def test_uts(): pass\")\n\n # executing from rootdir only tests from `testpaths` directories\n # are collected\n items, reprec = pytester.inline_genitems(\"-v\")\n assert [x.name for x in items] == [\"test_gui\", \"test_uts\"]\n\n # check that explicitly passing directories in the command-line\n # collects the tests\n for dirname in (\"env\", \"gui\", \"uts\"):\n items, reprec = pytester.inline_genitems(tmpdir.joinpath(dirname))\n assert [x.name for x in items] == [\"test_%s\" % dirname]\n\n # changing cwd to each subdirectory and running pytest without\n # arguments collects 
the tests in that directory normally\n for dirname in (\"env\", \"gui\", \"uts\"):\n monkeypatch.chdir(pytester.path.joinpath(dirname))\n items, reprec = pytester.inline_genitems()\n assert [x.name for x in items] == [\"test_%s\" % dirname]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestPrunetraceback_TestPrunetraceback.test_custom_repr_failure.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestPrunetraceback_TestPrunetraceback.test_custom_repr_failure.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 285, "end_line": 310, "span_ids": ["TestPrunetraceback.test_custom_repr_failure", "TestPrunetraceback"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPrunetraceback:\n def test_custom_repr_failure(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import not_exists\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_collect_file(path, parent):\n return MyFile.from_parent(fspath=path, parent=parent)\n class MyError(Exception):\n pass\n class MyFile(pytest.File):\n def collect(self):\n raise MyError()\n def repr_failure(self, excinfo):\n if excinfo.errisinstance(MyError):\n return \"hello world\"\n return pytest.File.repr_failure(self, excinfo)\n \"\"\"\n )\n\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*ERROR collecting*\", \"*hello world*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestPrunetraceback.test_collect_report_postprocessing_TestPrunetraceback.test_collect_report_postprocessing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestPrunetraceback.test_collect_report_postprocessing_TestPrunetraceback.test_collect_report_postprocessing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 312, "end_line": 331, "span_ids": ["TestPrunetraceback.test_collect_report_postprocessing"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPrunetraceback:\n\n @pytest.mark.xfail(reason=\"other mechanism for adding to reporting needed\")\n def test_collect_report_postprocessing(self, pytester: Pytester) -> None:\n p = 
pytester.makepyfile(\n \"\"\"\n import not_exists\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.hookimpl(hookwrapper=True)\n def pytest_make_collect_report():\n outcome = yield\n rep = outcome.get_result()\n rep.headerlines += [\"header1\"]\n outcome.force_result(rep)\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*ERROR collecting*\", \"*header1*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests_TestCustomConftests.test_ignore_collect_path.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests_TestCustomConftests.test_ignore_collect_path.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 334, "end_line": 350, "span_ids": ["TestCustomConftests.test_ignore_collect_path", "TestCustomConftests"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCustomConftests:\n def test_ignore_collect_path(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_ignore_collect(path, config):\n return path.basename.startswith(\"x\") or \\\n path.basename == \"test_one.py\"\n \"\"\"\n )\n sub = pytester.mkdir(\"xy123\")\n ensure_file(sub / \"test_hello.py\").write_text(\"syntax error\")\n sub.joinpath(\"conftest.py\").write_text(\"syntax error\")\n pytester.makepyfile(\"def test_hello(): pass\")\n pytester.makepyfile(test_one=\"syntax error\")\n result = pytester.runpytest(\"--fulltrace\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_ignore_collect_not_called_on_argument_TestCustomConftests.test_ignore_collect_not_called_on_argument.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_ignore_collect_not_called_on_argument_TestCustomConftests.test_ignore_collect_not_called_on_argument.None_2", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 352, "end_line": 365, "span_ids": ["TestCustomConftests.test_ignore_collect_not_called_on_argument"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCustomConftests:\n\n def 
test_ignore_collect_not_called_on_argument(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_ignore_collect(path, config):\n return True\n \"\"\"\n )\n p = pytester.makepyfile(\"def test_hello(): pass\")\n result = pytester.runpytest(p)\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stdout.fnmatch_lines([\"*collected 0 items*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_collectignore_exclude_on_option_TestCustomConftests.test_collectignore_exclude_on_option.assert_passed_in_result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_collectignore_exclude_on_option_TestCustomConftests.test_collectignore_exclude_on_option.assert_passed_in_result", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 367, "end_line": 387, "span_ids": ["TestCustomConftests.test_collectignore_exclude_on_option"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCustomConftests:\n\n def test_collectignore_exclude_on_option(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import py\n from pathlib import Path\n collect_ignore = [py.path.local('hello'), 'test_world.py', Path('bye')]\n def pytest_addoption(parser):\n parser.addoption(\"--XX\", action=\"store_true\", default=False)\n def pytest_configure(config):\n if config.getvalue(\"XX\"):\n collect_ignore[:] = []\n \"\"\"\n )\n pytester.mkdir(\"hello\")\n pytester.makepyfile(test_world=\"def test_hello(): pass\")\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stdout.no_fnmatch_line(\"*passed*\")\n result = pytester.runpytest(\"--XX\")\n assert result.ret == 0\n assert \"passed\" in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_collectignoreglob_exclude_on_option_TestCustomConftests.test_collectignoreglob_exclude_on_option.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_collectignoreglob_exclude_on_option_TestCustomConftests.test_collectignoreglob_exclude_on_option.None_4", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 389, "end_line": 407, "span_ids": ["TestCustomConftests.test_collectignoreglob_exclude_on_option"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCustomConftests:\n\n def test_collectignoreglob_exclude_on_option(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n collect_ignore_glob = ['*w*l[dt]*']\n def pytest_addoption(parser):\n parser.addoption(\"--XX\", action=\"store_true\", default=False)\n def pytest_configure(config):\n if config.getvalue(\"XX\"):\n collect_ignore_glob[:] = []\n \"\"\"\n )\n pytester.makepyfile(test_world=\"def test_hello(): pass\")\n pytester.makepyfile(test_welt=\"def test_hallo(): pass\")\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stdout.fnmatch_lines([\"*collected 0 items*\"])\n result = pytester.runpytest(\"--XX\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_pytest_fs_collect_hooks_are_seen_TestCustomConftests.test_pytest_fs_collect_hooks_are_seen.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_pytest_fs_collect_hooks_are_seen_TestCustomConftests.test_pytest_fs_collect_hooks_are_seen.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 409, "end_line": 423, "span_ids": ["TestCustomConftests.test_pytest_fs_collect_hooks_are_seen"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCustomConftests:\n\n def test_pytest_fs_collect_hooks_are_seen(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n class MyModule(pytest.Module):\n pass\n def pytest_collect_file(path, parent):\n if path.ext == \".py\":\n return MyModule.from_parent(fspath=path, parent=parent)\n \"\"\"\n )\n pytester.mkdir(\"sub\")\n pytester.makepyfile(\"def test_x(): pass\")\n result = pytester.runpytest(\"--co\")\n result.stdout.fnmatch_lines([\"*MyModule*\", \"*test_x*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_pytest_collect_file_from_sister_dir_TestCustomConftests.test_pytest_collect_file_from_sister_dir.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCustomConftests.test_pytest_collect_file_from_sister_dir_TestCustomConftests.test_pytest_collect_file_from_sister_dir.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": 
"text/x-python", "category": "test", "start_line": 425, "end_line": 454, "span_ids": ["TestCustomConftests.test_pytest_collect_file_from_sister_dir"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCustomConftests:\n\n def test_pytest_collect_file_from_sister_dir(self, pytester: Pytester) -> None:\n sub1 = pytester.mkpydir(\"sub1\")\n sub2 = pytester.mkpydir(\"sub2\")\n conf1 = pytester.makeconftest(\n \"\"\"\n import pytest\n class MyModule1(pytest.Module):\n pass\n def pytest_collect_file(path, parent):\n if path.ext == \".py\":\n return MyModule1.from_parent(fspath=path, parent=parent)\n \"\"\"\n )\n conf1.replace(sub1.joinpath(conf1.name))\n conf2 = pytester.makeconftest(\n \"\"\"\n import pytest\n class MyModule2(pytest.Module):\n pass\n def pytest_collect_file(path, parent):\n if path.ext == \".py\":\n return MyModule2.from_parent(fspath=path, parent=parent)\n \"\"\"\n )\n conf2.replace(sub2.joinpath(conf2.name))\n p = pytester.makepyfile(\"def test_x(): pass\")\n shutil.copy(p, sub1.joinpath(p.name))\n shutil.copy(p, sub2.joinpath(p.name))\n result = pytester.runpytest(\"--co\")\n result.stdout.fnmatch_lines([\"*MyModule1*\", \"*MyModule2*\", \"*test_x*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_protocol_single_function_TestSession.test_collect_protocol_single_function.assert_x_name_for_x_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_protocol_single_function_TestSession.test_collect_protocol_single_function.assert_x_name_for_x_in_s", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 483, "end_line": 504, "span_ids": ["TestSession.test_collect_protocol_single_function"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSession:\n\n def test_collect_protocol_single_function(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\"def test_func(): pass\")\n id = \"::\".join([p.name, \"test_func\"])\n items, hookrec = pytester.inline_genitems(id)\n (item,) = items\n assert item.name == \"test_func\"\n newid = item.nodeid\n assert newid == id\n pprint.pprint(hookrec.calls)\n topdir = pytester.path # noqa\n hookrec.assert_contains(\n [\n (\"pytest_collectstart\", \"collector.fspath == topdir\"),\n (\"pytest_make_collect_report\", \"collector.fspath == topdir\"),\n (\"pytest_collectstart\", \"collector.fspath == p\"),\n (\"pytest_make_collect_report\", \"collector.fspath == p\"),\n (\"pytest_pycollect_makeitem\", \"name == 'test_func'\"),\n (\"pytest_collectreport\", \"report.result[0].name == 'test_func'\"),\n ]\n )\n # 
ensure we are reporting the collection of the single test item (#2464)\n assert [x.name for x in self.get_reported_items(hookrec)] == [\"test_func\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_custom_nodes_multi_id_TestSession.test_collect_custom_nodes_multi_id.assert_len_self_get_repor": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_custom_nodes_multi_id_TestSession.test_collect_custom_nodes_multi_id.assert_len_self_get_repor", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 524, "end_line": 558, "span_ids": ["TestSession.test_collect_custom_nodes_multi_id"], "tokens": 290}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSession:\n\n def test_collect_custom_nodes_multi_id(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\"def test_func(): pass\")\n pytester.makeconftest(\n \"\"\"\n import pytest\n class SpecialItem(pytest.Item):\n def runtest(self):\n return # ok\n class SpecialFile(pytest.File):\n def collect(self):\n return [SpecialItem.from_parent(name=\"check\", parent=self)]\n def pytest_collect_file(path, parent):\n if path.basename == %r:\n return SpecialFile.from_parent(fspath=path, parent=parent)\n \"\"\"\n % p.name\n )\n id = p.name\n\n items, hookrec = pytester.inline_genitems(id)\n pprint.pprint(hookrec.calls)\n assert len(items) == 2\n hookrec.assert_contains(\n [\n (\"pytest_collectstart\", \"collector.fspath == collector.session.fspath\"),\n (\n \"pytest_collectstart\",\n \"collector.__class__.__name__ == 'SpecialFile'\",\n ),\n (\"pytest_collectstart\", \"collector.__class__.__name__ == 'Module'\"),\n (\"pytest_pycollect_makeitem\", \"name == 'test_func'\"),\n (\"pytest_collectreport\", \"report.nodeid.startswith(p.name)\"),\n ]\n )\n assert len(self.get_reported_items(hookrec)) == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_subdir_event_ordering_TestSession.test_collect_subdir_event_ordering.hookrec_assert_contains_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_subdir_event_ordering_TestSession.test_collect_subdir_event_ordering.hookrec_assert_contains_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 560, "end_line": 575, "span_ids": ["TestSession.test_collect_subdir_event_ordering"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSession:\n\n def test_collect_subdir_event_ordering(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\"def test_func(): pass\")\n aaa = pytester.mkpydir(\"aaa\")\n test_aaa = aaa.joinpath(\"test_aaa.py\")\n p.replace(test_aaa)\n\n items, hookrec = pytester.inline_genitems()\n assert len(items) == 1\n pprint.pprint(hookrec.calls)\n hookrec.assert_contains(\n [\n (\"pytest_collectstart\", \"collector.fspath == test_aaa\"),\n (\"pytest_pycollect_makeitem\", \"name == 'test_func'\"),\n (\"pytest_collectreport\", \"report.nodeid.startswith('aaa/test_aaa.py')\"),\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_two_commandline_args_TestSession.test_collect_two_commandline_args.hookrec_assert_contains_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_two_commandline_args_TestSession.test_collect_two_commandline_args.hookrec_assert_contains_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 577, "end_line": 600, "span_ids": ["TestSession.test_collect_two_commandline_args"], "tokens": 254}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSession:\n\n def test_collect_two_commandline_args(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\"def test_func(): pass\")\n aaa = pytester.mkpydir(\"aaa\")\n bbb = pytester.mkpydir(\"bbb\")\n test_aaa = aaa.joinpath(\"test_aaa.py\")\n shutil.copy(p, test_aaa)\n test_bbb = bbb.joinpath(\"test_bbb.py\")\n p.replace(test_bbb)\n\n id = \".\"\n\n items, hookrec = pytester.inline_genitems(id)\n assert len(items) == 2\n pprint.pprint(hookrec.calls)\n hookrec.assert_contains(\n [\n (\"pytest_collectstart\", \"collector.fspath == test_aaa\"),\n (\"pytest_pycollect_makeitem\", \"name == 'test_func'\"),\n (\"pytest_collectreport\", \"report.nodeid == 'aaa/test_aaa.py'\"),\n (\"pytest_collectstart\", \"collector.fspath == test_bbb\"),\n (\"pytest_pycollect_makeitem\", \"name == 'test_func'\"),\n (\"pytest_collectreport\", \"report.nodeid == 'bbb/test_bbb.py'\"),\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_serialization_byid_TestSession.test_find_byid_without_instance_parents.assert_x_name_for_x_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_serialization_byid_TestSession.test_find_byid_without_instance_parents.assert_x_name_for_x_in_s", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": 
"test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 602, "end_line": 626, "span_ids": ["TestSession.test_serialization_byid", "TestSession.test_find_byid_without_instance_parents"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSession:\n\n def test_serialization_byid(self, pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_func(): pass\")\n items, hookrec = pytester.inline_genitems()\n assert len(items) == 1\n (item,) = items\n items2, hookrec = pytester.inline_genitems(item.nodeid)\n (item2,) = items2\n assert item2.name == item.name\n assert item2.fspath == item.fspath\n\n def test_find_byid_without_instance_parents(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n class TestClass(object):\n def test_method(self):\n pass\n \"\"\"\n )\n arg = p.name + \"::TestClass::test_method\"\n items, hookrec = pytester.inline_genitems(arg)\n assert len(items) == 1\n (item,) = items\n assert item.nodeid.endswith(\"TestClass::test_method\")\n # ensure we are reporting the collection of the single test item (#2464)\n assert [x.name for x in self.get_reported_items(hookrec)] == [\"test_method\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems_Test_genitems.test_check_collect_hashes.for_numi_i_in_enumerate_.for_numj_j_in_enumerate_.if_numj_numi_.assert_i_j": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems_Test_genitems.test_check_collect_hashes.for_numi_i_in_enumerate_.for_numj_j_in_enumerate_.if_numj_numi_.assert_i_j", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 666, "end_line": 684, "span_ids": ["Test_genitems.test_check_collect_hashes", "Test_genitems"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Test_genitems:\n def test_check_collect_hashes(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test_1():\n pass\n\n def test_2():\n pass\n \"\"\"\n )\n shutil.copy(p, p.parent / (p.stem + \"2\" + \".py\"))\n items, reprec = pytester.inline_genitems(p.parent)\n assert len(items) == 4\n for numi, i in enumerate(items):\n for numj, j in enumerate(items):\n if numj != numi:\n assert hash(i) != hash(j)\n assert i != j", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems.test_example_items1_Test_genitems.test_example_items1.print_s_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems.test_example_items1_Test_genitems.test_example_items1.print_s_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 686, "end_line": 720, "span_ids": ["Test_genitems.test_example_items1"], "tokens": 351}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Test_genitems:\n\n def test_example_items1(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n def testone():\n pass\n\n class TestX(object):\n def testmethod_one(self):\n pass\n\n class TestY(TestX):\n @pytest.mark.parametrize(\"arg0\", [\".[\"])\n def testmethod_two(self, arg0):\n pass\n \"\"\"\n )\n items, reprec = pytester.inline_genitems(p)\n assert len(items) == 4\n assert items[0].name == \"testone\"\n assert items[1].name == \"testmethod_one\"\n assert items[2].name == \"testmethod_one\"\n assert items[3].name == \"testmethod_two[.[]\"\n\n # let's also test getmodpath here\n assert items[0].getmodpath() == \"testone\" # type: ignore[attr-defined]\n assert items[1].getmodpath() == \"TestX.testmethod_one\" # type: ignore[attr-defined]\n assert items[2].getmodpath() == \"TestY.testmethod_one\" # type: ignore[attr-defined]\n # PR #6202: Fix incorrect result of getmodpath method. (Resolves issue #6189)\n assert items[3].getmodpath() == \"TestY.testmethod_two[.[]\" # type: ignore[attr-defined]\n\n s = items[0].getmodpath(stopatmodule=False) # type: ignore[attr-defined]\n assert s.endswith(\"test_example_items1.testone\")\n print(s)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems.test_class_and_functions_discovery_using_glob_Test_genitems.test_class_and_functions_discovery_using_glob.assert_ids_MyTestSui": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_genitems.test_class_and_functions_discovery_using_glob_Test_genitems.test_class_and_functions_discovery_using_glob.assert_ids_MyTestSui", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 722, "end_line": 745, "span_ids": ["Test_genitems.test_class_and_functions_discovery_using_glob"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Test_genitems:\n\n def test_class_and_functions_discovery_using_glob(self, pytester: Pytester) -> None:\n \"\"\"Test that Python_classes and Python_functions config options work\n as prefixes and glob-like patterns (#600).\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n python_classes = *Suite Test\n python_functions = *_test test\n 
\"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n class MyTestSuite(object):\n def x_test(self):\n pass\n\n class TestCase(object):\n def test_y(self):\n pass\n \"\"\"\n )\n items, reprec = pytester.inline_genitems(p)\n ids = [x.getmodpath() for x in items] # type: ignore[attr-defined]\n assert ids == [\"MyTestSuite.x_test\", \"TestCase.test_y\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_matchnodes_two_collections_same_file_test_matchnodes_two_collections_same_file.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_matchnodes_two_collections_same_file_test_matchnodes_two_collections_same_file.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 748, "end_line": 786, "span_ids": ["test_matchnodes_two_collections_same_file"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_matchnodes_two_collections_same_file(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_configure(config):\n config.pluginmanager.register(Plugin2())\n\n class Plugin2(object):\n def pytest_collect_file(self, path, parent):\n if path.ext == \".abc\":\n return MyFile2.from_parent(fspath=path, parent=parent)\n\n def pytest_collect_file(path, parent):\n if path.ext == \".abc\":\n return MyFile1.from_parent(fspath=path, parent=parent)\n\n class MyFile1(pytest.File):\n def collect(self):\n yield Item1.from_parent(name=\"item1\", parent=self)\n\n class MyFile2(pytest.File):\n def collect(self):\n yield Item2.from_parent(name=\"item2\", parent=self)\n\n class Item1(pytest.Item):\n def runtest(self):\n pass\n\n class Item2(pytest.Item):\n def runtest(self):\n pass\n \"\"\"\n )\n p = pytester.makefile(\".abc\", \"\")\n result = pytester.runpytest()\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n res = pytester.runpytest(\"%s::item2\" % p.name)\n res.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestNodekeywords_TestNodekeywords.test_issue345.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestNodekeywords_TestNodekeywords.test_issue345.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 789, "end_line": 814, "span_ids": ["TestNodekeywords.test_issue345", "TestNodekeywords.test_no_under", "TestNodekeywords"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNodekeywords:\n def test_no_under(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n def test_pass(): pass\n def test_fail(): assert 0\n \"\"\"\n )\n values = list(modcol.keywords)\n assert modcol.name in values\n for x in values:\n assert not x.startswith(\"_\")\n assert modcol.name in repr(modcol.keywords)\n\n def test_issue345(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_should_not_be_selected():\n assert False, 'I should not have been selected to run'\n\n def test___repr__():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(\"-k repr\")\n reprec.assertoutcome(passed=1, failed=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_COLLECTION_ERROR_PY_FILES_COLLECTION_ERROR_PY_FILES.dict_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_COLLECTION_ERROR_PY_FILES_COLLECTION_ERROR_PY_FILES.dict_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 813, "end_line": 832, "span_ids": ["impl"], "tokens": 105}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "COLLECTION_ERROR_PY_FILES = dict(\n test_01_failure=\"\"\"\n def test_1():\n assert False\n \"\"\",\n test_02_import_error=\"\"\"\n import asdfasdfasdf\n def test_2():\n assert True\n \"\"\",\n test_03_import_error=\"\"\"\n import asdfasdfasdf\n def test_3():\n assert True\n \"\"\",\n test_04_success=\"\"\"\n def test_4():\n assert True\n \"\"\",\n)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_error_test_exit_on_collection_error.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_error_test_exit_on_collection_error.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 878, "end_line": 893, "span_ids": ["test_exit_on_collection_error"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_exit_on_collection_error(pytester: Pytester) -> None:\n \"\"\"Verify that all collection errors are collected and no tests executed\"\"\"\n 
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)\n\n res = pytester.runpytest()\n assert res.ret == 2\n\n res.stdout.fnmatch_lines(\n [\n \"collected 2 items / 2 errors\",\n \"*ERROR collecting test_02_import_error.py*\",\n \"*No module named *asdfa*\",\n \"*ERROR collecting test_03_import_error.py*\",\n \"*No module named *asdfa*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_fixture_scope_sibling_conftests_test_fixture_scope_sibling_conftests.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_fixture_scope_sibling_conftests_test_fixture_scope_sibling_conftests.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 974, "end_line": 1002, "span_ids": ["test_fixture_scope_sibling_conftests"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixture_scope_sibling_conftests(pytester: Pytester) -> None:\n \"\"\"Regression test case for https://github.com/pytest-dev/pytest/issues/2836\"\"\"\n foo_path = pytester.mkdir(\"foo\")\n foo_path.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def fix():\n return 1\n \"\"\"\n )\n )\n foo_path.joinpath(\"test_foo.py\").write_text(\"def test_foo(fix): assert fix == 1\")\n\n # Tests in `food/` should not see the conftest fixture from `foo/`\n food_path = pytester.mkpydir(\"food\")\n food_path.joinpath(\"test_food.py\").write_text(\"def test_food(fix): assert fix == 1\")\n\n res = pytester.runpytest()\n assert res.ret == 1\n\n res.stdout.fnmatch_lines(\n [\n \"*ERROR at setup of test_food*\",\n \"E*fixture 'fix' not found\",\n \"*1 passed, 1 error*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_with_chdir_during_import_test_collect_with_chdir_during_import.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_with_chdir_during_import_test_collect_with_chdir_during_import.None_4", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1113, "end_line": 1146, "span_ids": ["test_collect_with_chdir_during_import"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_with_chdir_during_import(pytester: Pytester) -> None:\n 
subdir = pytester.mkdir(\"sub\")\n pytester.path.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\n import os\n os.chdir(%r)\n \"\"\"\n % (str(subdir),)\n )\n )\n pytester.makepyfile(\n \"\"\"\n def test_1():\n import os\n assert os.getcwd() == %r\n \"\"\"\n % (str(subdir),)\n )\n with pytester.path.cwd():\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed in*\"])\n assert result.ret == 0\n\n # Handles relative testpaths.\n pytester.makeini(\n \"\"\"\n [pytest]\n testpaths = .\n \"\"\"\n )\n with pytester.path.cwd():\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"collected 1 item\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pyargs_with_testpaths_test_collect_pyargs_with_testpaths.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pyargs_with_testpaths_test_collect_pyargs_with_testpaths.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1149, "end_line": 1170, "span_ids": ["test_collect_pyargs_with_testpaths"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_pyargs_with_testpaths(\n pytester: Pytester, monkeypatch: MonkeyPatch\n) -> None:\n testmod = pytester.mkdir(\"testmod\")\n # NOTE: __init__.py is not collected since it does not match python_files.\n testmod.joinpath(\"__init__.py\").write_text(\"def test_func(): pass\")\n testmod.joinpath(\"test_file.py\").write_text(\"def test_func(): pass\")\n\n root = pytester.mkdir(\"root\")\n root.joinpath(\"pytest.ini\").write_text(\n textwrap.dedent(\n \"\"\"\n [pytest]\n addopts = --pyargs\n testpaths = testmod\n \"\"\"\n )\n )\n monkeypatch.setenv(\"PYTHONPATH\", str(pytester.path), prepend=os.pathsep)\n with root.cwd():\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*1 passed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_file_arg_test_collect_symlink_file_arg.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_file_arg_test_collect_symlink_file_arg.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1173, "end_line": 1185, "span_ids": ["test_collect_symlink_file_arg"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_symlink_file_arg(pytester: Pytester) -> None:\n \"\"\"Collect a direct symlink works even if it does not match python_files (#4325).\"\"\"\n real = pytester.makepyfile(\n real=\"\"\"\n def test_nodeid(request):\n assert request.node.nodeid == \"symlink.py::test_nodeid\"\n \"\"\"\n )\n symlink = pytester.path.joinpath(\"symlink.py\")\n symlink_or_skip(real, symlink)\n result = pytester.runpytest(\"-v\", symlink)\n result.stdout.fnmatch_lines([\"symlink.py::test_nodeid PASSED*\", \"*1 passed in*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_out_of_tree_test_collect_symlink_out_of_tree.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_out_of_tree_test_collect_symlink_out_of_tree.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1188, "end_line": 1213, "span_ids": ["test_collect_symlink_out_of_tree"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_symlink_out_of_tree(pytester: Pytester) -> None:\n \"\"\"Test collection of symlink via out-of-tree rootdir.\"\"\"\n sub = pytester.mkdir(\"sub\")\n real = sub.joinpath(\"test_real.py\")\n real.write_text(\n textwrap.dedent(\n \"\"\"\n def test_nodeid(request):\n # Should not contain sub/ prefix.\n assert request.node.nodeid == \"test_real.py::test_nodeid\"\n \"\"\"\n ),\n )\n\n out_of_tree = pytester.mkdir(\"out_of_tree\")\n symlink_to_sub = out_of_tree.joinpath(\"symlink_to_sub\")\n symlink_or_skip(sub, symlink_to_sub)\n os.chdir(sub)\n result = pytester.runpytest(\"-vs\", \"--rootdir=%s\" % sub, symlink_to_sub)\n result.stdout.fnmatch_lines(\n [\n # Should not contain \"sub/\"!\n \"test_real.py::test_nodeid PASSED\"\n ]\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pkg_init_and_file_in_args_test_collect_pkg_init_and_file_in_args.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pkg_init_and_file_in_args_test_collect_pkg_init_and_file_in_args.None_3", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1239, "end_line": 1265, "span_ids": ["test_collect_pkg_init_and_file_in_args"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", 
"end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_pkg_init_and_file_in_args(pytester: Pytester) -> None:\n subdir = pytester.mkdir(\"sub\")\n init = subdir.joinpath(\"__init__.py\")\n init.write_text(\"def test_init(): pass\")\n p = subdir.joinpath(\"test_file.py\")\n p.write_text(\"def test_file(): pass\")\n\n # NOTE: without \"-o python_files=*.py\" this collects test_file.py twice.\n # This changed/broke with \"Add package scoped fixtures #2283\" (2b1410895)\n # initially (causing a RecursionError).\n result = pytester.runpytest(\"-v\", str(init), str(p))\n result.stdout.fnmatch_lines(\n [\n \"sub/test_file.py::test_file PASSED*\",\n \"sub/test_file.py::test_file PASSED*\",\n \"*2 passed in*\",\n ]\n )\n\n result = pytester.runpytest(\"-v\", \"-o\", \"python_files=*.py\", str(init), str(p))\n result.stdout.fnmatch_lines(\n [\n \"sub/__init__.py::test_init PASSED*\",\n \"sub/test_file.py::test_file PASSED*\",\n \"*2 passed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_sub_with_symlinks_test_collect_sub_with_symlinks.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_sub_with_symlinks_test_collect_sub_with_symlinks.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1280, "end_line": 1301, "span_ids": ["test_collect_sub_with_symlinks"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"use_pkg\", (True, False))\ndef test_collect_sub_with_symlinks(use_pkg: bool, pytester: Pytester) -> None:\n \"\"\"Collection works with symlinked files and broken symlinks\"\"\"\n sub = pytester.mkdir(\"sub\")\n if use_pkg:\n sub.joinpath(\"__init__.py\").touch()\n sub.joinpath(\"test_file.py\").write_text(\"def test_file(): pass\")\n\n # Create a broken symlink.\n symlink_or_skip(\"test_doesnotexist.py\", sub.joinpath(\"test_broken.py\"))\n\n # Symlink that gets collected.\n symlink_or_skip(\"test_file.py\", sub.joinpath(\"test_symlink.py\"))\n\n result = pytester.runpytest(\"-v\", str(sub))\n result.stdout.fnmatch_lines(\n [\n \"sub/test_file.py::test_file PASSED*\",\n \"sub/test_symlink.py::test_file PASSED*\",\n \"*2 passed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_get_real_func_test_get_real_func.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_get_real_func_test_get_real_func.None_2", "embedding": null, "metadata": {"file_path": "testing/test_compat.py", 
"file_name": "test_compat.py", "file_type": "text/x-python", "category": "test", "start_line": 59, "end_line": 81, "span_ids": ["test_get_real_func"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_get_real_func() -> None:\n \"\"\"Check that get_real_func correctly unwraps decorators until reaching the real function\"\"\"\n\n def decorator(f):\n @wraps(f)\n def inner():\n pass # pragma: no cover\n\n return inner\n\n def func():\n pass # pragma: no cover\n\n wrapped_func = decorator(decorator(func))\n assert get_real_func(wrapped_func) is func\n\n wrapped_func2 = decorator(decorator(wrapped_func))\n assert get_real_func(wrapped_func2) is func\n\n # special case for __pytest_wrapped__ attribute: used to obtain the function up until the point\n # a function was wrapped by pytest itself\n wrapped_func2.__pytest_wrapped__ = _PytestWrapper(wrapped_func)\n assert get_real_func(wrapped_func2) is wrapped_func", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI_TestConfigAPI.test_getoption.assert_config_getoption_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI_TestConfigAPI.test_getoption.assert_config_getoption_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 523, "end_line": 575, "span_ids": ["TestConfigAPI.test_config_getvalueorskip", "TestConfigAPI.test_config_getoption_unicode", "TestConfigAPI", "TestConfigAPI.test_config_trace", "TestConfigAPI.test_config_getoption", "TestConfigAPI.test_config_getvalueorskip_None", "TestConfigAPI.test_getoption"], "tokens": 433}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n def test_config_trace(self, pytester: Pytester) -> None:\n config = pytester.parseconfig()\n values: List[str] = []\n config.trace.root.setwriter(values.append)\n config.trace(\"hello\")\n assert len(values) == 1\n assert values[0] == \"hello [config]\\n\"\n\n def test_config_getoption(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addoption(\"--hello\", \"-X\", dest=\"hello\")\n \"\"\"\n )\n config = pytester.parseconfig(\"--hello=this\")\n for x in (\"hello\", \"--hello\", \"-X\"):\n assert config.getoption(x) == \"this\"\n pytest.raises(ValueError, config.getoption, \"qweqwe\")\n\n def test_config_getoption_unicode(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addoption('--hello', type=str)\n \"\"\"\n )\n config = pytester.parseconfig(\"--hello=this\")\n assert config.getoption(\"hello\") == \"this\"\n\n def test_config_getvalueorskip(self, pytester: 
Pytester) -> None:\n config = pytester.parseconfig()\n pytest.raises(pytest.skip.Exception, config.getvalueorskip, \"hello\")\n verbose = config.getvalueorskip(\"verbose\")\n assert verbose == config.option.verbose\n\n def test_config_getvalueorskip_None(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addoption(\"--hello\")\n \"\"\"\n )\n config = pytester.parseconfig()\n with pytest.raises(pytest.skip.Exception):\n config.getvalueorskip(\"hello\")\n\n def test_getoption(self, pytester: Pytester) -> None:\n config = pytester.parseconfig()\n with pytest.raises(ValueError):\n config.getvalue(\"x\")\n assert config.getoption(\"x\", 1) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_bool_TestConfigAPI.test_addini_bool.assert_config_getini_str": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_bool_TestConfigAPI.test_addini_bool.assert_config_getini_str", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 724, "end_line": 745, "span_ids": ["TestConfigAPI.test_addini_bool"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n @pytest.mark.parametrize(\n \"str_val, bool_val\", [(\"True\", True), (\"no\", False), (\"no-ini\", True)]\n )\n def test_addini_bool(\n self, pytester: Pytester, str_val: str, bool_val: bool\n ) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"strip\", \"\", type=\"bool\", default=True)\n \"\"\"\n )\n if str_val != \"no-ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n strip=%s\n \"\"\"\n % str_val\n )\n config = pytester.parseconfig()\n assert config.getini(\"strip\") is bool_val", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addinivalue_line_existing_TestConfigAPI.test_addinivalue_line_existing.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addinivalue_line_existing_TestConfigAPI.test_addinivalue_line_existing.None_3", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 747, "end_line": 767, "span_ids": ["TestConfigAPI.test_addinivalue_line_existing"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestConfigAPI:\n\n def test_addinivalue_line_existing(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"xy\", \"\", type=\"linelist\")\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n xy= 123\n \"\"\"\n )\n config = pytester.parseconfig()\n values = config.getini(\"xy\")\n assert len(values) == 1\n assert values == [\"123\"]\n config.addinivalue_line(\"xy\", \"456\")\n values = config.getini(\"xy\")\n assert len(values) == 2\n assert values == [\"123\", \"456\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addinivalue_line_new_TestConfigAPI.test_addinivalue_line_new.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addinivalue_line_new_TestConfigAPI.test_addinivalue_line_new.None_4", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 769, "end_line": 785, "span_ids": ["TestConfigAPI.test_addinivalue_line_new"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n def test_addinivalue_line_new(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"xy\", \"\", type=\"linelist\")\n \"\"\"\n )\n config = pytester.parseconfig()\n assert not config.getini(\"xy\")\n config.addinivalue_line(\"xy\", \"456\")\n values = config.getini(\"xy\")\n assert len(values) == 1\n assert values == [\"456\"]\n config.addinivalue_line(\"xy\", \"123\")\n values = config.getini(\"xy\")\n assert len(values) == 2\n assert values == [\"456\", \"123\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_confcutdir_check_isdir_TestConfigAPI.test_confcutdir_check_isdir.assert_config_getoption_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_confcutdir_check_isdir_TestConfigAPI.test_confcutdir_check_isdir.assert_config_getoption_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 787, "end_line": 797, "span_ids": ["TestConfigAPI.test_confcutdir_check_isdir"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n def test_confcutdir_check_isdir(self, pytester: Pytester) -> None:\n \"\"\"Give an error if --confcutdir is not a valid 
directory (#2078)\"\"\"\n exp_match = r\"^--confcutdir must be a directory, given: \"\n with pytest.raises(pytest.UsageError, match=exp_match):\n pytester.parseconfig(\"--confcutdir\", pytester.path.joinpath(\"file\"))\n with pytest.raises(pytest.UsageError, match=exp_match):\n pytester.parseconfig(\"--confcutdir\", pytester.path.joinpath(\"inexistant\"))\n\n p = pytester.mkdir(\"dir\")\n config = pytester.parseconfig(\"--confcutdir\", p)\n assert config.getoption(\"confcutdir\") == str(p)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_iter_rewritable_modules_TestConfigAPI.test_iter_rewritable_modules.assert_list__iter_rewrita": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_iter_rewritable_modules_TestConfigAPI.test_iter_rewritable_modules.assert_list__iter_rewrita", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 799, "end_line": 816, "span_ids": ["TestConfigAPI.test_iter_rewritable_modules"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n @pytest.mark.parametrize(\n \"names, expected\",\n [\n # dist-info based distributions root are files as will be put in PYTHONPATH\n ([\"bar.py\"], [\"bar\"]),\n ([\"foo/bar.py\"], [\"bar\"]),\n ([\"foo/bar.pyc\"], []),\n ([\"foo/__init__.py\"], [\"foo\"]),\n ([\"bar/__init__.py\", \"xz.py\"], [\"bar\", \"xz\"]),\n ([\"setup.py\"], []),\n # egg based distributions root contain the files from the dist root\n ([\"src/bar/__init__.py\"], [\"bar\"]),\n ([\"src/bar/__init__.py\", \"setup.py\"], [\"bar\"]),\n ([\"source/python/bar/__init__.py\", \"setup.py\"], [\"bar\"]),\n ],\n )\n def test_iter_rewritable_modules(self, names, expected) -> None:\n assert list(_iter_rewritable_modules(names)) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigFromdictargs.test_inifilename_TestConfigFromdictargs.test_inifilename.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigFromdictargs.test_inifilename_TestConfigFromdictargs.test_inifilename.None_5", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 843, "end_line": 885, "span_ids": ["TestConfigFromdictargs.test_inifilename"], "tokens": 313}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestConfigFromdictargs:\n\n def test_inifilename(self, tmp_path: Path) -> None:\n d1 = tmp_path.joinpath(\"foo\")\n d1.mkdir()\n p1 = d1.joinpath(\"bar.ini\")\n p1.touch()\n p1.write_text(\n textwrap.dedent(\n \"\"\"\\\n [pytest]\n name = value\n \"\"\"\n )\n )\n\n inifilename = \"../../foo/bar.ini\"\n option_dict = {\"inifilename\": inifilename, \"capture\": \"no\"}\n\n cwd = tmp_path.joinpath(\"a/b\")\n cwd.mkdir(parents=True)\n p2 = cwd.joinpath(\"pytest.ini\")\n p2.touch()\n p2.write_text(\n textwrap.dedent(\n \"\"\"\\\n [pytest]\n name = wrong-value\n should_not_be_set = true\n \"\"\"\n )\n )\n with MonkeyPatch.context() as mp:\n mp.chdir(cwd)\n config = Config.fromdictargs(option_dict, ())\n inipath = absolutepath(inifilename)\n\n assert config.args == [str(cwd)]\n assert config.option.inifilename == inifilename\n assert config.option.capture == \"no\"\n\n # this indicates this is the file used for getting configuration values\n assert config.inipath == inipath\n assert config.inicfg.get(\"name\") == \"value\"\n assert config.inicfg.get(\"should_not_be_set\") is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_preparse_ordering_with_setuptools_test_preparse_ordering_with_setuptools.assert_plugin_x_42": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_preparse_ordering_with_setuptools_test_preparse_ordering_with_setuptools.assert_plugin_x_42", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 916, "end_line": 948, "span_ids": ["test_preparse_ordering_with_setuptools.Dist:2", "test_preparse_ordering_with_setuptools.EntryPoint", "test_preparse_ordering_with_setuptools.Dist", "test_preparse_ordering_with_setuptools.EntryPoint.load.PseudoPlugin", "test_preparse_ordering_with_setuptools.EntryPoint:2", "test_preparse_ordering_with_setuptools", "test_preparse_ordering_with_setuptools.EntryPoint.load.PseudoPlugin:2"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_preparse_ordering_with_setuptools(\n pytester: Pytester, monkeypatch: MonkeyPatch\n) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n\n class EntryPoint:\n name = \"mytestplugin\"\n group = \"pytest11\"\n\n def load(self):\n class PseudoPlugin:\n x = 42\n\n return PseudoPlugin()\n\n class Dist:\n files = ()\n metadata = {\"name\": \"foo\"}\n entry_points = (EntryPoint(),)\n\n def my_dists():\n return (Dist,)\n\n monkeypatch.setattr(importlib_metadata, \"distributions\", my_dists)\n pytester.makeconftest(\n \"\"\"\n pytest_plugins = \"mytestplugin\",\n \"\"\"\n )\n monkeypatch.setenv(\"PYTEST_PLUGINS\", \"mytestplugin\")\n config = pytester.parseconfig()\n plugin = config.pluginmanager.getplugin(\"mytestplugin\")\n assert plugin.x == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_plugin_preparse_prevents_setuptools_loading_test_plugin_preparse_prevents_setuptools_loading.if_block_it_.else_.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_plugin_preparse_prevents_setuptools_loading_test_plugin_preparse_prevents_setuptools_loading.if_block_it_.else_.assert_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1003, "end_line": 1037, "span_ids": ["test_plugin_preparse_prevents_setuptools_loading.Distribution", "test_plugin_preparse_prevents_setuptools_loading.Distribution:2", "test_plugin_preparse_prevents_setuptools_loading.DummyEntryPoint:2", "test_plugin_preparse_prevents_setuptools_loading.DummyEntryPoint", "test_plugin_preparse_prevents_setuptools_loading"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"block_it\", [True, False])\ndef test_plugin_preparse_prevents_setuptools_loading(\n pytester: Pytester, monkeypatch: MonkeyPatch, block_it: bool\n) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n\n plugin_module_placeholder = object()\n\n class DummyEntryPoint:\n name = \"mytestplugin\"\n group = \"pytest11\"\n\n def load(self):\n return plugin_module_placeholder\n\n class Distribution:\n version = \"1.0\"\n files = (\"foo.txt\",)\n metadata = {\"name\": \"foo\"}\n entry_points = (DummyEntryPoint(),)\n\n def distributions():\n return (Distribution(),)\n\n monkeypatch.setattr(importlib_metadata, \"distributions\", distributions)\n args = (\"-p\", \"no:mytestplugin\") if block_it else ()\n config = pytester.parseconfig(*args)\n config.pluginmanager.import_plugin(\"mytestplugin\")\n if block_it:\n assert \"mytestplugin\" not in sys.modules\n assert config.pluginmanager.get_plugin(\"mytestplugin\") is None\n else:\n assert (\n config.pluginmanager.get_plugin(\"mytestplugin\") is plugin_module_placeholder\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_cmdline_processargs_simple_test_invalid_options_show_extra_information.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_cmdline_processargs_simple_test_invalid_options_show_extra_information.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1113, "end_line": 1140, "span_ids": ["test_invalid_options_show_extra_information", "test_cmdline_processargs_simple"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "def test_cmdline_processargs_simple(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_cmdline_preparse(args):\n args.append(\"-h\")\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*pytest*\", \"*-h*\"])\n\n\ndef test_invalid_options_show_extra_information(pytester: Pytester) -> None:\n \"\"\"Display extra information when pytest exits due to unrecognized\n options in the command-line.\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n addopts = --invalid-option\n \"\"\"\n )\n result = pytester.runpytest()\n result.stderr.fnmatch_lines(\n [\n \"*error: unrecognized arguments: --invalid-option*\",\n \"* inifile: %s*\" % pytester.path.joinpath(\"tox.ini\"),\n \"* rootdir: %s*\" % pytester.path,\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_consider_args_after_options_for_rootdir_test_consider_args_after_options_for_rootdir.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_consider_args_after_options_for_rootdir_test_consider_args_after_options_for_rootdir.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1143, "end_line": 1173, "span_ids": ["test_consider_args_after_options_for_rootdir"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"args\",\n [\n [\"dir1\", \"dir2\", \"-v\"],\n [\"dir1\", \"-v\", \"dir2\"],\n [\"dir2\", \"-v\", \"dir1\"],\n [\"-v\", \"dir2\", \"dir1\"],\n ],\n)\ndef test_consider_args_after_options_for_rootdir(\n pytester: Pytester, args: List[str]\n) -> None:\n \"\"\"\n Consider all arguments in the command-line for rootdir\n discovery, even if they happen to occur after an option. 
#949\n \"\"\"\n # replace \"dir1\" and \"dir2\" from \"args\" into their real directory\n root = pytester.mkdir(\"myroot\")\n d1 = root.joinpath(\"dir1\")\n d1.mkdir()\n d2 = root.joinpath(\"dir2\")\n d2.mkdir()\n for i, arg in enumerate(args):\n if arg == \"dir1\":\n args[i] = str(d1)\n elif arg == \"dir2\":\n args[i] = str(d2)\n with MonkeyPatch.context() as mp:\n mp.chdir(root)\n result = pytester.runpytest(*args)\n result.stdout.fnmatch_lines([\"*rootdir: *myroot\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_toolongargs_issue224_test_config_in_subdirectory_colon_command_line_issue2148.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_toolongargs_issue224_test_config_in_subdirectory_colon_command_line_issue2148.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1176, "end_line": 1206, "span_ids": ["test_config_in_subdirectory_colon_command_line_issue2148", "test_toolongargs_issue224"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_toolongargs_issue224(pytester: Pytester) -> None:\n result = pytester.runpytest(\"-m\", \"hello\" * 500)\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n\n\ndef test_config_in_subdirectory_colon_command_line_issue2148(\n pytester: Pytester,\n) -> None:\n conftest_source = \"\"\"\n def pytest_addoption(parser):\n parser.addini('foo', 'foo')\n \"\"\"\n\n pytester.makefile(\n \".ini\",\n **{\"pytest\": \"[pytest]\\nfoo = root\", \"subdir/pytest\": \"[pytest]\\nfoo = subdir\"},\n )\n\n pytester.makepyfile(\n **{\n \"conftest\": conftest_source,\n \"subdir/conftest\": conftest_source,\n \"subdir/test_foo\": \"\"\"\\\n def test_foo(pytestconfig):\n assert pytestconfig.getini('foo') == 'subdir'\n \"\"\",\n }\n )\n\n result = pytester.runpytest(\"subdir/test_foo.py::test_foo\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_notify_exception_test_notify_exception.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_notify_exception_test_notify_exception.None_5", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1209, "end_line": 1231, "span_ids": ["test_notify_exception.A.pytest_internalerror", "test_notify_exception", "test_notify_exception.A"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_notify_exception(pytester: Pytester, capfd) -> None:\n config = pytester.parseconfig()\n with pytest.raises(ValueError) as excinfo:\n raise ValueError(1)\n config.notify_exception(excinfo, config.option)\n _, err = capfd.readouterr()\n assert \"ValueError\" in err\n\n class A:\n def pytest_internalerror(self):\n return True\n\n config.pluginmanager.register(A())\n config.notify_exception(excinfo, config.option)\n _, err = capfd.readouterr()\n assert not err\n\n config = pytester.parseconfig(\"-p\", \"no:terminal\")\n with pytest.raises(ValueError) as excinfo:\n raise ValueError(1)\n config.notify_exception(excinfo, config.option)\n _, err = capfd.readouterr()\n assert \"ValueError\" in err", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_collect_pytest_prefix_bug_integration_test_collect_pytest_prefix_bug.assert_pm_parse_hookimpl_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_collect_pytest_prefix_bug_integration_test_collect_pytest_prefix_bug.assert_pm_parse_hookimpl_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1275, "end_line": 1290, "span_ids": ["test_collect_pytest_prefix_bug_integration", "test_collect_pytest_prefix_bug.Dummy", "test_collect_pytest_prefix_bug.Dummy.pytest_something:2", "test_collect_pytest_prefix_bug"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_pytest_prefix_bug_integration(pytester: Pytester) -> None:\n \"\"\"Integration test for issue #3775\"\"\"\n p = pytester.copy_example(\"config/collect_pytest_prefix\")\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"* 1 passed *\"])\n\n\ndef test_collect_pytest_prefix_bug(pytestconfig):\n \"\"\"Ensure we collect only actual functions from conftest files (#3775)\"\"\"\n\n class Dummy:\n class pytest_something:\n pass\n\n pm = pytestconfig.pluginmanager\n assert pm.parse_hookimpl_opts(Dummy(), \"pytest_something\") is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs_TestOverrideIniArgs.test_override_ini_names.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs_TestOverrideIniArgs.test_override_ini_names.None_4", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1443, "end_line": 1476, "span_ids": ["TestOverrideIniArgs", "TestOverrideIniArgs.test_override_ini_names"], "tokens": 286}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOverrideIniArgs:\n @pytest.mark.parametrize(\"name\", \"setup.cfg tox.ini pytest.ini\".split())\n def test_override_ini_names(self, pytester: Pytester, name: str) -> None:\n section = \"[pytest]\" if name != \"setup.cfg\" else \"[tool:pytest]\"\n pytester.path.joinpath(name).write_text(\n textwrap.dedent(\n \"\"\"\n {section}\n custom = 1.0\"\"\".format(\n section=section\n )\n )\n )\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"custom\", \"\")\"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_pass(pytestconfig):\n ini_val = pytestconfig.getini(\"custom\")\n print('\\\\ncustom_option:%s\\\\n' % ini_val)\"\"\"\n )\n\n result = pytester.runpytest(\"--override-ini\", \"custom=2.0\", \"-s\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"custom_option:2.0\"])\n\n result = pytester.runpytest(\n \"--override-ini\", \"custom=2.0\", \"--override-ini=custom=3.0\", \"-s\"\n )\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"custom_option:3.0\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_ini_pathlist_TestOverrideIniArgs.test_override_ini_pathlist.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_ini_pathlist_TestOverrideIniArgs.test_override_ini_pathlist.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1478, "end_line": 1501, "span_ids": ["TestOverrideIniArgs.test_override_ini_pathlist"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOverrideIniArgs:\n\n def test_override_ini_pathlist(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"paths\", \"my new ini value\", type=\"pathlist\")\"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n paths=blah.py\"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import py.path\n def test_pathlist(pytestconfig):\n config_paths = pytestconfig.getini(\"paths\")\n print(config_paths)\n for cpf in config_paths:\n print('\\\\nuser_path:%s' % cpf.basename)\"\"\"\n )\n result = pytester.runpytest(\n \"--override-ini\", \"paths=foo/bar1.py foo/bar2.py\", \"-s\"\n )\n result.stdout.fnmatch_lines([\"user_path:bar1.py\", \"user_path:bar2.py\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_multiple_and_default_TestOverrideIniArgs.test_override_multiple_and_default.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_multiple_and_default_TestOverrideIniArgs.test_override_multiple_and_default.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1503, "end_line": 1547, "span_ids": ["TestOverrideIniArgs.test_override_multiple_and_default"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOverrideIniArgs:\n\n def test_override_multiple_and_default(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n addini = parser.addini\n addini(\"custom_option_1\", \"\", default=\"o1\")\n addini(\"custom_option_2\", \"\", default=\"o2\")\n addini(\"custom_option_3\", \"\", default=False, type=\"bool\")\n addini(\"custom_option_4\", \"\", default=True, type=\"bool\")\"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n custom_option_1=custom_option_1\n custom_option_2=custom_option_2\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_multiple_options(pytestconfig):\n prefix = \"custom_option\"\n for x in range(1, 5):\n ini_value=pytestconfig.getini(\"%s_%d\" % (prefix, x))\n print('\\\\nini%d:%s' % (x, ini_value))\n \"\"\"\n )\n result = pytester.runpytest(\n \"--override-ini\",\n \"custom_option_1=fulldir=/tmp/user1\",\n \"-o\",\n \"custom_option_2=url=/tmp/user2?a=b&d=e\",\n \"-o\",\n \"custom_option_3=True\",\n \"-o\",\n \"custom_option_4=no\",\n \"-s\",\n )\n result.stdout.fnmatch_lines(\n [\n \"ini1:fulldir=/tmp/user1\",\n \"ini2:url=/tmp/user2?a=b&d=e\",\n \"ini3:True\",\n \"ini4:False\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_ini_usage_error_bad_style_TestOverrideIniArgs.test_override_ini_handled_asap.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_override_ini_usage_error_bad_style_TestOverrideIniArgs.test_override_ini_handled_asap.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1549, "end_line": 1582, "span_ids": ["TestOverrideIniArgs.test_override_ini_usage_error_bad_style", "TestOverrideIniArgs.test_override_ini_handled_asap"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOverrideIniArgs:\n\n 
def test_override_ini_usage_error_bad_style(self, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n xdist_strict=False\n \"\"\"\n )\n result = pytester.runpytest(\"--override-ini\", \"xdist_strict\", \"True\")\n result.stderr.fnmatch_lines(\n [\n \"ERROR: -o/--override-ini expects option=value style (got: 'xdist_strict').\",\n ]\n )\n\n @pytest.mark.parametrize(\"with_ini\", [True, False])\n def test_override_ini_handled_asap(\n self, pytester: Pytester, with_ini: bool\n ) -> None:\n \"\"\"-o should be handled as soon as possible and always override what's in ini files (#2238)\"\"\"\n if with_ini:\n pytester.makeini(\n \"\"\"\n [pytest]\n python_files=test_*.py\n \"\"\"\n )\n pytester.makepyfile(\n unittest_ini_handle=\"\"\"\n def test():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--override-ini\", \"python_files=unittest_*.py\")\n result.stdout.fnmatch_lines([\"*1 passed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_addopts_from_ini_not_concatenated_TestOverrideIniArgs.test_override_ini_does_not_contain_paths.assert_config__override_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_addopts_from_ini_not_concatenated_TestOverrideIniArgs.test_override_ini_does_not_contain_paths.assert_config__override_i", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1606, "end_line": 1629, "span_ids": ["TestOverrideIniArgs.test_override_ini_does_not_contain_paths", "TestOverrideIniArgs.test_addopts_from_ini_not_concatenated"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOverrideIniArgs:\n\n def test_addopts_from_ini_not_concatenated(self, pytester: Pytester) -> None:\n \"\"\"`addopts` from ini should not take values from normal args (#4265).\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n addopts=-o\n \"\"\"\n )\n result = pytester.runpytest(\"cache_dir=ignored\")\n result.stderr.fnmatch_lines(\n [\n \"%s: error: argument -o/--override-ini: expected one argument (via addopts config)\"\n % (pytester._request.config._parser.optparser.prog,)\n ]\n )\n assert result.ret == _pytest.config.ExitCode.USAGE_ERROR\n\n def test_override_ini_does_not_contain_paths(\n self, _config_for_test, _sys_snapshot\n ) -> None:\n \"\"\"Check that -o no longer swallows all options after it (#3103)\"\"\"\n config = _config_for_test\n config._preparse([\"-o\", \"cache_dir=/cache\", \"/some/test/path\"])\n assert config._override_ini == [\"cache_dir=/cache\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_and_version_after_argument_error_test_help_and_version_after_argument_error.None_2": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_and_version_after_argument_error_test_help_and_version_after_argument_error.None_2", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1675, "end_line": 1716, "span_ids": ["test_help_and_version_after_argument_error"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_help_and_version_after_argument_error(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def validate(arg):\n raise argparse.ArgumentTypeError(\"argerror\")\n\n def pytest_addoption(parser):\n group = parser.getgroup('cov')\n group.addoption(\n \"--invalid-option-should-allow-for-help\",\n type=validate,\n )\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n addopts = --invalid-option-should-allow-for-help\n \"\"\"\n )\n result = pytester.runpytest(\"--help\")\n result.stdout.fnmatch_lines(\n [\n \"usage: *\",\n \"positional arguments:\",\n \"NOTE: displaying only minimal help due to UsageError.\",\n ]\n )\n result.stderr.fnmatch_lines(\n [\n \"ERROR: usage: *\",\n \"%s: error: argument --invalid-option-should-allow-for-help: expected one argument\"\n % (pytester._request.config._parser.optparser.prog,),\n ]\n )\n # Does not display full/default help.\n assert \"to see available markers type: pytest --markers\" not in result.stdout.lines\n assert result.ret == ExitCode.USAGE_ERROR\n\n result = pytester.runpytest(\"--version\")\n result.stderr.fnmatch_lines([f\"pytest {pytest.__version__}\"])\n assert result.ret == ExitCode.USAGE_ERROR", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_config_does_not_load_blocked_plugin_from_args_test_config_does_not_load_blocked_plugin_from_args.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_config_does_not_load_blocked_plugin_from_args_test_config_does_not_load_blocked_plugin_from_args.None_1", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1734, "end_line": 1743, "span_ids": ["test_config_does_not_load_blocked_plugin_from_args"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_config_does_not_load_blocked_plugin_from_args(pytester: Pytester) -> None:\n \"\"\"This tests that pytest's config setup handles \"-p no:X\".\"\"\"\n p = pytester.makepyfile(\"def test(capfd): pass\")\n result = pytester.runpytest(str(p), \"-pno:capture\")\n result.stdout.fnmatch_lines([\"E fixture 'capfd' not found\"])\n assert result.ret == ExitCode.TESTS_FAILED\n\n result = pytester.runpytest(str(p), 
\"-pno:capture\", \"-s\")\n result.stderr.fnmatch_lines([\"*: error: unrecognized arguments: -s\"])\n assert result.ret == ExitCode.USAGE_ERROR", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal_TestConftestValueAccessGlobal.test_basic_init.assert_conftest__rget_wit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal_TestConftestValueAccessGlobal.test_basic_init.assert_conftest__rget_wit", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 60, "span_ids": ["TestConftestValueAccessGlobal", "TestConftestValueAccessGlobal.test_basic_init", "TestConftestValueAccessGlobal.basedir"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"_sys_snapshot\")\nclass TestConftestValueAccessGlobal:\n @pytest.fixture(scope=\"module\", params=[\"global\", \"inpackage\"])\n def basedir(\n self, request, tmp_path_factory: TempPathFactory\n ) -> Generator[Path, None, None]:\n tmpdir = tmp_path_factory.mktemp(\"basedir\", numbered=True)\n tmpdir.joinpath(\"adir/b\").mkdir(parents=True)\n tmpdir.joinpath(\"adir/conftest.py\").write_text(\"a=1 ; Directory = 3\")\n tmpdir.joinpath(\"adir/b/conftest.py\").write_text(\"b=2 ; a = 1.5\")\n if request.param == \"inpackage\":\n tmpdir.joinpath(\"adir/__init__.py\").touch()\n tmpdir.joinpath(\"adir/b/__init__.py\").touch()\n\n yield tmpdir\n\n def test_basic_init(self, basedir: Path) -> None:\n conftest = PytestPluginManager()\n p = basedir / \"adir\"\n assert conftest._rget_with_confmod(\"a\", p, importmode=\"prepend\")[1] == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_immediate_initialiation_and_incremental_are_the_same_TestConftestValueAccessGlobal.test_immediate_initialiation_and_incremental_are_the_same.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_immediate_initialiation_and_incremental_are_the_same_TestConftestValueAccessGlobal.test_immediate_initialiation_and_incremental_are_the_same.None_3", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 62, "end_line": 73, "span_ids": ["TestConftestValueAccessGlobal.test_immediate_initialiation_and_incremental_are_the_same"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"_sys_snapshot\")\nclass TestConftestValueAccessGlobal:\n\n def test_immediate_initialiation_and_incremental_are_the_same(\n self, basedir: Path\n ) -> None:\n conftest = PytestPluginManager()\n assert not len(conftest._dirpath2confmods)\n conftest._getconftestmodules(basedir, importmode=\"prepend\")\n snap1 = len(conftest._dirpath2confmods)\n assert snap1 == 1\n conftest._getconftestmodules(basedir / \"adir\", importmode=\"prepend\")\n assert len(conftest._dirpath2confmods) == snap1 + 1\n conftest._getconftestmodules(basedir / \"b\", importmode=\"prepend\")\n assert len(conftest._dirpath2confmods) == snap1 + 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_in_nonpkg_with_init_test_conftest_in_nonpkg_with_init.ConftestWithSetinitial_tm": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_in_nonpkg_with_init_test_conftest_in_nonpkg_with_init.ConftestWithSetinitial_tm", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 99, "end_line": 105, "span_ids": ["test_conftest_in_nonpkg_with_init"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_in_nonpkg_with_init(tmp_path: Path, _sys_snapshot) -> None:\n tmp_path.joinpath(\"adir-1.0/b\").mkdir(parents=True)\n tmp_path.joinpath(\"adir-1.0/conftest.py\").write_text(\"a=1 ; Directory = 3\")\n tmp_path.joinpath(\"adir-1.0/b/conftest.py\").write_text(\"b=2 ; a = 1.5\")\n tmp_path.joinpath(\"adir-1.0/b/__init__.py\").touch()\n tmp_path.joinpath(\"adir-1.0/__init__.py\").touch()\n ConftestWithSetinitial(tmp_path.joinpath(\"adir-1.0\", \"b\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_doubledash_considered_test_issue151_load_all_conftests.assert_len_d_len_name": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_doubledash_considered_test_issue151_load_all_conftests.assert_len_d_len_name", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 108, "end_line": 126, "span_ids": ["test_issue151_load_all_conftests", "test_doubledash_considered"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_doubledash_considered(pytester: Pytester) -> None:\n conf = 
pytester.mkdir(\"--option\")\n conf.joinpath(\"conftest.py\").touch()\n conftest = PytestPluginManager()\n conftest_setinitial(conftest, [conf.name, conf.name])\n values = conftest._getconftestmodules(conf, importmode=\"prepend\")\n assert len(values) == 1\n\n\ndef test_issue151_load_all_conftests(pytester: Pytester) -> None:\n names = \"code proj src\".split()\n for name in names:\n p = pytester.mkdir(name)\n p.joinpath(\"conftest.py\").touch()\n\n conftest = PytestPluginManager()\n conftest_setinitial(conftest, names)\n d = list(conftest._conftestpath2mod.values())\n assert len(d) == len(names)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_global_import_test_conftest_global_import.assert_res_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_global_import_test_conftest_global_import.assert_res_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 129, "end_line": 153, "span_ids": ["test_conftest_global_import"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_global_import(pytester: Pytester) -> None:\n pytester.makeconftest(\"x=3\")\n p = pytester.makepyfile(\n \"\"\"\n from pathlib import Path\n import pytest\n from _pytest.config import PytestPluginManager\n conf = PytestPluginManager()\n mod = conf._importconftest(Path(\"conftest.py\"), importmode=\"prepend\")\n assert mod.x == 3\n import conftest\n assert conftest is mod, (conftest, mod)\n sub = Path(\"sub\")\n sub.mkdir()\n subconf = sub / \"conftest.py\"\n subconf.write_text(\"y=4\")\n mod2 = conf._importconftest(subconf, importmode=\"prepend\")\n assert mod != mod2\n assert mod2.y == 4\n import conftest\n assert conftest is mod2, (conftest, mod)\n \"\"\"\n )\n res = pytester.runpython(p)\n assert res.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftestcutdir_test_conftestcutdir.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftestcutdir_test_conftestcutdir.None_5", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 156, "end_line": 173, "span_ids": ["test_conftestcutdir"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftestcutdir(pytester: Pytester) -> None:\n conf = 
pytester.makeconftest(\"\")\n p = pytester.mkdir(\"x\")\n conftest = PytestPluginManager()\n conftest_setinitial(conftest, [pytester.path], confcutdir=p)\n values = conftest._getconftestmodules(p, importmode=\"prepend\")\n assert len(values) == 0\n values = conftest._getconftestmodules(conf.parent, importmode=\"prepend\")\n assert len(values) == 0\n assert Path(conf) not in conftest._conftestpath2mod\n # but we can still import a conftest directly\n conftest._importconftest(conf, importmode=\"prepend\")\n values = conftest._getconftestmodules(conf.parent, importmode=\"prepend\")\n assert values[0].__file__.startswith(str(conf))\n # and all sub paths get updated properly\n values = conftest._getconftestmodules(p, importmode=\"prepend\")\n assert len(values) == 1\n assert values[0].__file__.startswith(str(conf))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftestcutdir_inplace_considered_test_setinitial_conftest_subdirs.if_name_not_in_whatever.else_.assert_len_conftest__conf": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftestcutdir_inplace_considered_test_setinitial_conftest_subdirs.if_name_not_in_whatever.else_.assert_len_conftest__conf", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 176, "end_line": 198, "span_ids": ["test_setinitial_conftest_subdirs", "test_conftestcutdir_inplace_considered"], "tokens": 294}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftestcutdir_inplace_considered(pytester: Pytester) -> None:\n conf = pytester.makeconftest(\"\")\n conftest = PytestPluginManager()\n conftest_setinitial(conftest, [conf.parent], confcutdir=conf.parent)\n values = conftest._getconftestmodules(conf.parent, importmode=\"prepend\")\n assert len(values) == 1\n assert values[0].__file__.startswith(str(conf))\n\n\n@pytest.mark.parametrize(\"name\", \"test tests whatever .dotdir\".split())\ndef test_setinitial_conftest_subdirs(pytester: Pytester, name: str) -> None:\n sub = pytester.mkdir(name)\n subconftest = sub.joinpath(\"conftest.py\")\n subconftest.touch()\n conftest = PytestPluginManager()\n conftest_setinitial(conftest, [sub.parent], confcutdir=pytester.path)\n key = subconftest.resolve()\n if name not in (\"whatever\", \".dotdir\"):\n assert key in conftest._conftestpath2mod\n assert len(conftest._conftestpath2mod) == 1\n else:\n assert key not in conftest._conftestpath2mod\n assert len(conftest._conftestpath2mod) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_existing_junitxml_test_conftest_existing_junitxml.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_existing_junitxml_test_conftest_existing_junitxml.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 329, "end_line": 341, "span_ids": ["test_conftest_existing_junitxml"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_existing_junitxml(pytester: Pytester) -> None:\n x = pytester.mkdir(\"tests\")\n x.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def pytest_addoption(parser):\n parser.addoption(\"--xyz\", action=\"store_true\")\n \"\"\"\n )\n )\n pytester.makefile(ext=\".xml\", junit=\"\") # Writes junit.xml\n result = pytester.runpytest(\"-h\", \"--junitxml\", \"junit.xml\")\n result.stdout.fnmatch_lines([\"*--xyz*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_fixture_dependency_test_fixture_dependency.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_fixture_dependency_test_fixture_dependency.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 361, "end_line": 403, "span_ids": ["test_fixture_dependency"], "tokens": 254}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixture_dependency(pytester: Pytester) -> None:\n pytester.makeconftest(\"\")\n pytester.path.joinpath(\"__init__.py\").touch()\n sub = pytester.mkdir(\"sub\")\n sub.joinpath(\"__init__.py\").touch()\n sub.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.fixture\n def not_needed():\n assert False, \"Should not be called!\"\n\n @pytest.fixture\n def foo():\n assert False, \"Should not be called!\"\n\n @pytest.fixture\n def bar(foo):\n return 'bar'\n \"\"\"\n )\n )\n subsub = sub.joinpath(\"subsub\")\n subsub.mkdir()\n subsub.joinpath(\"__init__.py\").touch()\n subsub.joinpath(\"test_bar.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.fixture\n def bar():\n return 'sub bar'\n\n def test_event_fixture(bar):\n assert bar == 'sub bar'\n \"\"\"\n )\n )\n result = pytester.runpytest(\"sub\")\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_found_with_double_dash_test_conftest_found_with_double_dash.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_found_with_double_dash_test_conftest_found_with_double_dash.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 406, "end_line": 423, "span_ids": ["test_conftest_found_with_double_dash"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_found_with_double_dash(pytester: Pytester) -> None:\n sub = pytester.mkdir(\"sub\")\n sub.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def pytest_addoption(parser):\n parser.addoption(\"--hello-world\", action=\"store_true\")\n \"\"\"\n )\n )\n p = sub.joinpath(\"test_hello.py\")\n p.write_text(\"def test_hello(): pass\")\n result = pytester.runpytest(str(p) + \"::test_hello\", \"-h\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *--hello-world*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestVisibility_TestConftestVisibility._setup_tree.return._runner_runner_packa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestVisibility_TestConftestVisibility._setup_tree.return._runner_runner_packa", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 426, "end_line": 490, "span_ids": ["TestConftestVisibility", "TestConftestVisibility._setup_tree"], "tokens": 467}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConftestVisibility:\n def _setup_tree(self, pytester: Pytester) -> Dict[str, Path]: # for issue616\n # example mostly taken from:\n # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html\n runner = pytester.mkdir(\"empty\")\n package = pytester.mkdir(\"package\")\n\n package.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def fxtr():\n return \"from-package\"\n \"\"\"\n )\n )\n package.joinpath(\"test_pkgroot.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_pkgroot(fxtr):\n assert fxtr == \"from-package\"\n \"\"\"\n )\n )\n\n swc = package.joinpath(\"swc\")\n swc.mkdir()\n swc.joinpath(\"__init__.py\").touch()\n swc.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def fxtr():\n return \"from-swc\"\n \"\"\"\n )\n )\n swc.joinpath(\"test_with_conftest.py\").write_text(\n 
textwrap.dedent(\n \"\"\"\\\n def test_with_conftest(fxtr):\n assert fxtr == \"from-swc\"\n \"\"\"\n )\n )\n\n snc = package.joinpath(\"snc\")\n snc.mkdir()\n snc.joinpath(\"__init__.py\").touch()\n snc.joinpath(\"test_no_conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_no_conftest(fxtr):\n assert fxtr == \"from-package\" # No local conftest.py, so should\n # use value from parent dir's\n \"\"\"\n )\n )\n print(\"created directory structure:\")\n for x in pytester.path.rglob(\"\"):\n print(\" \" + str(x.relative_to(pytester.path)))\n\n return {\"runner\": runner, \"package\": package, \"swc\": swc, \"snc\": snc}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_search_conftest_up_to_inifile_test_search_conftest_up_to_inifile.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_search_conftest_up_to_inifile_test_search_conftest_up_to_inifile.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 532, "end_line": 583, "span_ids": ["test_search_conftest_up_to_inifile"], "tokens": 372}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"confcutdir,passed,error\", [(\".\", 2, 0), (\"src\", 1, 1), (None, 1, 1)]\n)\ndef test_search_conftest_up_to_inifile(\n pytester: Pytester, confcutdir: str, passed: int, error: int\n) -> None:\n \"\"\"Test that conftest files are detected only up to an ini file, unless\n an explicit --confcutdir option is given.\n \"\"\"\n root = pytester.path\n src = root.joinpath(\"src\")\n src.mkdir()\n src.joinpath(\"pytest.ini\").write_text(\"[pytest]\")\n src.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def fix1(): pass\n \"\"\"\n )\n )\n src.joinpath(\"test_foo.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_1(fix1):\n pass\n def test_2(out_of_reach):\n pass\n \"\"\"\n )\n )\n root.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def out_of_reach(): pass\n \"\"\"\n )\n )\n\n args = [str(src)]\n if confcutdir:\n args = [\"--confcutdir=%s\" % root.joinpath(confcutdir)]\n result = pytester.runpytest(*args)\n match = \"\"\n if passed:\n match += \"*%d passed*\" % passed\n if error:\n match += \"*%d error*\" % error\n result.stdout.fnmatch_lines(match)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_issue1073_conftest_special_objects_test_conftest_exception_handling.assert_raise_ValueError_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_issue1073_conftest_special_objects_test_conftest_exception_handling.assert_raise_ValueError_", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 586, "end_line": 620, "span_ids": ["test_conftest_exception_handling", "test_issue1073_conftest_special_objects"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_issue1073_conftest_special_objects(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\\\n class DontTouchMe(object):\n def __getattr__(self, x):\n raise Exception('cant touch me')\n\n x = DontTouchMe()\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\\\n def test_some():\n pass\n \"\"\"\n )\n res = pytester.runpytest()\n assert res.ret == 0\n\n\ndef test_conftest_exception_handling(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\\\n raise ValueError()\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\\\n def test_some():\n pass\n \"\"\"\n )\n res = pytester.runpytest()\n assert res.ret == 4\n assert \"raise ValueError()\" in [line.strip() for line in res.errlines]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_hook_proxy_test_hook_proxy.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_hook_proxy_test_hook_proxy.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 623, "end_line": 642, "span_ids": ["test_hook_proxy"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_hook_proxy(pytester: Pytester) -> None:\n \"\"\"Session's gethookproxy() would cache conftests incorrectly (#2016).\n It was decided to remove the cache altogether.\n \"\"\"\n pytester.makepyfile(\n **{\n \"root/demo-0/test_foo1.py\": \"def test1(): pass\",\n \"root/demo-a/test_foo2.py\": \"def test1(): pass\",\n \"root/demo-a/conftest.py\": \"\"\"\\\n def pytest_ignore_collect(path, config):\n return True\n \"\"\",\n \"root/demo-b/test_foo3.py\": \"def test1(): pass\",\n \"root/demo-c/test_foo4.py\": \"def test1(): pass\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"*test_foo1.py*\", \"*test_foo3.py*\", \"*test_foo4.py*\", \"*3 passed*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_required_option_help_": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_required_option_help_", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 645, "end_line": 659, "span_ids": ["test_required_option_help"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_required_option_help(pytester: Pytester) -> None:\n pytester.makeconftest(\"assert 0\")\n x = pytester.mkdir(\"x\")\n x.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def pytest_addoption(parser):\n parser.addoption(\"--xyz\", action=\"store_true\", required=True)\n \"\"\"\n )\n )\n result = pytester.runpytest(\"-h\", x)\n result.stdout.no_fnmatch_line(\"*argument --xyz is required*\")\n assert \"general:\" in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_simple_doctestfile_TestDoctests.test_new_pattern.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_simple_doctestfile_TestDoctests.test_new_pattern.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 103, "end_line": 123, "span_ids": ["TestDoctests.test_new_pattern", "TestDoctests.test_simple_doctestfile"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_simple_doctestfile(self, pytester: Pytester):\n p = pytester.maketxtfile(\n test_doc=\"\"\"\n >>> x = 1\n >>> x == 1\n False\n \"\"\"\n )\n reprec = pytester.inline_run(p)\n reprec.assertoutcome(failed=1)\n\n def test_new_pattern(self, pytester: Pytester):\n p = pytester.maketxtfile(\n xdoc=\"\"\"\n >>> x = 1\n >>> x == 1\n False\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-glob=x*.txt\")\n reprec.assertoutcome(failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_multiple_patterns_TestDoctests.test_multiple_patterns.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_multiple_patterns_TestDoctests.test_multiple_patterns.None_4", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 125, "end_line": 152, "span_ids": 
["TestDoctests.test_multiple_patterns"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_multiple_patterns(self, pytester: Pytester):\n \"\"\"Test support for multiple --doctest-glob arguments (#1255).\"\"\"\n pytester.maketxtfile(\n xdoc=\"\"\"\n >>> 1\n 1\n \"\"\"\n )\n pytester.makefile(\n \".foo\",\n test=\"\"\"\n >>> 1\n 1\n \"\"\",\n )\n pytester.maketxtfile(\n test_normal=\"\"\"\n >>> 1\n 1\n \"\"\"\n )\n expected = {\"xdoc.txt\", \"test.foo\", \"test_normal.txt\"}\n assert {x.name for x in pytester.path.iterdir()} == expected\n args = [\"--doctest-glob=xdoc*.txt\", \"--doctest-glob=*.foo\"]\n result = pytester.runpytest(*args)\n result.stdout.fnmatch_lines([\"*test.foo *\", \"*xdoc.txt *\", \"*2 passed*\"])\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*test_normal.txt *\", \"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_encoding_TestDoctests.test_encoding.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_encoding_TestDoctests.test_encoding.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 154, "end_line": 179, "span_ids": ["TestDoctests.test_encoding"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n @pytest.mark.parametrize(\n \" test_string, encoding\",\n [(\"foo\", \"ascii\"), (\"\u00f6\u00e4\u00fc\", \"latin1\"), (\"\u00f6\u00e4\u00fc\", \"utf-8\")],\n )\n def test_encoding(self, pytester, test_string, encoding):\n \"\"\"Test support for doctest_encoding ini option.\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n doctest_encoding={}\n \"\"\".format(\n encoding\n )\n )\n doctest = \"\"\"\n >>> \"{}\"\n {}\n \"\"\".format(\n test_string, repr(test_string)\n )\n fn = pytester.path / \"test_encoding.txt\"\n fn.write_text(doctest, encoding=encoding)\n\n result = pytester.runpytest()\n\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_docstring_full_context_around_error_TestDoctests.test_docstring_full_context_around_error.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_docstring_full_context_around_error_TestDoctests.test_docstring_full_context_around_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 289, "end_line": 317, "span_ids": ["TestDoctests.test_docstring_full_context_around_error"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_docstring_full_context_around_error(self, pytester: Pytester):\n \"\"\"Test that we show the whole context before the actual line of a failing\n doctest, provided that the context is up to 10 lines long.\n \"\"\"\n pytester.makepyfile(\n '''\n def foo():\n \"\"\"\n text-line-1\n text-line-2\n\n >>> 1 + 1\n 3\n \"\"\"\n '''\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\n \"*docstring_full_context_around_error*\",\n \"003*text-line-1\",\n \"004*text-line-2\",\n \"006*>>> 1 + 1\",\n \"Expected:\",\n \" 3\",\n \"Got:\",\n \" 2\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_linedata_missing_TestDoctests.test_doctest_linedata_missing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_linedata_missing_TestDoctests.test_doctest_linedata_missing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 319, "end_line": 336, "span_ids": ["TestDoctests.test_doctest_linedata_missing"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctest_linedata_missing(self, pytester: Pytester):\n pytester.path.joinpath(\"hello.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n class Fun(object):\n @property\n def test(self):\n '''\n >>> a = 1\n >>> 1/0\n '''\n \"\"\"\n )\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\"*hello*\", \"006*>>> 1/0*\", \"*UNEXPECTED*ZeroDivision*\", \"*1 failed*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unex_importerror_only_txt_TestDoctests.test_doctest_unex_importerror_only_txt.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unex_importerror_only_txt_TestDoctests.test_doctest_unex_importerror_only_txt.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 399, "end_line": 414, "span_ids": ["TestDoctests.test_doctest_unex_importerror_only_txt"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctest_unex_importerror_only_txt(self, pytester: Pytester):\n pytester.maketxtfile(\n \"\"\"\n >>> import asdalsdkjaslkdjasd\n >>>\n \"\"\"\n )\n result = pytester.runpytest()\n # doctest is never executed because of error during hello.py collection\n result.stdout.fnmatch_lines(\n [\n \"*>>> import asdals*\",\n \"*UNEXPECTED*ModuleNotFoundError*\",\n \"ModuleNotFoundError: No module named *asdal*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_txtfile_failing_TestDoctests.test_txtfile_with_fixtures.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_txtfile_failing_TestDoctests.test_txtfile_with_fixtures.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 481, "end_line": 511, "span_ids": ["TestDoctests.test_txtfile_failing", "TestDoctests.test_txtfile_with_fixtures"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_txtfile_failing(self, pytester: Pytester):\n p = pytester.maketxtfile(\n \"\"\"\n >>> i = 0\n >>> i + 1\n 2\n \"\"\"\n )\n result = pytester.runpytest(p, \"-s\")\n result.stdout.fnmatch_lines(\n [\n \"001 >>> i = 0\",\n \"002 >>> i + 1\",\n \"Expected:\",\n \" 2\",\n \"Got:\",\n \" 1\",\n \"*test_txtfile_failing.txt:2: DocTestFailure\",\n ]\n )\n\n def test_txtfile_with_fixtures(self, pytester: Pytester):\n p = pytester.maketxtfile(\n \"\"\"\n >>> p = getfixture('tmp_path')\n >>> p.is_dir()\n True\n \"\"\"\n )\n reprec = pytester.inline_run(p)\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_two_tests_one_fail_TestDoctests.test_doctestmodule_two_tests_one_fail.reprec_assertoutcome_fail": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_two_tests_one_fail_TestDoctests.test_doctestmodule_two_tests_one_fail.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 579, "end_line": 598, "span_ids": ["TestDoctests.test_doctestmodule_two_tests_one_fail"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctestmodule_two_tests_one_fail(self, pytester: Pytester):\n p = pytester.makepyfile(\n \"\"\"\n class MyClass(object):\n def bad_meth(self):\n '''\n >>> magic = 42\n >>> magic\n 0\n '''\n def nice_meth(self):\n '''\n >>> magic = 42\n >>> magic - 42\n 0\n '''\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\")\n reprec.assertoutcome(failed=1, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_ignored_whitespace_TestDoctests.test_ignored_whitespace.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_ignored_whitespace_TestDoctests.test_ignored_whitespace.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 600, "end_line": 619, "span_ids": ["TestDoctests.test_ignored_whitespace"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_ignored_whitespace(self, pytester: Pytester):\n pytester.makeini(\n \"\"\"\n [pytest]\n doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE\n \"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n class MyClass(object):\n '''\n >>> a = \"foo \"\n >>> print(a)\n foo\n '''\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_non_ignored_whitespace_TestDoctests.test_non_ignored_whitespace.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_non_ignored_whitespace_TestDoctests.test_non_ignored_whitespace.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 621, "end_line": 640, "span_ids": 
["TestDoctests.test_non_ignored_whitespace"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_non_ignored_whitespace(self, pytester: Pytester):\n pytester.makeini(\n \"\"\"\n [pytest]\n doctest_optionflags = ELLIPSIS\n \"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n class MyClass(object):\n '''\n >>> a = \"foo \"\n >>> print(a)\n foo\n '''\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\")\n reprec.assertoutcome(failed=1, passed=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_ignored_whitespace_glob_TestDoctests.test_ignored_whitespace_glob.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_ignored_whitespace_glob_TestDoctests.test_ignored_whitespace_glob.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 642, "end_line": 657, "span_ids": ["TestDoctests.test_ignored_whitespace_glob"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_ignored_whitespace_glob(self, pytester: Pytester):\n pytester.makeini(\n \"\"\"\n [pytest]\n doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE\n \"\"\"\n )\n p = pytester.maketxtfile(\n xdoc=\"\"\"\n >>> a = \"foo \"\n >>> print(a)\n foo\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-glob=x*.txt\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_non_ignored_whitespace_glob_TestDoctests.test_non_ignored_whitespace_glob.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_non_ignored_whitespace_glob_TestDoctests.test_non_ignored_whitespace_glob.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 659, "end_line": 674, "span_ids": ["TestDoctests.test_non_ignored_whitespace_glob"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"class TestDoctests:\n\n def test_non_ignored_whitespace_glob(self, pytester: Pytester):\n pytester.makeini(\n \"\"\"\n [pytest]\n doctest_optionflags = ELLIPSIS\n \"\"\"\n )\n p = pytester.maketxtfile(\n xdoc=\"\"\"\n >>> a = \"foo \"\n >>> print(a)\n foo\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-glob=x*.txt\")\n reprec.assertoutcome(failed=1, passed=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_junit_report_for_doctest_TestDoctests.test_junit_report_for_doctest.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_junit_report_for_doctest_TestDoctests.test_junit_report_for_doctest.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 709, "end_line": 722, "span_ids": ["TestDoctests.test_junit_report_for_doctest"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_junit_report_for_doctest(self, pytester: Pytester):\n \"\"\"#713: Fix --junit-xml option when used with --doctest-modules.\"\"\"\n p = pytester.makepyfile(\n \"\"\"\n def foo():\n '''\n >>> 1 + 1\n 3\n '''\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\", \"--junit-xml=junit.xml\")\n reprec.assertoutcome(failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_unicode_doctest_TestDoctests.test_unicode_doctest.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_unicode_doctest_TestDoctests.test_unicode_doctest.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 724, "end_line": 745, "span_ids": ["TestDoctests.test_unicode_doctest"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_unicode_doctest(self, pytester: Pytester):\n \"\"\"\n Test case for issue 2434: DecodeError on Python 2 when doctest contains non-ascii\n characters.\n \"\"\"\n p = pytester.maketxtfile(\n test_unicode_doctest=\"\"\"\n .. doctest::\n\n >>> print(\n ... 
\"Hi\\\\n\\\\nBy\u00e9\")\n Hi\n ...\n By\u00e9\n >>> 1/0 # By\u00e9\n 1\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\"*UNEXPECTED EXCEPTION: ZeroDivisionError*\", \"*1 failed*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_unicode_doctest_module_TestDoctests.test_unicode_doctest_module.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_unicode_doctest_module_TestDoctests.test_unicode_doctest_module.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 747, "end_line": 763, "span_ids": ["TestDoctests.test_unicode_doctest_module"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_unicode_doctest_module(self, pytester: Pytester):\n \"\"\"\n Test case for issue 2434: DecodeError on Python 2 when doctest docstring\n contains non-ascii characters.\n \"\"\"\n p = pytester.makepyfile(\n test_unicode_doctest_module=\"\"\"\n def fix_bad_unicode(text):\n '''\n >>> print(fix_bad_unicode('\u00c3\u00banico'))\n \u00fanico\n '''\n return \"\u00fanico\"\n \"\"\"\n )\n result = pytester.runpytest(p, \"--doctest-modules\")\n result.stdout.fnmatch_lines([\"* 1 passed *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_print_unicode_value_TestDoctests.test_print_unicode_value.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_print_unicode_value_TestDoctests.test_print_unicode_value.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 765, "end_line": 779, "span_ids": ["TestDoctests.test_print_unicode_value"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_print_unicode_value(self, pytester: Pytester):\n \"\"\"\n Test case for issue 3583: Printing Unicode in doctest under Python 2.7\n doesn't work\n \"\"\"\n p = pytester.maketxtfile(\n test_print_unicode_value=r\"\"\"\n Here is a doctest::\n\n >>> print('\\xE5\\xE9\\xEE\\xF8\\xFC')\n \u00e5\u00e9\u00ee\u00f8\u00fc\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"* 1 passed *\"])", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_reportinfo_TestDoctests.test_reportinfo.assert_reportinfo_1_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_reportinfo_TestDoctests.test_reportinfo.assert_reportinfo_1_1", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 781, "end_line": 795, "span_ids": ["TestDoctests.test_reportinfo"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_reportinfo(self, pytester: Pytester):\n \"\"\"Make sure that DoctestItem.reportinfo() returns lineno.\"\"\"\n p = pytester.makepyfile(\n test_reportinfo=\"\"\"\n def foo(x):\n '''\n >>> foo('a')\n 'b'\n '''\n return 'c'\n \"\"\"\n )\n items, reprec = pytester.inline_genitems(p, \"--doctest-modules\")\n reportinfo = items[0].reportinfo()\n assert reportinfo[1] == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_valid_setup_py_TestDoctests.test_invalid_setup_py.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_valid_setup_py_TestDoctests.test_invalid_setup_py.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 797, "end_line": 827, "span_ids": ["TestDoctests.test_invalid_setup_py", "TestDoctests.test_valid_setup_py"], "tokens": 229}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_valid_setup_py(self, pytester: Pytester):\n \"\"\"\n Test to make sure that pytest ignores valid setup.py files when ran\n with --doctest-modules\n \"\"\"\n p = pytester.makepyfile(\n setup=\"\"\"\n from setuptools import setup, find_packages\n setup(name='sample',\n version='0.0',\n description='description',\n packages=find_packages()\n )\n \"\"\"\n )\n result = pytester.runpytest(p, \"--doctest-modules\")\n result.stdout.fnmatch_lines([\"*collected 0 items*\"])\n\n def test_invalid_setup_py(self, pytester: Pytester):\n \"\"\"\n Test to make sure that pytest reads setup.py files that are not used\n for python packages when ran with --doctest-modules\n \"\"\"\n p = pytester.makepyfile(\n setup=\"\"\"\n def test_foo():\n return 'bar'\n \"\"\"\n )\n result = pytester.runpytest(p, \"--doctest-modules\")\n result.stdout.fnmatch_lines([\"*collected 1 
item*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals_TestLiterals.test_allow_unicode.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals_TestLiterals.test_allow_unicode.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 830, "end_line": 868, "span_ids": ["TestLiterals", "TestLiterals.test_allow_unicode"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLiterals:\n @pytest.mark.parametrize(\"config_mode\", [\"ini\", \"comment\"])\n def test_allow_unicode(self, pytester, config_mode):\n \"\"\"Test that doctests which output unicode work in all python versions\n tested by pytest when the ALLOW_UNICODE option is used (either in\n the ini file or by an inline comment).\n \"\"\"\n if config_mode == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n doctest_optionflags = ALLOW_UNICODE\n \"\"\"\n )\n comment = \"\"\n else:\n comment = \"#doctest: +ALLOW_UNICODE\"\n\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> b'12'.decode('ascii') {comment}\n '12'\n \"\"\".format(\n comment=comment\n )\n )\n pytester.makepyfile(\n foo=\"\"\"\n def foo():\n '''\n >>> b'12'.decode('ascii') {comment}\n '12'\n '''\n \"\"\".format(\n comment=comment\n )\n )\n reprec = pytester.inline_run(\"--doctest-modules\")\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_allow_bytes_TestLiterals.test_allow_bytes.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_allow_bytes_TestLiterals.test_allow_bytes.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 870, "end_line": 907, "span_ids": ["TestLiterals.test_allow_bytes"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLiterals:\n\n @pytest.mark.parametrize(\"config_mode\", [\"ini\", \"comment\"])\n def test_allow_bytes(self, pytester, config_mode):\n \"\"\"Test that doctests which output bytes work in all python versions\n tested by pytest when the ALLOW_BYTES option is used (either in\n the ini file or by an inline comment)(#1287).\n \"\"\"\n if config_mode == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n 
doctest_optionflags = ALLOW_BYTES\n \"\"\"\n )\n comment = \"\"\n else:\n comment = \"#doctest: +ALLOW_BYTES\"\n\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> b'foo' {comment}\n 'foo'\n \"\"\".format(\n comment=comment\n )\n )\n pytester.makepyfile(\n foo=\"\"\"\n def foo():\n '''\n >>> b'foo' {comment}\n 'foo'\n '''\n \"\"\".format(\n comment=comment\n )\n )\n reprec = pytester.inline_run(\"--doctest-modules\")\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestSkips_TestDoctestSkips.test_vacuous_all_skipped.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestSkips_TestDoctestSkips.test_vacuous_all_skipped.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1086, "end_line": 1143, "span_ids": ["TestDoctestSkips.test_vacuous_all_skipped", "TestDoctestSkips.test_one_skipped_failed", "TestDoctestSkips.test_all_skipped", "TestDoctestSkips", "TestDoctestSkips.makedoctest", "TestDoctestSkips.test_one_skipped"], "tokens": 439}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestSkips:\n \"\"\"\n If all examples in a doctest are skipped due to the SKIP option, then\n the tests should be SKIPPED rather than PASSED. 
(#957)\n \"\"\"\n\n @pytest.fixture(params=[\"text\", \"module\"])\n def makedoctest(self, pytester, request):\n def makeit(doctest):\n mode = request.param\n if mode == \"text\":\n pytester.maketxtfile(doctest)\n else:\n assert mode == \"module\"\n pytester.makepyfile('\"\"\"\\n%s\"\"\"' % doctest)\n\n return makeit\n\n def test_one_skipped(self, pytester, makedoctest):\n makedoctest(\n \"\"\"\n >>> 1 + 1 # doctest: +SKIP\n 2\n >>> 2 + 2\n 4\n \"\"\"\n )\n reprec = pytester.inline_run(\"--doctest-modules\")\n reprec.assertoutcome(passed=1)\n\n def test_one_skipped_failed(self, pytester, makedoctest):\n makedoctest(\n \"\"\"\n >>> 1 + 1 # doctest: +SKIP\n 2\n >>> 2 + 2\n 200\n \"\"\"\n )\n reprec = pytester.inline_run(\"--doctest-modules\")\n reprec.assertoutcome(failed=1)\n\n def test_all_skipped(self, pytester, makedoctest):\n makedoctest(\n \"\"\"\n >>> 1 + 1 # doctest: +SKIP\n 2\n >>> 2 + 2 # doctest: +SKIP\n 200\n \"\"\"\n )\n reprec = pytester.inline_run(\"--doctest-modules\")\n reprec.assertoutcome(skipped=1)\n\n def test_vacuous_all_skipped(self, pytester, makedoctest):\n makedoctest(\"\")\n reprec = pytester.inline_run(\"--doctest-modules\")\n reprec.assertoutcome(passed=0, skipped=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestSkips.test_continue_on_failure_TestDoctestSkips.test_continue_on_failure.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestSkips.test_continue_on_failure_TestDoctestSkips.test_continue_on_failure.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1145, "end_line": 1166, "span_ids": ["TestDoctestSkips.test_continue_on_failure"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestSkips:\n\n def test_continue_on_failure(self, pytester: Pytester):\n pytester.maketxtfile(\n test_something=\"\"\"\n >>> i = 5\n >>> def foo():\n ... raise ValueError('error1')\n >>> foo()\n >>> i\n >>> i + 2\n 7\n >>> i + 1\n \"\"\"\n )\n result = pytester.runpytest(\n \"--doctest-modules\", \"--doctest-continue-on-failure\"\n )\n result.assert_outcomes(passed=0, failed=1)\n # The lines that contains the failure are 4, 5, and 8. 
The first one\n # is a stack trace and the other two are mismatches.\n result.stdout.fnmatch_lines(\n [\"*4: UnexpectedException*\", \"*5: DocTestFailure*\", \"*8: DocTestFailure*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures_TestDoctestAutoUseFixtures.test_doctest_module_session_fixture.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures_TestDoctestAutoUseFixtures.test_doctest_module_session_fixture.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1169, "end_line": 1206, "span_ids": ["TestDoctestAutoUseFixtures", "TestDoctestAutoUseFixtures.test_doctest_module_session_fixture"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestAutoUseFixtures:\n\n SCOPES = [\"module\", \"session\", \"class\", \"function\"]\n\n def test_doctest_module_session_fixture(self, pytester: Pytester):\n \"\"\"Test that session fixtures are initialized for doctest modules (#768).\"\"\"\n # session fixture which changes some global data, which will\n # be accessed by doctests in a module\n pytester.makeconftest(\n \"\"\"\n import pytest\n import sys\n\n @pytest.fixture(autouse=True, scope='session')\n def myfixture():\n assert not hasattr(sys, 'pytest_session_data')\n sys.pytest_session_data = 1\n yield\n del sys.pytest_session_data\n \"\"\"\n )\n pytester.makepyfile(\n foo=\"\"\"\n import sys\n\n def foo():\n '''\n >>> assert sys.pytest_session_data == 1\n '''\n\n def bar():\n '''\n >>> assert sys.pytest_session_data == 1\n '''\n \"\"\"\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_fixture_scopes_TestDoctestAutoUseFixtures.test_fixture_scopes.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_fixture_scopes_TestDoctestAutoUseFixtures.test_fixture_scopes.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1208, "end_line": 1239, "span_ids": ["TestDoctestAutoUseFixtures.test_fixture_scopes"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "class TestDoctestAutoUseFixtures:\n\n @pytest.mark.parametrize(\"scope\", SCOPES)\n @pytest.mark.parametrize(\"enable_doctest\", [True, False])\n def test_fixture_scopes(self, pytester, scope, enable_doctest):\n \"\"\"Test that auto-use fixtures work properly with doctest modules.\n See #1057 and #1100.\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(autouse=True, scope=\"{scope}\")\n def auto(request):\n return 99\n \"\"\".format(\n scope=scope\n )\n )\n pytester.makepyfile(\n test_1='''\n def test_foo():\n \"\"\"\n >>> getfixture('auto') + 1\n 100\n \"\"\"\n def test_bar():\n assert 1\n '''\n )\n params = (\"--doctest-modules\",) if enable_doctest else ()\n passes = 3 if enable_doctest else 2\n result = pytester.runpytest(*params)\n result.stdout.fnmatch_lines([\"*=== %d passed in *\" % passes])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_fixture_module_doctest_scopes_TestDoctestAutoUseFixtures.test_fixture_module_doctest_scopes.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_fixture_module_doctest_scopes_TestDoctestAutoUseFixtures.test_fixture_module_doctest_scopes.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1241, "end_line": 1277, "span_ids": ["TestDoctestAutoUseFixtures.test_fixture_module_doctest_scopes"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestAutoUseFixtures:\n\n @pytest.mark.parametrize(\"scope\", SCOPES)\n @pytest.mark.parametrize(\"autouse\", [True, False])\n @pytest.mark.parametrize(\"use_fixture_in_doctest\", [True, False])\n def test_fixture_module_doctest_scopes(\n self, pytester, scope, autouse, use_fixture_in_doctest\n ):\n \"\"\"Test that auto-use fixtures work properly with doctest files.\n See #1057 and #1100.\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(autouse={autouse}, scope=\"{scope}\")\n def auto(request):\n return 99\n \"\"\".format(\n scope=scope, autouse=autouse\n )\n )\n if use_fixture_in_doctest:\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> getfixture('auto')\n 99\n \"\"\"\n )\n else:\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> 1 + 1\n 2\n \"\"\"\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.no_fnmatch_line(\"*FAILURES*\")\n result.stdout.fnmatch_lines([\"*=== 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_auto_use_request_attributes_TestDoctestAutoUseFixtures.test_auto_use_request_attributes.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestAutoUseFixtures.test_auto_use_request_attributes_TestDoctestAutoUseFixtures.test_auto_use_request_attributes.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1279, "end_line": 1309, "span_ids": ["TestDoctestAutoUseFixtures.test_auto_use_request_attributes"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestAutoUseFixtures:\n\n @pytest.mark.parametrize(\"scope\", SCOPES)\n def test_auto_use_request_attributes(self, pytester, scope):\n \"\"\"Check that all attributes of a request in an autouse fixture\n behave as expected when requested for a doctest item.\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(autouse=True, scope=\"{scope}\")\n def auto(request):\n if \"{scope}\" == 'module':\n assert request.module is None\n if \"{scope}\" == 'class':\n assert request.cls is None\n if \"{scope}\" == 'function':\n assert request.function is None\n return 99\n \"\"\".format(\n scope=scope\n )\n )\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> 1 + 1\n 2\n \"\"\"\n )\n result = pytester.runpytest(\"--doctest-modules\")\n str(result.stdout.no_fnmatch_line(\"*FAILURES*\"))\n result.stdout.fnmatch_lines([\"*=== 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestNamespaceFixture_TestDoctestNamespaceFixture.test_namespace_doctestfile.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestNamespaceFixture_TestDoctestNamespaceFixture.test_namespace_doctestfile.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1312, "end_line": 1341, "span_ids": ["TestDoctestNamespaceFixture.test_namespace_doctestfile", "TestDoctestNamespaceFixture"], "tokens": 186}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestNamespaceFixture:\n\n SCOPES = [\"module\", \"session\", \"class\", \"function\"]\n\n @pytest.mark.parametrize(\"scope\", SCOPES)\n def test_namespace_doctestfile(self, pytester, scope):\n \"\"\"\n Check that inserting something into the namespace works in a\n simple text file doctest\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n import contextlib\n\n @pytest.fixture(autouse=True, scope=\"{scope}\")\n def add_contextlib(doctest_namespace):\n doctest_namespace['cl'] = contextlib\n \"\"\".format(\n scope=scope\n )\n )\n p = pytester.maketxtfile(\n \"\"\"\n >>> print(cl.__name__)\n 
contextlib\n \"\"\"\n )\n reprec = pytester.inline_run(p)\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestNamespaceFixture.test_namespace_pyfile_TestDoctestNamespaceFixture.test_namespace_pyfile.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestNamespaceFixture.test_namespace_pyfile_TestDoctestNamespaceFixture.test_namespace_pyfile.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1343, "end_line": 1371, "span_ids": ["TestDoctestNamespaceFixture.test_namespace_pyfile"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestNamespaceFixture:\n\n @pytest.mark.parametrize(\"scope\", SCOPES)\n def test_namespace_pyfile(self, pytester, scope):\n \"\"\"\n Check that inserting something into the namespace works in a\n simple Python file docstring doctest\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n import contextlib\n\n @pytest.fixture(autouse=True, scope=\"{scope}\")\n def add_contextlib(doctest_namespace):\n doctest_namespace['cl'] = contextlib\n \"\"\".format(\n scope=scope\n )\n )\n p = pytester.makepyfile(\n \"\"\"\n def foo():\n '''\n >>> print(cl.__name__)\n contextlib\n '''\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_udiff_TestDoctestReportingOption.test_doctest_report_udiff.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_udiff_TestDoctestReportingOption.test_doctest_report_udiff.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1394, "end_line": 1399, "span_ids": ["TestDoctestReportingOption.test_doctest_report_udiff"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestReportingOption:\n\n @pytest.mark.parametrize(\"format\", [\"udiff\", \"UDIFF\", \"uDiFf\"])\n def test_doctest_report_udiff(self, pytester, format):\n result = self._run_doctest_report(pytester, format)\n result.stdout.fnmatch_lines(\n [\" 0 1 4\", \" -1 2 4\", \" +1 2 5\", \" 2 3 
6\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_cdiff_TestDoctestReportingOption.test_doctest_report_cdiff.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_cdiff_TestDoctestReportingOption.test_doctest_report_cdiff.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1401, "end_line": 1415, "span_ids": ["TestDoctestReportingOption.test_doctest_report_cdiff"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestReportingOption:\n\n def test_doctest_report_cdiff(self, pytester: Pytester):\n result = self._run_doctest_report(pytester, \"cdiff\")\n result.stdout.fnmatch_lines(\n [\n \" a b\",\n \" 0 1 4\",\n \" ! 1 2 4\",\n \" 2 3 6\",\n \" --- 1,4 ----\",\n \" a b\",\n \" 0 1 4\",\n \" ! 1 2 5\",\n \" 2 3 6\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_ndiff_TestDoctestReportingOption.test_doctest_report_ndiff.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_ndiff_TestDoctestReportingOption.test_doctest_report_ndiff.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1417, "end_line": 1429, "span_ids": ["TestDoctestReportingOption.test_doctest_report_ndiff"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestReportingOption:\n\n def test_doctest_report_ndiff(self, pytester: Pytester):\n result = self._run_doctest_report(pytester, \"ndiff\")\n result.stdout.fnmatch_lines(\n [\n \" a b\",\n \" 0 1 4\",\n \" - 1 2 4\",\n \" ? ^\",\n \" + 1 2 5\",\n \" ? 
^\",\n \" 2 3 6\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_none_or_only_first_failure_TestDoctestReportingOption.test_doctest_report_invalid.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption.test_doctest_report_none_or_only_first_failure_TestDoctestReportingOption.test_doctest_report_invalid.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1431, "end_line": 1455, "span_ids": ["TestDoctestReportingOption.test_doctest_report_none_or_only_first_failure", "TestDoctestReportingOption.test_doctest_report_invalid"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestReportingOption:\n\n @pytest.mark.parametrize(\"format\", [\"none\", \"only_first_failure\"])\n def test_doctest_report_none_or_only_first_failure(self, pytester, format):\n result = self._run_doctest_report(pytester, format)\n result.stdout.fnmatch_lines(\n [\n \"Expected:\",\n \" a b\",\n \" 0 1 4\",\n \" 1 2 4\",\n \" 2 3 6\",\n \"Got:\",\n \" a b\",\n \" 0 1 4\",\n \" 1 2 5\",\n \" 2 3 6\",\n ]\n )\n\n def test_doctest_report_invalid(self, pytester: Pytester):\n result = self._run_doctest_report(pytester, \"obviously_invalid_format\")\n result.stderr.fnmatch_lines(\n [\n \"*error: argument --doctest-report: invalid choice: 'obviously_invalid_format' (choose from*\"\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_summing_simple_with_errors_TestPython.test_summing_simple_with_errors.node_assert_attr_name_py": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_summing_simple_with_errors_TestPython.test_summing_simple_with_errors.node_assert_attr_name_py", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 169, "end_line": 196, "span_ids": ["TestPython.test_summing_simple_with_errors"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_summing_simple_with_errors(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def fixture():\n raise Exception()\n 
def test_pass():\n pass\n def test_fail():\n assert 0\n def test_error(fixture):\n pass\n @pytest.mark.xfail\n def test_xfail():\n assert False\n @pytest.mark.xfail(strict=True)\n def test_xpass():\n assert True\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(name=\"pytest\", errors=1, failures=2, skipped=1, tests=5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_junit_duration_report_TestPython.test_junit_duration_report.if_duration_report_to.else_.assert_val_1_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_junit_duration_report_TestPython.test_junit_duration_report.if_duration_report_to.else_.assert_val_1_0", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 248, "end_line": 281, "span_ids": ["TestPython.test_junit_duration_report"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\"duration_report\", [\"call\", \"total\"])\n def test_junit_duration_report(\n self,\n pytester: Pytester,\n monkeypatch: MonkeyPatch,\n duration_report: str,\n run_and_parse: RunAndParse,\n ) -> None:\n\n # mock LogXML.node_reporter so it always sets a known duration to each test report object\n original_node_reporter = LogXML.node_reporter\n\n def node_reporter_wrapper(s, report):\n report.duration = 1.0\n reporter = original_node_reporter(s, report)\n return reporter\n\n monkeypatch.setattr(LogXML, \"node_reporter\", node_reporter_wrapper)\n\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n pass\n \"\"\"\n )\n result, dom = run_and_parse(\"-o\", f\"junit_duration_report={duration_report}\")\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n val = float(tnode[\"time\"])\n if duration_report == \"total\":\n assert val == 3.0\n else:\n assert duration_report == \"call\"\n assert val == 1.0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_TestPython.test_setup_error.assert_ValueError_in_fn": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_TestPython.test_setup_error.assert_ValueError_in_fn", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 283, "end_line": 306, "span_ids": ["TestPython.test_setup_error"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", 
"tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_setup_error(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def arg(request):\n raise ValueError(\"Error reason\")\n def test_function(arg):\n pass\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(errors=1, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_setup_error\", name=\"test_function\")\n fnode = tnode.find_first_by_tag(\"error\")\n fnode.assert_attr(message='failed on setup with \"ValueError: Error reason\"')\n assert \"ValueError\" in fnode.toxml()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_teardown_error_TestPython.test_teardown_error.assert_ValueError_in_fn": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_teardown_error_TestPython.test_teardown_error.assert_ValueError_in_fn", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 308, "end_line": 331, "span_ids": ["TestPython.test_teardown_error"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_teardown_error(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def arg():\n yield\n raise ValueError('Error reason')\n def test_function(arg):\n pass\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_teardown_error\", name=\"test_function\")\n fnode = tnode.find_first_by_tag(\"error\")\n fnode.assert_attr(message='failed on teardown with \"ValueError: Error reason\"')\n assert \"ValueError\" in fnode.toxml()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_skip_contains_name_reason_TestPython.test_skip_contains_name_reason.snode_assert_attr_type_p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_skip_contains_name_reason_TestPython.test_skip_contains_name_reason.snode_assert_attr_type_p", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", 
"start_line": 364, "end_line": 382, "span_ids": ["TestPython.test_skip_contains_name_reason"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_skip_contains_name_reason(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_skip():\n pytest.skip(\"hello23\")\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(skipped=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_skip_contains_name_reason\", name=\"test_skip\")\n snode = tnode.find_first_by_tag(\"skipped\")\n snode.assert_attr(type=\"pytest.skip\", message=\"hello23\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skip_contains_name_reason_TestPython.test_mark_skip_contains_name_reason.snode_assert_attr_type_p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skip_contains_name_reason_TestPython.test_mark_skip_contains_name_reason.snode_assert_attr_type_p", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 384, "end_line": 405, "span_ids": ["TestPython.test_mark_skip_contains_name_reason"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_mark_skip_contains_name_reason(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip(reason=\"hello24\")\n def test_skip():\n assert True\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(skipped=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(\n classname=\"test_mark_skip_contains_name_reason\", name=\"test_skip\"\n )\n snode = tnode.find_first_by_tag(\"skipped\")\n snode.assert_attr(type=\"pytest.skip\", message=\"hello24\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skipif_contains_name_reason_TestPython.test_mark_skipif_contains_name_reason.snode_assert_attr_type_p": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skipif_contains_name_reason_TestPython.test_mark_skipif_contains_name_reason.snode_assert_attr_type_p", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 407, "end_line": 429, "span_ids": ["TestPython.test_mark_skipif_contains_name_reason"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_mark_skipif_contains_name_reason(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n GLOBAL_CONDITION = True\n @pytest.mark.skipif(GLOBAL_CONDITION, reason=\"hello25\")\n def test_skip():\n assert True\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(skipped=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(\n classname=\"test_mark_skipif_contains_name_reason\", name=\"test_skip\"\n )\n snode = tnode.find_first_by_tag(\"skipped\")\n snode.assert_attr(type=\"pytest.skip\", message=\"hello25\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_verbose_message_TestPython.test_failure_verbose_message.fnode_assert_attr_message": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_verbose_message_TestPython.test_failure_verbose_message.fnode_assert_attr_message", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 568, "end_line": 583, "span_ids": ["TestPython.test_failure_verbose_message"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_failure_verbose_message(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n def test_fail():\n assert 0, \"An error\"\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n fnode = tnode.find_first_by_tag(\"failure\")\n fnode.assert_attr(message=\"AssertionError: An error\\nassert 0\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_junit_prefixing_TestPython.test_junit_prefixing.tnode_assert_attr_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_junit_prefixing_TestPython.test_junit_prefixing.tnode_assert_attr_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 615, "end_line": 637, "span_ids": ["TestPython.test_junit_prefixing"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_junit_prefixing(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_func():\n assert 0\n class TestHello(object):\n def test_hello(self):\n pass\n \"\"\"\n )\n result, dom = run_and_parse(\"--junitprefix=xyz\", family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(failures=1, tests=2)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"xyz.test_junit_prefixing\", name=\"test_func\")\n tnode = node.find_nth_by_tag(\"testcase\", 1)\n tnode.assert_attr(\n classname=\"xyz.test_junit_prefixing.TestHello\", name=\"test_hello\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_marker_TestPython.test_xfailure_marker.fnode_assert_attr_type_p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_marker_TestPython.test_xfailure_marker.fnode_assert_attr_type_p", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 659, "end_line": 678, "span_ids": ["TestPython.test_xfailure_marker"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_xfailure_marker(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail(reason=\"42\")\n def test_xfail():\n assert False\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert not result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(skipped=1, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_xfailure_marker\", name=\"test_xfail\")\n fnode = tnode.find_first_by_tag(\"skipped\")\n fnode.assert_attr(type=\"pytest.xfail\", message=\"42\")", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_xpass_TestPython.test_xfailure_xpass.tnode_assert_attr_classna": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_xpass_TestPython.test_xfailure_xpass.tnode_assert_attr_classna", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 711, "end_line": 728, "span_ids": ["TestPython.test_xfailure_xpass"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_xfailure_xpass(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test_xpass():\n pass\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n # assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(skipped=0, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_xfailure_xpass\", name=\"test_xpass\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_xpass_strict_TestPython.test_xfailure_xpass_strict.fnode_assert_attr_message": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_xpass_strict_TestPython.test_xfailure_xpass_strict.fnode_assert_attr_message", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 730, "end_line": 749, "span_ids": ["TestPython.test_xfailure_xpass_strict"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_xfailure_xpass_strict(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail(strict=True, reason=\"This needs to fail!\")\n def test_xpass():\n pass\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n # assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(skipped=0, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_xfailure_xpass_strict\", name=\"test_xpass\")\n fnode = tnode.find_first_by_tag(\"failure\")\n fnode.assert_attr(message=\"[XPASS(strict)] This needs to fail!\")", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestNonPython_TestNonPython.test_summing_simple.assert_custom_item_runte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestNonPython_TestNonPython.test_summing_simple.assert_custom_item_runte", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 974, "end_line": 1001, "span_ids": ["TestNonPython.test_summing_simple", "TestNonPython"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNonPython:\n @parametrize_families\n def test_summing_simple(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_collect_file(path, parent):\n if path.ext == \".xyz\":\n return MyItem.from_parent(name=path.basename, parent=parent)\n class MyItem(pytest.Item):\n def runtest(self):\n raise ValueError(42)\n def repr_failure(self, excinfo):\n return \"custom item runtest failed\"\n \"\"\"\n )\n pytester.path.joinpath(\"myfile.xyz\").write_text(\"hello\")\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(errors=0, failures=1, skipped=0, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(name=\"myfile.xyz\")\n fnode = tnode.find_first_by_tag(\"failure\")\n fnode.assert_attr(message=\"custom item runtest failed\")\n assert \"custom item runtest failed\" in fnode.toxml()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_invalid_xml_escape_test_invalid_xml_escape.for_i_in_valid_.assert_chr_i_bin_xml_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_invalid_xml_escape_test_invalid_xml_escape.for_i_in_valid_.assert_chr_i_bin_xml_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1047, "end_line": 1080, "span_ids": ["test_invalid_xml_escape"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_invalid_xml_escape() -> None:\n # Test some more invalid xml chars, the full range should be\n # tested really but let's just test the edges of the ranges\n # instead.\n # XXX This only tests low unicode character points for now as\n # there are some issues with the testing infrastructure for\n # the higher ones.\n # XXX Testing 0xD (\\r) is tricky as it 
overwrites the just written\n # line in the output, so we skip it too.\n invalid = (\n 0x00,\n 0x1,\n 0xB,\n 0xC,\n 0xE,\n 0x19,\n 27, # issue #126\n 0xD800,\n 0xDFFF,\n 0xFFFE,\n 0x0FFFF,\n ) # , 0x110000)\n valid = (0x9, 0xA, 0x20)\n # 0xD, 0xD7FF, 0xE000, 0xFFFD, 0x10000, 0x10FFFF)\n\n for i in invalid:\n got = bin_xml_escape(chr(i))\n if i <= 0xFF:\n expected = \"#x%02X\" % i\n else:\n expected = \"#x%04X\" % i\n assert got == expected\n for i in valid:\n assert chr(i) == bin_xml_escape(chr(i))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logxml_path_expansion_test_logxml_path_expansion.assert_xml_var_logfile_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logxml_path_expansion_test_logxml_path_expansion.assert_xml_var_logfile_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1083, "end_line": 1091, "span_ids": ["test_logxml_path_expansion"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_logxml_path_expansion(tmp_path: Path, monkeypatch: MonkeyPatch) -> None:\n home_tilde = Path(os.path.expanduser(\"~\")).joinpath(\"test.xml\")\n xml_tilde = LogXML(Path(\"~\", \"test.xml\"), None)\n assert xml_tilde.logfile == str(home_tilde)\n\n monkeypatch.setenv(\"HOME\", str(tmp_path))\n home_var = os.path.normpath(os.path.expandvars(\"$HOME/test.xml\"))\n xml_var = LogXML(Path(\"$HOME\", \"test.xml\"), None)\n assert xml_var.logfile == str(home_var)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_double_colon_split_function_issue469_test_double_colon_split_function_issue469.node_assert_attr_name_te": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_double_colon_split_function_issue469_test_double_colon_split_function_issue469.node_assert_attr_name_te", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1144, "end_line": 1159, "span_ids": ["test_double_colon_split_function_issue469"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_double_colon_split_function_issue469(\n pytester: Pytester, run_and_parse: RunAndParse\n) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('param', [\"double::colon\"])\n def test_func(param):\n pass\n \"\"\"\n )\n result, dom = run_and_parse()\n 
assert result.ret == 0\n node = dom.find_first_by_tag(\"testcase\")\n node.assert_attr(classname=\"test_double_colon_split_function_issue469\")\n node.assert_attr(name=\"test_func[double::colon]\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_double_colon_split_method_issue469_test_double_colon_split_method_issue469.node_assert_attr_name_te": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_double_colon_split_method_issue469_test_double_colon_split_method_issue469.node_assert_attr_name_te", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1162, "end_line": 1178, "span_ids": ["test_double_colon_split_method_issue469"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_double_colon_split_method_issue469(\n pytester: Pytester, run_and_parse: RunAndParse\n) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n class TestClass(object):\n @pytest.mark.parametrize('param', [\"double::colon\"])\n def test_func(self, param):\n pass\n \"\"\"\n )\n result, dom = run_and_parse()\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testcase\")\n node.assert_attr(classname=\"test_double_colon_split_method_issue469.TestClass\")\n node.assert_attr(name=\"test_func[double::colon]\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_unicode_issue368_test_unicode_issue368.log_pytest_sessionfinish_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_unicode_issue368_test_unicode_issue368.log_pytest_sessionfinish_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1181, "end_line": 1208, "span_ids": ["test_unicode_issue368.Report", "test_unicode_issue368", "test_unicode_issue368.Report:2"], "tokens": 274}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_unicode_issue368(pytester: Pytester) -> None:\n path = pytester.path.joinpath(\"test.xml\")\n log = LogXML(str(path), None)\n ustr = \"\u0412\u041d\u0418!\"\n\n class Report(BaseReport):\n longrepr = ustr\n sections: List[Tuple[str, str]] = []\n nodeid = \"something\"\n location = \"tests/filename.py\", 42, \"TestClass.method\"\n when = \"teardown\"\n\n test_report = cast(TestReport, Report())\n\n # hopefully this is not too brittle ...\n log.pytest_sessionstart()\n node_reporter = 
log._opentestcase(test_report)\n node_reporter.append_failure(test_report)\n node_reporter.append_collect_error(test_report)\n node_reporter.append_collect_skipped(test_report)\n node_reporter.append_error(test_report)\n test_report.longrepr = \"filename\", 1, ustr\n node_reporter.append_skipped(test_report)\n test_report.longrepr = \"filename\", 1, \"Skipped: \u5361\u5623\u5623\"\n node_reporter.append_skipped(test_report)\n test_report.wasxfail = ustr # type: ignore[attr-defined]\n node_reporter.append_skipped(test_report)\n log.pytest_sessionfinish()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_property_same_name_test_record_fixtures_without_junitxml.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_property_same_name_test_record_fixtures_without_junitxml.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1233, "end_line": 1265, "span_ids": ["test_record_fixtures_without_junitxml", "test_record_property_same_name"], "tokens": 259}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_record_property_same_name(\n pytester: Pytester, run_and_parse: RunAndParse\n) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_record_with_same_name(record_property):\n record_property(\"foo\", \"bar\")\n record_property(\"foo\", \"baz\")\n \"\"\"\n )\n result, dom = run_and_parse()\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n psnode = tnode.find_first_by_tag(\"properties\")\n pnodes = psnode.find_by_tag(\"property\")\n pnodes[0].assert_attr(name=\"foo\", value=\"bar\")\n pnodes[1].assert_attr(name=\"foo\", value=\"baz\")\n\n\n@pytest.mark.parametrize(\"fixture_name\", [\"record_property\", \"record_xml_attribute\"])\ndef test_record_fixtures_without_junitxml(\n pytester: Pytester, fixture_name: str\n) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_record({fixture_name}):\n {fixture_name}(\"foo\", \"bar\")\n \"\"\".format(\n fixture_name=fixture_name\n )\n )\n result = pytester.runpytest()\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_attribute_test_record_attribute.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_attribute_test_record_attribute.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1268, "end_line": 1294, "span_ids": ["test_record_attribute"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_record_attribute(pytester: Pytester, run_and_parse: RunAndParse) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n junit_family = xunit1\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def other(record_xml_attribute):\n record_xml_attribute(\"bar\", 1)\n def test_record(record_xml_attribute, other):\n record_xml_attribute(\"foo\", \"<1\");\n \"\"\"\n )\n result, dom = run_and_parse()\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(bar=\"1\")\n tnode.assert_attr(foo=\"<1\")\n result.stdout.fnmatch_lines(\n [\"*test_record_attribute.py:6:*record_xml_attribute is an experimental feature\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_fixtures_xunit2_test_record_fixtures_xunit2.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_fixtures_xunit2_test_record_fixtures_xunit2.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1297, "end_line": 1335, "span_ids": ["test_record_fixtures_xunit2"], "tokens": 294}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\n@pytest.mark.parametrize(\"fixture_name\", [\"record_xml_attribute\", \"record_property\"])\ndef test_record_fixtures_xunit2(\n pytester: Pytester, fixture_name: str, run_and_parse: RunAndParse\n) -> None:\n \"\"\"Ensure record_xml_attribute and record_property drop values when outside of legacy family.\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n junit_family = xunit2\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def other({fixture_name}):\n {fixture_name}(\"bar\", 1)\n def test_record({fixture_name}, other):\n {fixture_name}(\"foo\", \"<1\");\n \"\"\".format(\n fixture_name=fixture_name\n )\n )\n\n result, dom = run_and_parse(family=None)\n expected_lines = []\n if fixture_name == \"record_xml_attribute\":\n expected_lines.append(\n \"*test_record_fixtures_xunit2.py:6:*record_xml_attribute is an experimental feature\"\n )\n expected_lines = [\n \"*test_record_fixtures_xunit2.py:6:*{fixture_name} is incompatible \"\n \"with junit_family 'xunit2' (use 'legacy' or 'xunit1')\".format(\n fixture_name=fixture_name\n )\n ]\n result.stdout.fnmatch_lines(expected_lines)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_random_report_log_xdist_test_random_report_log_xdist.assert_failed_test_x": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_random_report_log_xdist_test_random_report_log_xdist.assert_failed_test_x", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1338, "end_line": 1361, "span_ids": ["test_random_report_log_xdist"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_random_report_log_xdist(\n pytester: Pytester, monkeypatch: MonkeyPatch, run_and_parse: RunAndParse\n) -> None:\n \"\"\"`xdist` calls pytest_runtest_logreport as they are executed by the workers,\n with nodes from several nodes overlapping, so junitxml must cope with that\n to produce correct reports (#1064).\"\"\"\n pytest.importorskip(\"xdist\")\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n pytester.makepyfile(\n \"\"\"\n import pytest, time\n @pytest.mark.parametrize('i', list(range(30)))\n def test_x(i):\n assert i != 22\n \"\"\"\n )\n _, dom = run_and_parse(\"-n2\")\n suite_node = dom.find_first_by_tag(\"testsuite\")\n failed = []\n for case_node in suite_node.find_by_tag(\"testcase\"):\n if case_node.find_first_by_tag(\"failure\"):\n failed.append(case_node[\"name\"])\n\n assert failed == [\"test_x[22]\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_fancy_items_regression_test_fancy_items_regression.assert_items_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_fancy_items_regression_test_fancy_items_regression.assert_items_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1413, "end_line": 1462, "span_ids": ["test_fancy_items_regression"], "tokens": 312}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fancy_items_regression(pytester: Pytester, run_and_parse: RunAndParse) -> None:\n # issue 1259\n pytester.makeconftest(\n \"\"\"\n import pytest\n class FunItem(pytest.Item):\n def runtest(self):\n pass\n class NoFunItem(pytest.Item):\n def runtest(self):\n pass\n\n class FunCollector(pytest.File):\n def collect(self):\n return [\n FunItem.from_parent(name='a', parent=self),\n NoFunItem.from_parent(name='a', parent=self),\n NoFunItem.from_parent(name='b', parent=self),\n ]\n\n def pytest_collect_file(path, parent):\n if path.check(ext='.py'):\n return FunCollector.from_parent(fspath=path, parent=parent)\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n def test_pass():\n pass\n 
\"\"\"\n )\n\n result, dom = run_and_parse()\n\n result.stdout.no_fnmatch_line(\"*INTERNALERROR*\")\n\n items = sorted(\"%(classname)s %(name)s\" % x for x in dom.find_by_tag(\"testcase\"))\n import pprint\n\n pprint.pprint(items)\n assert items == [\n \"conftest a\",\n \"conftest a\",\n \"conftest b\",\n \"test_fancy_items_regression a\",\n \"test_fancy_items_regression a\",\n \"test_fancy_items_regression b\",\n \"test_fancy_items_regression test_pass\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_global_properties_test_global_properties.assert_actual_expected": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_global_properties_test_global_properties.assert_actual_expected", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1465, "end_line": 1497, "span_ids": ["test_global_properties.Report", "test_global_properties.Report:2", "test_global_properties"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@parametrize_families\ndef test_global_properties(pytester: Pytester, xunit_family: str) -> None:\n path = pytester.path.joinpath(\"test_global_properties.xml\")\n log = LogXML(str(path), None, family=xunit_family)\n\n class Report(BaseReport):\n sections: List[Tuple[str, str]] = []\n nodeid = \"test_node_id\"\n\n log.pytest_sessionstart()\n log.add_global_property(\"foo\", \"1\")\n log.add_global_property(\"bar\", \"2\")\n log.pytest_sessionfinish()\n\n dom = minidom.parse(str(path))\n\n properties = dom.getElementsByTagName(\"properties\")\n\n assert properties.length == 1, \"There must be one node\"\n\n property_list = dom.getElementsByTagName(\"property\")\n\n assert property_list.length == 2, \"There most be only 2 property nodes\"\n\n expected = {\"foo\": \"1\", \"bar\": \"2\"}\n actual = {}\n\n for p in property_list:\n k = str(p.getAttribute(\"name\"))\n v = str(p.getAttribute(\"value\"))\n actual[k] = v\n\n assert actual == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_url_property_test_url_property.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_url_property_test_url_property.assert_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1500, "end_line": 1523, "span_ids": ["test_url_property.Report:2", "test_url_property", "test_url_property.Report"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_url_property(pytester: Pytester) -> None:\n test_url = \"http://www.github.com/pytest-dev\"\n path = pytester.path.joinpath(\"test_url_property.xml\")\n log = LogXML(str(path), None)\n\n class Report(BaseReport):\n longrepr = \"FooBarBaz\"\n sections: List[Tuple[str, str]] = []\n nodeid = \"something\"\n location = \"tests/filename.py\", 42, \"TestClass.method\"\n url = test_url\n\n test_report = cast(TestReport, Report())\n\n log.pytest_sessionstart()\n node_reporter = log._opentestcase(test_report)\n node_reporter.append_failure(test_report)\n log.pytest_sessionfinish()\n\n test_case = minidom.parse(str(path)).getElementsByTagName(\"testcase\")[0]\n\n assert (\n test_case.getAttribute(\"url\") == test_url\n ), \"The URL did not get written to the xml\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_set_suite_name_test_set_suite_name.node_assert_attr_name_exp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_set_suite_name_test_set_suite_name.node_assert_attr_name_exp", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1578, "end_line": 1607, "span_ids": ["test_set_suite_name"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"suite_name\", [\"my_suite\", \"\"])\n@parametrize_families\ndef test_set_suite_name(\n pytester: Pytester, suite_name: str, run_and_parse: RunAndParse, xunit_family: str\n) -> None:\n if suite_name:\n pytester.makeini(\n \"\"\"\n [pytest]\n junit_suite_name={suite_name}\n junit_family={family}\n \"\"\".format(\n suite_name=suite_name, family=xunit_family\n )\n )\n expected = suite_name\n else:\n expected = \"pytest\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def test_func():\n pass\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(name=expected)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_marked_class_run_twice_test_marked_class_run_twice.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_marked_class_run_twice_test_marked_class_run_twice.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 48, "end_line": 63, "span_ids": ["test_marked_class_run_twice"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_marked_class_run_twice(pytester: Pytester) -> None:\n \"\"\"Test fails file is run twice that contains marked class.\n See issue#683.\n \"\"\"\n py_file = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('abc', [1, 2, 3])\n class Test1(object):\n def test_1(self, abc):\n assert abc in [1, 2, 3]\n \"\"\"\n )\n file_name = os.path.basename(py_file)\n rec = pytester.inline_run(file_name, file_name)\n rec.assertoutcome(passed=6)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_ini_markers_test_ini_markers.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_ini_markers_test_ini_markers.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 66, "end_line": 86, "span_ids": ["test_ini_markers"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_ini_markers(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n markers =\n a1: this is a webtest marker\n a2: this is a smoke marker\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_markers(pytestconfig):\n markers = pytestconfig.getini(\"markers\")\n print(markers)\n assert len(markers) >= 2\n assert markers[0].startswith(\"a1:\")\n assert markers[1].startswith(\"a2:\")\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_markers_option_test_ini_markers_whitespace.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_markers_option_test_ini_markers_whitespace.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 89, "end_line": 123, "span_ids": ["test_markers_option", "test_ini_markers_whitespace"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_markers_option(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n markers =\n a1: this is a webtest marker\n a1some: another marker\n nodescription\n \"\"\"\n )\n result = pytester.runpytest(\"--markers\")\n 
result.stdout.fnmatch_lines(\n [\"*a1*this is a webtest*\", \"*a1some*another marker\", \"*nodescription*\"]\n )\n\n\ndef test_ini_markers_whitespace(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n markers =\n a1 : this is a whitespace marker\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.a1\n def test_markers():\n assert True\n \"\"\"\n )\n rec = pytester.inline_run(\"--strict-markers\", \"-m\", \"a1\")\n rec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_marker_without_description_test_marker_without_description.rec_assert_outcomes_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_marker_without_description_test_marker_without_description.rec_assert_outcomes_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 126, "end_line": 143, "span_ids": ["test_marker_without_description"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_marker_without_description(pytester: Pytester) -> None:\n pytester.makefile(\n \".cfg\",\n setup=\"\"\"\n [tool:pytest]\n markers=slow\n \"\"\",\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n pytest.mark.xfail('FAIL')\n \"\"\"\n )\n ftdir = pytester.mkdir(\"ft1_dummy\")\n pytester.path.joinpath(\"conftest.py\").replace(ftdir.joinpath(\"conftest.py\"))\n rec = pytester.runpytest(\"--strict-markers\")\n rec.assert_outcomes()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_markers_option_with_plugin_in_current_dir_test_markers_option_with_plugin_in_current_dir.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_markers_option_with_plugin_in_current_dir_test_markers_option_with_plugin_in_current_dir.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 146, "end_line": 169, "span_ids": ["test_markers_option_with_plugin_in_current_dir"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_markers_option_with_plugin_in_current_dir(pytester: Pytester) -> None:\n pytester.makeconftest('pytest_plugins = \"flip_flop\"')\n pytester.makepyfile(\n flip_flop=\"\"\"\\\n def pytest_configure(config):\n config.addinivalue_line(\"markers\", \"flip:flop\")\n\n def pytest_generate_tests(metafunc):\n 
try:\n mark = metafunc.function.flipper\n except AttributeError:\n return\n metafunc.parametrize(\"x\", (10, 20))\"\"\"\n )\n pytester.makepyfile(\n \"\"\"\\\n import pytest\n @pytest.mark.flipper\n def test_example(x):\n assert x\"\"\"\n )\n\n result = pytester.runpytest(\"--markers\")\n result.stdout.fnmatch_lines([\"*flip*flop*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_on_pseudo_function_test_strict_prohibits_unregistered_markers.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_on_pseudo_function_test_strict_prohibits_unregistered_markers.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 172, "end_line": 202, "span_ids": ["test_mark_on_pseudo_function", "test_strict_prohibits_unregistered_markers"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_mark_on_pseudo_function(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.r(lambda x: 0/0)\n def test_hello():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n\n@pytest.mark.parametrize(\"option_name\", [\"--strict-markers\", \"--strict\"])\ndef test_strict_prohibits_unregistered_markers(\n pytester: Pytester, option_name: str\n) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.unregisteredmark\n def test_hello():\n pass\n \"\"\"\n )\n result = pytester.runpytest(option_name)\n assert result.ret != 0\n result.stdout.fnmatch_lines(\n [\"'unregisteredmark' not found in `markers` configuration option\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_collected_from_command_line_test_parametrized_collected_from_command_line.rec_assertoutcome_passed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_collected_from_command_line_test_parametrized_collected_from_command_line.rec_assertoutcome_passed_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 391, "end_line": 404, "span_ids": ["test_parametrized_collected_from_command_line"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parametrized_collected_from_command_line(pytester: Pytester) -> None:\n \"\"\"Parametrized test not collected if 
test named specified in command\n line issue#649.\"\"\"\n py_file = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize(\"arg\", [None, 1.3, \"2-3\"])\n def test_func(arg):\n pass\n \"\"\"\n )\n file_name = os.path.basename(py_file)\n rec = pytester.inline_run(file_name + \"::\" + \"test_func\")\n rec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_collect_with_wrong_args_test_parametrized_collect_with_wrong_args.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_collect_with_wrong_args_test_parametrized_collect_with_wrong_args.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 407, "end_line": 427, "span_ids": ["test_parametrized_collect_with_wrong_args"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parametrized_collect_with_wrong_args(pytester: Pytester) -> None:\n \"\"\"Test collect parametrized func with wrong number of args.\"\"\"\n py_file = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize('foo, bar', [(1, 2, 3)])\n def test_func(foo, bar):\n pass\n \"\"\"\n )\n\n result = pytester.runpytest(py_file)\n result.stdout.fnmatch_lines(\n [\n 'test_parametrized_collect_with_wrong_args.py::test_func: in \"parametrize\" the number of names (2):',\n \" ['foo', 'bar']\",\n \"must be equal to the number of values (3):\",\n \" (1, 2, 3)\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_with_kwargs_test_parametrized_with_kwargs.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrized_with_kwargs_test_parametrized_with_kwargs.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 430, "end_line": 447, "span_ids": ["test_parametrized_with_kwargs"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parametrized_with_kwargs(pytester: Pytester) -> None:\n \"\"\"Test collect parametrized func with wrong number of args.\"\"\"\n py_file = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1,2])\n def a(request):\n return request.param\n\n @pytest.mark.parametrize(argnames='b', argvalues=[1, 2])\n def test_func(a, b):\n 
pass\n \"\"\"\n )\n\n result = pytester.runpytest(py_file)\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional_TestFunctional.test_merging_markers_deep.for_item_in_items_.assert_x_for_x_in_item_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional_TestFunctional.test_merging_markers_deep.for_item_in_items_.assert_x_for_x_in_item_i", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 472, "end_line": 491, "span_ids": ["TestFunctional.test_merging_markers_deep", "TestFunctional"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n def test_merging_markers_deep(self, pytester: Pytester) -> None:\n # issue 199 - propagate markers into nested classes\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n class TestA(object):\n pytestmark = pytest.mark.a\n def test_b(self):\n assert True\n class TestC(object):\n # this one didn't get marked\n def test_d(self):\n assert True\n \"\"\"\n )\n items, rec = pytester.inline_genitems(p)\n for item in items:\n print(item, item.keywords)\n assert [x for x in item.iter_markers() if x.name == \"a\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_decorator_subclass_does_not_propagate_to_base_TestFunctional.test_mark_decorator_subclass_does_not_propagate_to_base.self_assert_markers_items": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_decorator_subclass_does_not_propagate_to_base_TestFunctional.test_mark_decorator_subclass_does_not_propagate_to_base.self_assert_markers_items", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 493, "end_line": 512, "span_ids": ["TestFunctional.test_mark_decorator_subclass_does_not_propagate_to_base"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_mark_decorator_subclass_does_not_propagate_to_base(\n self, pytester: Pytester\n ) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.a\n class Base(object): pass\n\n @pytest.mark.b\n class Test1(Base):\n def test_foo(self): pass\n\n class Test2(Base):\n def test_bar(self): pass\n \"\"\"\n )\n items, rec = pytester.inline_genitems(p)\n self.assert_markers(items, test_foo=(\"a\", \"b\"), 
test_bar=(\"a\",))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_should_not_pass_to_siebling_class_TestFunctional.test_mark_should_not_pass_to_siebling_class.assert_list_sub_item_iter": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_should_not_pass_to_siebling_class_TestFunctional.test_mark_should_not_pass_to_siebling_class.assert_list_sub_item_iter", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 514, "end_line": 540, "span_ids": ["TestFunctional.test_mark_should_not_pass_to_siebling_class"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_mark_should_not_pass_to_siebling_class(self, pytester: Pytester) -> None:\n \"\"\"#568\"\"\"\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n class TestBase(object):\n def test_foo(self):\n pass\n\n @pytest.mark.b\n class TestSub(TestBase):\n pass\n\n\n class TestOtherSub(TestBase):\n pass\n\n \"\"\"\n )\n items, rec = pytester.inline_genitems(p)\n base_item, sub_item, sub_item_other = items\n print(items, [x.nodeid for x in items])\n # new api segregates\n assert not list(base_item.iter_markers(name=\"b\"))\n assert not list(sub_item_other.iter_markers(name=\"b\"))\n assert list(sub_item.iter_markers(name=\"b\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_decorator_baseclasses_merged_TestFunctional.test_mark_decorator_baseclasses_merged.self_assert_markers_items": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_decorator_baseclasses_merged_TestFunctional.test_mark_decorator_baseclasses_merged.self_assert_markers_items", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 542, "end_line": 563, "span_ids": ["TestFunctional.test_mark_decorator_baseclasses_merged"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_mark_decorator_baseclasses_merged(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.a\n class Base(object): pass\n\n @pytest.mark.b\n class Base2(Base): pass\n\n @pytest.mark.c\n class Test1(Base2):\n def test_foo(self): pass\n\n class Test2(Base2):\n @pytest.mark.d\n def test_bar(self): pass\n 
\"\"\"\n )\n items, rec = pytester.inline_genitems(p)\n self.assert_markers(items, test_foo=(\"a\", \"b\", \"c\"), test_bar=(\"a\", \"b\", \"d\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_with_wrong_marker_TestFunctional.test_mark_dynamically_in_funcarg.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_with_wrong_marker_TestFunctional.test_mark_dynamically_in_funcarg.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 591, "end_line": 624, "span_ids": ["TestFunctional.test_mark_dynamically_in_funcarg", "TestFunctional.test_mark_with_wrong_marker"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_mark_with_wrong_marker(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n import pytest\n class pytestmark(object):\n pass\n def test_func():\n pass\n \"\"\"\n )\n values = reprec.getfailedcollections()\n assert len(values) == 1\n assert \"TypeError\" in str(values[0].longrepr)\n\n def test_mark_dynamically_in_funcarg(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg(request):\n request.applymarker(pytest.mark.hello)\n def pytest_terminal_summary(terminalreporter):\n values = terminalreporter.stats['passed']\n terminalreporter._tw.line(\"keyword: %s\" % values[0].keywords)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_func(arg):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"keyword: *hello*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_no_marker_match_on_unmarked_names_TestFunctional.test_no_marker_match_on_unmarked_names.assert_len_deselected_tes": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_no_marker_match_on_unmarked_names_TestFunctional.test_no_marker_match_on_unmarked_names.assert_len_deselected_tes", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 626, "end_line": 643, "span_ids": ["TestFunctional.test_no_marker_match_on_unmarked_names"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def 
test_no_marker_match_on_unmarked_names(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.shouldmatch\n def test_marked():\n assert 1\n\n def test_unmarked():\n assert 1\n \"\"\"\n )\n reprec = pytester.inline_run(\"-m\", \"test_unmarked\", p)\n passed, skipped, failed = reprec.listoutcomes()\n assert len(passed) + len(skipped) + len(failed) == 0\n dlist = reprec.getcalls(\"pytest_deselected\")\n deselected_tests = dlist[0].items\n assert len(deselected_tests) == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_keyword_added_for_session_TestFunctional.test_keyword_added_for_session.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_keyword_added_for_session_TestFunctional.test_keyword_added_for_session.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 667, "end_line": 693, "span_ids": ["TestFunctional.test_keyword_added_for_session"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_keyword_added_for_session(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_collection_modifyitems(session):\n session.add_marker(\"mark1\")\n session.add_marker(pytest.mark.mark2)\n session.add_marker(pytest.mark.mark3)\n pytest.raises(ValueError, lambda:\n session.add_marker(10))\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_some(request):\n assert \"mark1\" in request.keywords\n assert \"mark2\" in request.keywords\n assert \"mark3\" in request.keywords\n assert 10 not in request.keywords\n marker = request.node.get_closest_marker(\"mark1\")\n assert marker.name == \"mark1\"\n assert marker.args == ()\n assert marker.kwargs == {}\n \"\"\"\n )\n reprec = pytester.inline_run(\"-m\", \"mark1\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.assert_markers_TestFunctional.assert_markers.for_name_expected_marker.assert_markers_set_exp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.assert_markers_TestFunctional.assert_markers.for_name_expected_marker.assert_markers_set_exp", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 695, "end_line": 705, "span_ids": ["TestFunctional.assert_markers"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def assert_markers(self, items, **expected) -> None:\n \"\"\"Assert that given items have expected marker names applied to them.\n expected should be a dict of (item name -> seq of expected marker names).\n\n Note: this could be moved to ``pytester`` if proven to be useful\n to other modules.\n \"\"\"\n items = {x.name: x for x in items}\n for name, expected_markers in expected.items():\n markers = {m.name for m in items[name].iter_markers()}\n assert markers == set(expected_markers)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_from_parameters_TestFunctional.test_mark_from_parameters.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_from_parameters_TestFunctional.test_mark_from_parameters.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 707, "end_line": 730, "span_ids": ["TestFunctional.test_mark_from_parameters"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n @pytest.mark.filterwarnings(\"ignore\")\n def test_mark_from_parameters(self, pytester: Pytester) -> None:\n \"\"\"#1540\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n pytestmark = pytest.mark.skipif(True, reason='skip all')\n\n # skipifs inside fixture params\n params = [pytest.mark.skipif(False, reason='dont skip')('parameter')]\n\n\n @pytest.fixture(params=params)\n def parameter(request):\n return request.param\n\n\n def test_1(parameter):\n assert True\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection_TestKeywordSelection.test_select_simple.check_TestClass_and_test": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection_TestKeywordSelection.test_select_simple.check_TestClass_and_test", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 763, "end_line": 784, "span_ids": ["TestKeywordSelection", "TestKeywordSelection.test_select_simple"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "class TestKeywordSelection:\n def test_select_simple(self, pytester: Pytester) -> None:\n file_test = pytester.makepyfile(\n \"\"\"\n def test_one():\n assert 0\n class TestClass(object):\n def test_method_one(self):\n assert 42 == 43\n \"\"\"\n )\n\n def check(keyword, name):\n reprec = pytester.inline_run(\"-s\", \"-k\", keyword, file_test)\n passed, skipped, failed = reprec.listoutcomes()\n assert len(failed) == 1\n assert failed[0].nodeid.split(\"::\")[-1] == name\n assert len(reprec.getcalls(\"pytest_deselected\")) == 1\n\n for keyword in [\"test_one\", \"est_on\"]:\n check(keyword, \"test_one\")\n check(\"TestClass and test\", \"test_method_one\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_select_extra_keywords_TestKeywordSelection.test_select_extra_keywords.assert_dlist_0_items_0_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_select_extra_keywords_TestKeywordSelection.test_select_extra_keywords.assert_dlist_0_items_0_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 786, "end_line": 825, "span_ids": ["TestKeywordSelection.test_select_extra_keywords"], "tokens": 302}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestKeywordSelection:\n\n @pytest.mark.parametrize(\n \"keyword\",\n [\n \"xxx\",\n \"xxx and test_2\",\n \"TestClass\",\n \"xxx and not test_1\",\n \"TestClass and test_2\",\n \"xxx and TestClass and test_2\",\n ],\n )\n def test_select_extra_keywords(self, pytester: Pytester, keyword) -> None:\n p = pytester.makepyfile(\n test_select=\"\"\"\n def test_1():\n pass\n class TestClass(object):\n def test_2(self):\n pass\n \"\"\"\n )\n pytester.makepyfile(\n conftest=\"\"\"\n import pytest\n @pytest.hookimpl(hookwrapper=True)\n def pytest_pycollect_makeitem(name):\n outcome = yield\n if name == \"TestClass\":\n item = outcome.get_result()\n item.extra_keyword_matches.add(\"xxx\")\n \"\"\"\n )\n reprec = pytester.inline_run(p.parent, \"-s\", \"-k\", keyword)\n print(\"keyword\", repr(keyword))\n passed, skipped, failed = reprec.listoutcomes()\n assert len(passed) == 1\n assert passed[0].nodeid.endswith(\"test_2\")\n dlist = reprec.getcalls(\"pytest_deselected\")\n assert len(dlist) == 1\n assert dlist[0].items[0].name == \"test_1\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_select_starton_TestKeywordSelection.test_select_starton.assert_item_name_test": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_select_starton_TestKeywordSelection.test_select_starton.assert_item_name_test", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", 
"file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 827, "end_line": 842, "span_ids": ["TestKeywordSelection.test_select_starton"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestKeywordSelection:\n\n def test_select_starton(self, pytester: Pytester) -> None:\n threepass = pytester.makepyfile(\n test_threepass=\"\"\"\n def test_one(): assert 1\n def test_two(): assert 1\n def test_three(): assert 1\n \"\"\"\n )\n reprec = pytester.inline_run(\"-k\", \"test_two:\", threepass)\n passed, skipped, failed = reprec.listoutcomes()\n assert len(passed) == 2\n assert not failed\n dlist = reprec.getcalls(\"pytest_deselected\")\n assert len(dlist) == 1\n item = dlist[0].items[0]\n assert item.name == \"test_one\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_keyword_extra_TestKeywordSelection.test_keyword_extra_dash.assert_passed_skipped_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_keyword_extra_TestKeywordSelection.test_keyword_extra_dash.assert_passed_skipped_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 844, "end_line": 869, "span_ids": ["TestKeywordSelection.test_keyword_extra", "TestKeywordSelection.test_keyword_extra_dash"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestKeywordSelection:\n\n def test_keyword_extra(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test_one():\n assert 0\n test_one.mykeyword = True\n \"\"\"\n )\n reprec = pytester.inline_run(\"-k\", \"mykeyword\", p)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 1\n\n @pytest.mark.xfail\n def test_keyword_extra_dash(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test_one():\n assert 0\n test_one.mykeyword = True\n \"\"\"\n )\n # with argparse the argument to an option cannot\n # start with '-'\n reprec = pytester.inline_run(\"-k\", \"-mykeyword\", p)\n passed, skipped, failed = reprec.countoutcomes()\n assert passed + skipped + failed == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_parametrize_marks_test_parameterset_for_parametrize_marks.if_mark_xfail_.assert_result_mark_kwargs": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_parametrize_marks_test_parameterset_for_parametrize_marks.if_mark_xfail_.assert_result_mark_kwargs", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 941, "end_line": 966, "span_ids": ["test_parameterset_for_parametrize_marks"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"mark\", [None, \"\", \"skip\", \"xfail\"])\ndef test_parameterset_for_parametrize_marks(\n pytester: Pytester, mark: Optional[str]\n) -> None:\n if mark is not None:\n pytester.makeini(\n \"\"\"\n [pytest]\n {}={}\n \"\"\".format(\n EMPTY_PARAMETERSET_OPTION, mark\n )\n )\n\n config = pytester.parseconfig()\n from _pytest.mark import pytest_configure, get_empty_parameterset_mark\n\n pytest_configure(config)\n result_mark = get_empty_parameterset_mark(config, [\"a\"], all)\n if mark in (None, \"\"):\n # normalize to the requested name\n mark = \"skip\"\n assert result_mark.name == mark\n assert result_mark.kwargs[\"reason\"].startswith(\"got empty parameter set \")\n if mark == \"xfail\":\n assert result_mark.kwargs.get(\"run\") is False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_TestSetattrWithImportPath_TestSetattrWithImportPath.test_delattr.assert_os_path_abspath": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_TestSetattrWithImportPath_TestSetattrWithImportPath.test_delattr.assert_os_path_abspath", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 55, "end_line": 94, "span_ids": ["TestSetattrWithImportPath.test_unknown_import", "TestSetattrWithImportPath.test_wrong_target", "TestSetattrWithImportPath.test_unicode_string", "TestSetattrWithImportPath", "TestSetattrWithImportPath.test_unknown_attr_non_raising", "TestSetattrWithImportPath.test_string_expression_class", "TestSetattrWithImportPath.test_delattr", "TestSetattrWithImportPath.test_string_expression", "TestSetattrWithImportPath.test_unknown_attr"], "tokens": 397}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSetattrWithImportPath:\n def test_string_expression(self, monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setattr(\"os.path.abspath\", lambda x: \"hello2\")\n assert os.path.abspath(\"123\") == \"hello2\"\n\n def test_string_expression_class(self, monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setattr(\"_pytest.config.Config\", 42)\n import _pytest\n\n assert _pytest.config.Config == 42 # type: ignore\n\n def test_unicode_string(self, monkeypatch: 
MonkeyPatch) -> None:\n monkeypatch.setattr(\"_pytest.config.Config\", 42)\n import _pytest\n\n assert _pytest.config.Config == 42 # type: ignore\n monkeypatch.delattr(\"_pytest.config.Config\")\n\n def test_wrong_target(self, monkeypatch: MonkeyPatch) -> None:\n with pytest.raises(TypeError):\n monkeypatch.setattr(None, None) # type: ignore[call-overload]\n\n def test_unknown_import(self, monkeypatch: MonkeyPatch) -> None:\n with pytest.raises(ImportError):\n monkeypatch.setattr(\"unkn123.classx\", None)\n\n def test_unknown_attr(self, monkeypatch: MonkeyPatch) -> None:\n with pytest.raises(AttributeError):\n monkeypatch.setattr(\"os.path.qweqwe\", None)\n\n def test_unknown_attr_non_raising(self, monkeypatch: MonkeyPatch) -> None:\n # https://github.com/pytest-dev/pytest/issues/746\n monkeypatch.setattr(\"os.path.qweqwe\", 42, raising=False)\n assert os.path.qweqwe == 42 # type: ignore\n\n def test_delattr(self, monkeypatch: MonkeyPatch) -> None:\n monkeypatch.delattr(\"os.path.abspath\")\n assert not hasattr(os.path, \"abspath\")\n monkeypatch.undo()\n assert os.path.abspath", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_delattr_test_delattr.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_delattr_test_delattr.None_3", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 97, "end_line": 114, "span_ids": ["test_delattr.A:2", "test_delattr", "test_delattr.A"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_delattr() -> None:\n class A:\n x = 1\n\n monkeypatch = MonkeyPatch()\n monkeypatch.delattr(A, \"x\")\n assert not hasattr(A, \"x\")\n monkeypatch.undo()\n assert A.x == 1\n\n monkeypatch = MonkeyPatch()\n monkeypatch.delattr(A, \"x\")\n pytest.raises(AttributeError, monkeypatch.delattr, A, \"y\")\n monkeypatch.delattr(A, \"y\", raising=False)\n monkeypatch.setattr(A, \"x\", 5, raising=False)\n assert A.x == 5\n monkeypatch.undo()\n assert A.x == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setitem_test_setitem.assert_d_x_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setitem_test_setitem.assert_d_x_5", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 117, "end_line": 132, "span_ids": ["test_setitem"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def test_setitem() -> None:\n d = {\"x\": 1}\n monkeypatch = MonkeyPatch()\n monkeypatch.setitem(d, \"x\", 2)\n monkeypatch.setitem(d, \"y\", 1700)\n monkeypatch.setitem(d, \"y\", 1700)\n assert d[\"x\"] == 2\n assert d[\"y\"] == 1700\n monkeypatch.setitem(d, \"x\", 3)\n assert d[\"x\"] == 3\n monkeypatch.undo()\n assert d[\"x\"] == 1\n assert \"y\" not in d\n d[\"x\"] = 5\n monkeypatch.undo()\n assert d[\"x\"] == 5", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setitem_deleted_meanwhile_test_setenv_deleted_meanwhile.None_1.else_.assert_key_not_in_os_envi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setitem_deleted_meanwhile_test_setenv_deleted_meanwhile.None_1.else_.assert_key_not_in_os_envi", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 131, "end_line": 153, "span_ids": ["test_setitem_deleted_meanwhile", "test_setenv_deleted_meanwhile"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setitem_deleted_meanwhile() -> None:\n d: Dict[str, object] = {}\n monkeypatch = MonkeyPatch()\n monkeypatch.setitem(d, \"x\", 2)\n del d[\"x\"]\n monkeypatch.undo()\n assert not d\n\n\n@pytest.mark.parametrize(\"before\", [True, False])\ndef test_setenv_deleted_meanwhile(before: bool) -> None:\n key = \"qwpeoip123\"\n if before:\n os.environ[key] = \"world\"\n monkeypatch = MonkeyPatch()\n monkeypatch.setenv(key, \"hello\")\n del os.environ[key]\n monkeypatch.undo()\n if before:\n assert os.environ[key] == \"world\"\n del os.environ[key]\n else:\n assert key not in os.environ", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_delitem_test_delitem.assert_d_hello_wo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_delitem_test_delitem.assert_d_hello_wo", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 156, "end_line": 170, "span_ids": ["test_delitem"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_delitem() -> None:\n d: Dict[str, object] = {\"x\": 1}\n monkeypatch = MonkeyPatch()\n monkeypatch.delitem(d, \"x\")\n assert \"x\" not in d\n monkeypatch.delitem(d, \"y\", raising=False)\n pytest.raises(KeyError, monkeypatch.delitem, d, \"y\")\n assert not d\n monkeypatch.setitem(d, \"y\", 
1700)\n assert d[\"y\"] == 1700\n d[\"hello\"] = \"world\"\n monkeypatch.setitem(d, \"x\", 1500)\n assert d[\"x\"] == 1500\n monkeypatch.undo()\n assert d == {\"hello\": \"world\", \"x\": 1}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setenv_test_delenv.try_.finally_.if_name_in_os_environ_.del_os_environ_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setenv_test_delenv.try_.finally_.if_name_in_os_environ_.del_os_environ_name_", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 177, "end_line": 206, "span_ids": ["test_setenv", "test_delenv"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setenv() -> None:\n monkeypatch = MonkeyPatch()\n with pytest.warns(pytest.PytestWarning):\n monkeypatch.setenv(\"XYZ123\", 2) # type: ignore[arg-type]\n import os\n\n assert os.environ[\"XYZ123\"] == \"2\"\n monkeypatch.undo()\n assert \"XYZ123\" not in os.environ\n\n\ndef test_delenv() -> None:\n name = \"xyz1234\"\n assert name not in os.environ\n monkeypatch = MonkeyPatch()\n pytest.raises(KeyError, monkeypatch.delenv, name, raising=True)\n monkeypatch.delenv(name, raising=False)\n monkeypatch.undo()\n os.environ[name] = \"1\"\n try:\n monkeypatch = MonkeyPatch()\n monkeypatch.delenv(name)\n assert name not in os.environ\n monkeypatch.setenv(name, \"3\")\n assert os.environ[name] == \"3\"\n monkeypatch.undo()\n assert os.environ[name] == \"1\"\n finally:\n if name in os.environ:\n del os.environ[name]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_importerror_test_importerror.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_importerror_test_importerror.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 313, "end_line": 337, "span_ids": ["test_importerror"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_importerror(pytester: Pytester) -> None:\n p = pytester.mkpydir(\"package\")\n p.joinpath(\"a.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import doesnotexist\n\n x = 1\n \"\"\"\n )\n )\n pytester.path.joinpath(\"test_importerror.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_importerror(monkeypatch):\n monkeypatch.setattr('package.a.x', 
2)\n \"\"\"\n )\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *import error in package.a: No module named 'doesnotexist'*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_setup_func_with_setup_decorator_test_nose_setup_func.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_setup_func_with_setup_decorator_test_nose_setup_func.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 30, "end_line": 80, "span_ids": ["test_setup_func_not_callable.A", "test_setup_func_with_setup_decorator", "test_setup_func_with_setup_decorator.A", "test_setup_func_with_setup_decorator.A.f", "test_nose_setup_func", "test_setup_func_not_callable", "test_setup_func_not_callable.A:2"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_func_with_setup_decorator() -> None:\n from _pytest.nose import call_optional\n\n values = []\n\n class A:\n @pytest.fixture(autouse=True)\n def f(self):\n values.append(1)\n\n call_optional(A(), \"f\")\n assert not values\n\n\ndef test_setup_func_not_callable() -> None:\n from _pytest.nose import call_optional\n\n class A:\n f = 1\n\n call_optional(A(), \"f\")\n\n\ndef test_nose_setup_func(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n from nose.tools import with_setup\n\n values = []\n\n def my_setup():\n a = 1\n values.append(a)\n\n def my_teardown():\n b = 2\n values.append(b)\n\n @with_setup(my_setup, my_teardown)\n def test_hello():\n print(values)\n assert values == [1]\n\n def test_world():\n print(values)\n assert values == [1,2]\n\n \"\"\"\n )\n result = pytester.runpytest(p, \"-p\", \"nose\")\n result.assert_outcomes(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_setup_func_failure_test_nose_setup_func_failure_2.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_setup_func_failure_test_nose_setup_func_failure_2.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 83, "end_line": 123, "span_ids": ["test_nose_setup_func_failure_2", "test_nose_setup_func_failure"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def 
test_nose_setup_func_failure(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n from nose.tools import with_setup\n\n values = []\n my_setup = lambda x: 1\n my_teardown = lambda x: 2\n\n @with_setup(my_setup, my_teardown)\n def test_hello():\n print(values)\n assert values == [1]\n\n def test_world():\n print(values)\n assert values == [1,2]\n\n \"\"\"\n )\n result = pytester.runpytest(p, \"-p\", \"nose\")\n result.stdout.fnmatch_lines([\"*TypeError: ()*\"])\n\n\ndef test_nose_setup_func_failure_2(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n values = []\n\n my_setup = 1\n my_teardown = 2\n\n def test_hello():\n assert values == []\n\n test_hello.setup = my_setup\n test_hello.teardown = my_teardown\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_setup_partial_test_nose_setup_partial.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_setup_partial_test_nose_setup_partial.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 126, "end_line": 158, "span_ids": ["test_nose_setup_partial"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_nose_setup_partial(pytester: Pytester) -> None:\n pytest.importorskip(\"functools\")\n p = pytester.makepyfile(\n \"\"\"\n from functools import partial\n\n values = []\n\n def my_setup(x):\n a = x\n values.append(a)\n\n def my_teardown(x):\n b = x\n values.append(b)\n\n my_setup_partial = partial(my_setup, 1)\n my_teardown_partial = partial(my_teardown, 2)\n\n def test_hello():\n print(values)\n assert values == [1]\n\n def test_world():\n print(values)\n assert values == [1,2]\n\n test_hello.setup = my_setup_partial\n test_hello.teardown = my_teardown_partial\n \"\"\"\n )\n result = pytester.runpytest(p, \"-p\", \"nose\")\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_module_level_setup_test_module_level_setup.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_module_level_setup_test_module_level_setup.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 161, "end_line": 189, "span_ids": ["test_module_level_setup"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_module_level_setup(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from nose.tools import with_setup\n items = {}\n\n def setup():\n items[1]=1\n\n def teardown():\n del items[1]\n\n def setup2():\n items[2] = 2\n\n def teardown2():\n del items[2]\n\n def test_setup_module_setup():\n assert items[1] == 1\n\n @with_setup(setup2, teardown2)\n def test_local_setup():\n assert items[2] == 2\n assert 1 not in items\n \"\"\"\n )\n result = pytester.runpytest(\"-p\", \"nose\")\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_style_setup_teardown_test_nose_setup_ordering.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_nose_style_setup_teardown_test_nose_setup_ordering.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 192, "end_line": 228, "span_ids": ["test_nose_style_setup_teardown", "test_nose_setup_ordering"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_nose_style_setup_teardown(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n values = []\n\n def setup_module():\n values.append(1)\n\n def teardown_module():\n del values[0]\n\n def test_hello():\n assert values == [1]\n\n def test_world():\n assert values == [1]\n \"\"\"\n )\n result = pytester.runpytest(\"-p\", \"nose\")\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n\n\ndef test_nose_setup_ordering(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def setup_module(mod):\n mod.visited = True\n\n class TestClass(object):\n def setup(self):\n assert visited\n def test_first(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_apiwrapper_problem_issue260_test_apiwrapper_problem_issue260.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_apiwrapper_problem_issue260_test_apiwrapper_problem_issue260.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 231, "end_line": 253, "span_ids": ["test_apiwrapper_problem_issue260"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_apiwrapper_problem_issue260(pytester: Pytester) -> None:\n # this would end up trying a call an optional teardown on the class\n # for plain unittests we don't want nose behaviour\n pytester.makepyfile(\n \"\"\"\n import unittest\n class TestCase(unittest.TestCase):\n def setup(self):\n #should not be called in unittest testcases\n assert 0, 'setup'\n def teardown(self):\n #should not be called in unittest testcases\n assert 0, 'teardown'\n def setUp(self):\n print('setup')\n def tearDown(self):\n print('teardown')\n def test_fun(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_setup_teardown_linking_issue265_test_setup_teardown_linking_issue265.reprec_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_setup_teardown_linking_issue265_test_setup_teardown_linking_issue265.reprec_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 256, "end_line": 281, "span_ids": ["test_setup_teardown_linking_issue265"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_teardown_linking_issue265(pytester: Pytester) -> None:\n # we accidentally didn't integrate nose setupstate with normal setupstate\n # this test ensures that won't happen again\n pytester.makepyfile(\n '''\n import pytest\n\n class TestGeneric(object):\n def test_nothing(self):\n \"\"\"Tests the API of the implementation (for generic and specialized).\"\"\"\n\n @pytest.mark.skipif(\"True\", reason=\n \"Skip tests to check if teardown is skipped as well.\")\n class TestSkipTeardown(TestGeneric):\n\n def setup(self):\n \"\"\"Sets up my specialized implementation for $COOL_PLATFORM.\"\"\"\n raise Exception(\"should not call setup for skipped tests\")\n\n def teardown(self):\n \"\"\"Undoes the setup.\"\"\"\n raise Exception(\"should not call teardown for skipped tests\")\n '''\n )\n reprec = pytester.runpytest()\n reprec.assert_outcomes(passed=1, skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_type_TestParser.test_argument_type.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_type_TestParser.test_argument_type.None_3", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 48, "end_line": 61, "span_ids": ["TestParser.test_argument_type"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_argument_type(self) -> None:\n argument = parseopt.Argument(\"-t\", dest=\"abc\", type=int)\n assert argument.type is int\n argument = parseopt.Argument(\"-t\", dest=\"abc\", type=str)\n assert argument.type is str\n argument = parseopt.Argument(\"-t\", dest=\"abc\", type=float)\n assert argument.type is float\n with pytest.warns(DeprecationWarning):\n with pytest.raises(KeyError):\n argument = parseopt.Argument(\"-t\", dest=\"abc\", type=\"choice\")\n argument = parseopt.Argument(\n \"-t\", dest=\"abc\", type=str, choices=[\"red\", \"blue\"]\n )\n assert argument.type is str", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_split_positional_arguments_TestParser.test_parse_split_positional_arguments.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_split_positional_arguments_TestParser.test_parse_split_positional_arguments.None_6", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 167, "end_line": 179, "span_ids": ["TestParser.test_parse_split_positional_arguments"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_parse_split_positional_arguments(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"-R\", action=\"store_true\")\n parser.addoption(\"-S\", action=\"store_false\")\n args = parser.parse([\"-R\", \"4\", \"2\", \"-S\"])\n assert getattr(args, parseopt.FILE_OR_DIR) == [\"4\", \"2\"]\n args = parser.parse([\"-R\", \"-S\", \"4\", \"2\", \"-R\"])\n assert getattr(args, parseopt.FILE_OR_DIR) == [\"4\", \"2\"]\n assert args.R is True\n assert args.S is False\n args = parser.parse([\"-R\", \"4\", \"-S\", \"2\"])\n assert getattr(args, parseopt.FILE_OR_DIR) == [\"4\", \"2\"]\n assert args.R is True\n assert args.S is False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_defaultgetter_TestParser.test_parse_defaultgetter.assert_option_no_is_False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_defaultgetter_TestParser.test_parse_defaultgetter.assert_option_no_is_False", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 181, "end_line": 197, "span_ids": ["TestParser.test_parse_defaultgetter"], "tokens": 153}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_parse_defaultgetter(self) -> None:\n def defaultget(option):\n if not hasattr(option, \"type\"):\n return\n if option.type is int:\n option.default = 42\n elif option.type is str:\n option.default = \"world\"\n\n parser = parseopt.Parser(processopt=defaultget)\n parser.addoption(\"--this\", dest=\"this\", type=int, action=\"store\")\n parser.addoption(\"--hello\", dest=\"hello\", type=str, action=\"store\")\n parser.addoption(\"--no\", dest=\"no\", action=\"store_true\")\n option = parser.parse([])\n assert option.hello == \"world\"\n assert option.this == 42\n assert option.no is False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_helper_TestParser.test_drop_short_helper.assert_join_args_file": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_helper_TestParser.test_drop_short_helper.assert_join_args_file", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 199, "end_line": 234, "span_ids": ["TestParser.test_drop_short_helper"], "tokens": 415}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_drop_short_helper(self) -> None:\n parser = argparse.ArgumentParser(\n formatter_class=parseopt.DropShorterLongHelpFormatter, allow_abbrev=False\n )\n parser.add_argument(\n \"-t\", \"--twoword\", \"--duo\", \"--two-word\", \"--two\", help=\"foo\"\n )\n # throws error on --deux only!\n parser.add_argument(\n \"-d\", \"--deuxmots\", \"--deux-mots\", action=\"store_true\", help=\"foo\"\n )\n parser.add_argument(\"-s\", action=\"store_true\", help=\"single short\")\n parser.add_argument(\"--abc\", \"-a\", action=\"store_true\", help=\"bar\")\n parser.add_argument(\"--klm\", \"-k\", \"--kl-m\", action=\"store_true\", help=\"bar\")\n parser.add_argument(\n \"-P\", \"--pq-r\", \"-p\", \"--pqr\", action=\"store_true\", help=\"bar\"\n )\n parser.add_argument(\n \"--zwei-wort\", \"--zweiwort\", \"--zweiwort\", action=\"store_true\", help=\"bar\"\n )\n parser.add_argument(\n \"-x\", \"--exit-on-first\", \"--exitfirst\", action=\"store_true\", help=\"spam\"\n )\n parser.add_argument(\"files_and_dirs\", nargs=\"*\")\n args = parser.parse_args([\"-k\", \"--duo\", \"hallo\", \"--exitfirst\"])\n assert args.twoword == \"hallo\"\n assert args.klm is True\n assert args.zwei_wort is False\n assert args.exit_on_first is True\n assert args.s is False\n args = parser.parse_args([\"--deux-mots\"])\n with pytest.raises(AttributeError):\n assert args.deux_mots is True\n assert args.deuxmots is True\n args = parser.parse_args([\"file\", \"dir\"])\n assert \"|\".join(args.files_and_dirs) 
== \"file|dir\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_0_TestParser._testing_would_be_more_h": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_0_TestParser._testing_would_be_more_h", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 236, "end_line": 260, "span_ids": ["TestParser.test_drop_short_help0", "TestParser.test_drop_short_3", "TestParser.test_drop_short_2", "TestParser.test_drop_short_0"], "tokens": 291}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_drop_short_0(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--funcarg\", \"--func-arg\", action=\"store_true\")\n parser.addoption(\"--abc-def\", \"--abc-def\", action=\"store_true\")\n parser.addoption(\"--klm-hij\", action=\"store_true\")\n with pytest.raises(UsageError):\n parser.parse([\"--funcarg\", \"--k\"])\n\n def test_drop_short_2(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--func-arg\", \"--doit\", action=\"store_true\")\n args = parser.parse([\"--doit\"])\n assert args.func_arg is True\n\n def test_drop_short_3(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--func-arg\", \"--funcarg\", \"--doit\", action=\"store_true\")\n args = parser.parse([\"abcd\"])\n assert args.func_arg is False\n assert args.file_or_dir == [\"abcd\"]\n\n def test_drop_short_help0(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--func-args\", \"--doit\", help=\"foo\", action=\"store_true\")\n parser.parse([])\n help = parser.optparser.format_help()\n assert \"--func-args, --doit foo\" in help\n\n # testing would be more helpful with all help generated", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_help1_TestParser.test_drop_short_help1.assert_doit_func_arg": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_drop_short_help1_TestParser.test_drop_short_help1.assert_doit_func_arg", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 261, "end_line": 273, "span_ids": ["TestParser.test_drop_short_help1"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n def test_drop_short_help1(self, parser: parseopt.Parser) -> None:\n group = parser.getgroup(\"general\")\n 
group.addoption(\"--doit\", \"--func-args\", action=\"store_true\", help=\"foo\")\n group._addoption(\n \"-h\",\n \"--help\",\n action=\"store_true\",\n dest=\"help\",\n help=\"show help message and configuration info\",\n )\n parser.parse([\"-h\"])\n help = parser.optparser.format_help()\n assert \"-doit, --func-args foo\" in help", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_multiple_metavar_help_TestParser.test_multiple_metavar_help.assert_preferences_val": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_multiple_metavar_help_TestParser.test_multiple_metavar_help.assert_preferences_val", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 275, "end_line": 287, "span_ids": ["TestParser.test_multiple_metavar_help"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_multiple_metavar_help(self, parser: parseopt.Parser) -> None:\n \"\"\"\n Help text for options with a metavar tuple should display help\n in the form \"--preferences=value1 value2 value3\" (#2004).\n \"\"\"\n group = parser.getgroup(\"general\")\n group.addoption(\n \"--preferences\", metavar=(\"value1\", \"value2\", \"value3\"), nargs=3\n )\n group._addoption(\"-h\", \"--help\", action=\"store_true\", dest=\"help\")\n parser.parse([\"-h\"])\n help = parser.optparser.format_help()\n assert \"--preferences=value1 value2 value3\" in help", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_test_argcomplete_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_test_argcomplete_", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 292, "end_line": 346, "span_ids": ["test_argcomplete"], "tokens": 545}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_argcomplete(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n try:\n bash_version = subprocess.run(\n [\"bash\", \"--version\"],\n stdout=subprocess.PIPE,\n stderr=subprocess.DEVNULL,\n check=True,\n universal_newlines=True,\n ).stdout\n except (OSError, subprocess.CalledProcessError):\n pytest.skip(\"bash is not available\")\n if \"GNU bash\" not in bash_version:\n # See #7518.\n pytest.skip(\"not a real bash\")\n\n script = str(pytester.path.joinpath(\"test_argcomplete\"))\n\n with open(str(script), \"w\") as fp:\n # redirect output from 
argcomplete to stdin and stderr is not trivial\n # http://stackoverflow.com/q/12589419/1307905\n # so we use bash\n fp.write(\n 'COMP_WORDBREAKS=\"$COMP_WORDBREAKS\" {} -m pytest 8>&1 9>&2'.format(\n shlex.quote(sys.executable)\n )\n )\n # alternative would be extended Pytester.{run(),_run(),popen()} to be able\n # to handle a keyword argument env that replaces os.environ in popen or\n # extends the copy, advantage: could not forget to restore\n monkeypatch.setenv(\"_ARGCOMPLETE\", \"1\")\n monkeypatch.setenv(\"_ARGCOMPLETE_IFS\", \"\\x0b\")\n monkeypatch.setenv(\"COMP_WORDBREAKS\", \" \\\\t\\\\n\\\"\\\\'><=;|&(:\")\n\n arg = \"--fu\"\n monkeypatch.setenv(\"COMP_LINE\", \"pytest \" + arg)\n monkeypatch.setenv(\"COMP_POINT\", str(len(\"pytest \" + arg)))\n result = pytester.run(\"bash\", str(script), arg)\n if result.ret == 255:\n # argcomplete not found\n pytest.skip(\"argcomplete not available\")\n elif not result.stdout.str():\n pytest.skip(\n \"bash provided no output on stdout, argcomplete not available? (stderr={!r})\".format(\n result.stderr.str()\n )\n )\n else:\n result.stdout.fnmatch_lines([\"--funcargs\", \"--fulltrace\"])\n os.mkdir(\"test_argcomplete.d\")\n arg = \"test_argc\"\n monkeypatch.setenv(\"COMP_LINE\", \"pytest \" + arg)\n monkeypatch.setenv(\"COMP_POINT\", str(len(\"pytest \" + arg)))\n result = pytester.run(\"bash\", str(script), arg)\n result.stdout.fnmatch_lines([\"test_argcomplete\", \"test_argcomplete.d/\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPasteCapture.test_all_TestPasteCapture.test_all.matcher_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPasteCapture.test_all_TestPasteCapture.test_all.matcher_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_pastebin.py", "file_name": "test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 36, "end_line": 62, "span_ids": ["TestPasteCapture.test_all"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPasteCapture:\n\n def test_all(self, pytester: Pytester, pastebinlist) -> None:\n from _pytest.pytester import LineMatcher\n\n testpath = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_pass():\n pass\n def test_fail():\n assert 0\n def test_skip():\n pytest.skip(\"\")\n \"\"\"\n )\n reprec = pytester.inline_run(testpath, \"--pastebin=all\", \"-v\")\n assert reprec.countoutcomes() == [1, 1, 1]\n assert len(pastebinlist) == 1\n contents = pastebinlist[0].decode(\"utf-8\")\n matcher = LineMatcher(contents.splitlines())\n matcher.fnmatch_lines(\n [\n \"*test_pass PASSED*\",\n \"*test_fail FAILED*\",\n \"*test_skip SKIPPED*\",\n \"*== 1 failed, 1 passed, 1 skipped in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.test_create_new_paste_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.test_create_new_paste_", "embedding": null, "metadata": {"file_path": "testing/test_pastebin.py", "file_name": "test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 162, "end_line": 185, "span_ids": ["TestPaste.test_create_new_paste_failure", "TestPaste.test_create_new_paste"], "tokens": 249}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPaste:\n\n def test_create_new_paste(self, pastebin, mocked_urlopen) -> None:\n result = pastebin.create_new_paste(b\"full-paste-contents\")\n assert result == \"https://bpaste.net/show/3c0c6750bd\"\n assert len(mocked_urlopen) == 1\n url, data = mocked_urlopen[0]\n assert type(data) is bytes\n lexer = \"text\"\n assert url == \"https://bpaste.net\"\n assert \"lexer=%s\" % lexer in data.decode()\n assert \"code=full-paste-contents\" in data.decode()\n assert \"expiry=1week\" in data.decode()\n\n def test_create_new_paste_failure(self, pastebin, monkeypatch: MonkeyPatch) -> None:\n import io\n import urllib.request\n\n def response(url, data):\n stream = io.BytesIO(b\"something bad occurred\")\n return stream\n\n monkeypatch.setattr(urllib.request, \"urlopen\", response)\n result = pastebin.create_new_paste(b\"full-paste-contents\")\n assert result == \"bad response: invalid format ('something bad occurred')\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_addhooks_nohooks_TestPytestPluginInteractions.test_do_option_postinitialize.assert_config_option_test": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_addhooks_nohooks_TestPytestPluginInteractions.test_do_option_postinitialize.assert_config_option_test", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 52, "end_line": 75, "span_ids": ["TestPytestPluginInteractions.test_addhooks_nohooks", "TestPytestPluginInteractions.test_do_option_postinitialize"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginInteractions:\n\n def test_addhooks_nohooks(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import sys\n def pytest_addhooks(pluginmanager):\n pluginmanager.add_hookspecs(sys)\n \"\"\"\n )\n res = pytester.runpytest()\n assert res.ret != 0\n res.stderr.fnmatch_lines([\"*did not find*sys*\"])\n\n def test_do_option_postinitialize(self, pytester: Pytester) -> None:\n config = 
pytester.parseconfigure()\n assert not hasattr(config.option, \"test123\")\n p = pytester.makepyfile(\n \"\"\"\n def pytest_addoption(parser):\n parser.addoption('--test123', action=\"store_true\",\n default=True)\n \"\"\"\n )\n config.pluginmanager._importconftest(p, importmode=\"prepend\")\n assert config.option.test123", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_configure_TestPytestPluginInteractions.test_configure.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_configure_TestPytestPluginInteractions.test_configure.None_4", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 77, "end_line": 95, "span_ids": ["TestPytestPluginInteractions.test_configure.A", "TestPytestPluginInteractions.test_configure.A.pytest_configure", "TestPytestPluginInteractions.test_configure"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginInteractions:\n\n def test_configure(self, pytester: Pytester) -> None:\n config = pytester.parseconfig()\n values = []\n\n class A:\n def pytest_configure(self):\n values.append(self)\n\n config.pluginmanager.register(A())\n assert len(values) == 0\n config._do_configure()\n assert len(values) == 1\n config.pluginmanager.register(A()) # leads to a configured() plugin\n assert len(values) == 2\n assert values[0] != values[1]\n\n config._ensure_unconfigure()\n config.pluginmanager.register(A())\n assert len(values) == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_tracing_TestPytestPluginInteractions.test_hook_tracing.try_.finally_.undo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_tracing_TestPytestPluginInteractions.test_hook_tracing.try_.finally_.undo_", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 97, "end_line": 128, "span_ids": ["TestPytestPluginInteractions.test_hook_tracing.api1.pytest_plugin_registered", "TestPytestPluginInteractions.test_hook_tracing", "TestPytestPluginInteractions.test_hook_tracing.api1", "TestPytestPluginInteractions.test_hook_tracing.api2", "TestPytestPluginInteractions.test_hook_tracing.api2.pytest_plugin_registered"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginInteractions:\n\n def test_hook_tracing(self, _config_for_test: Config) -> None:\n pytestpm = _config_for_test.pluginmanager # fully initialized with plugins\n saveindent = []\n\n class api1:\n def pytest_plugin_registered(self):\n saveindent.append(pytestpm.trace.root.indent)\n\n class api2:\n def pytest_plugin_registered(self):\n saveindent.append(pytestpm.trace.root.indent)\n raise ValueError()\n\n values: List[str] = []\n pytestpm.trace.root.setwriter(values.append)\n undo = pytestpm.enable_tracing()\n try:\n indent = pytestpm.trace.root.indent\n p = api1()\n pytestpm.register(p)\n assert pytestpm.trace.root.indent == indent\n assert len(values) >= 2\n assert \"pytest_plugin_registered\" in values[0]\n assert \"finish\" in values[1]\n\n values[:] = []\n with pytest.raises(ValueError):\n pytestpm.register(api2())\n assert pytestpm.trace.root.indent == indent\n assert saveindent[0] > indent\n finally:\n undo()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_proxy_TestPytestPluginInteractions.test_hook_proxy.assert_ihook_a_is_not_iho": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_proxy_TestPytestPluginInteractions.test_hook_proxy.assert_ihook_a_is_not_iho", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 130, "end_line": 144, "span_ids": ["TestPytestPluginInteractions.test_hook_proxy"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginInteractions:\n\n def test_hook_proxy(self, pytester: Pytester) -> None:\n \"\"\"Test the gethookproxy function(#2016)\"\"\"\n config = pytester.parseconfig()\n session = Session.from_config(config)\n pytester.makepyfile(**{\"tests/conftest.py\": \"\", \"tests/subdir/conftest.py\": \"\"})\n\n conftest1 = pytester.path.joinpath(\"tests/conftest.py\")\n conftest2 = pytester.path.joinpath(\"tests/subdir/conftest.py\")\n\n config.pluginmanager._importconftest(conftest1, importmode=\"prepend\")\n ihook_a = session.gethookproxy(pytester.path / \"tests\")\n assert ihook_a is not None\n config.pluginmanager._importconftest(conftest2, importmode=\"prepend\")\n ihook_b = session.gethookproxy(pytester.path / \"tests\")\n assert ihook_a is not ihook_b", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_test_default_markers_test_importplugin_error_message.assert_in_test_traceback": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_test_default_markers_test_importplugin_error_message.assert_in_test_traceback", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 177, "end_line": 204, "span_ids": ["test_importplugin_error_message", "test_default_markers"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_default_markers(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--markers\")\n result.stdout.fnmatch_lines([\"*tryfirst*first*\", \"*trylast*last*\"])\n\n\ndef test_importplugin_error_message(\n pytester: Pytester, pytestpm: PytestPluginManager\n) -> None:\n \"\"\"Don't hide import errors when importing plugins and provide\n an easy to debug message.\n\n See #375 and #1998.\n \"\"\"\n pytester.syspathinsert(pytester.path)\n pytester.makepyfile(\n qwe=\"\"\"\\\n def test_traceback():\n raise ImportError('Not possible to import: \u263a')\n test_traceback()\n \"\"\"\n )\n with pytest.raises(ImportError) as excinfo:\n pytestpm.import_plugin(\"qwe\")\n\n assert str(excinfo.value).endswith(\n 'Error importing plugin \"qwe\": Not possible to import: \u263a'\n )\n assert \"in test_traceback\" in str(excinfo.traceback[-1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager_TestPytestPluginManager.test_canonical_import.assert_pm_is_registered_m": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager_TestPytestPluginManager.test_canonical_import.assert_pm_is_registered_m", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 207, "end_line": 226, "span_ids": ["TestPytestPluginManager", "TestPytestPluginManager.test_canonical_import", "TestPytestPluginManager.test_register_imported_modules"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n def test_register_imported_modules(self) -> None:\n pm = PytestPluginManager()\n mod = types.ModuleType(\"x.y.pytest_hello\")\n pm.register(mod)\n assert pm.is_registered(mod)\n values = pm.get_plugins()\n assert mod in values\n pytest.raises(ValueError, pm.register, mod)\n pytest.raises(ValueError, lambda: pm.register(mod))\n # assert not pm.is_registered(mod2)\n assert pm.get_plugins() == values\n\n def test_canonical_import(self, monkeypatch):\n mod = types.ModuleType(\"pytest_xyz\")\n monkeypatch.setitem(sys.modules, \"pytest_xyz\", mod)\n pm = PytestPluginManager()\n pm.import_plugin(\"pytest_xyz\")\n assert 
pm.get_plugin(\"pytest_xyz\") == mod\n assert pm.is_registered(mod)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_module_import_module_TestPytestPluginManager.test_consider_module_import_module.assert_len_values_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_module_import_module_TestPytestPluginManager.test_consider_module_import_module.assert_len_values_1", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 240, "end_line": 256, "span_ids": ["TestPytestPluginManager.test_consider_module_import_module"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n\n def test_consider_module_import_module(\n self, pytester: Pytester, _config_for_test: Config\n ) -> None:\n pytestpm = _config_for_test.pluginmanager\n mod = types.ModuleType(\"x\")\n mod.__dict__[\"pytest_plugins\"] = \"pytest_a\"\n aplugin = pytester.makepyfile(pytest_a=\"#\")\n reprec = pytester.make_hook_recorder(pytestpm)\n pytester.syspathinsert(aplugin.parent)\n pytestpm.consider_module(mod)\n call = reprec.getcall(pytestpm.hook.pytest_plugin_registered.name)\n assert call.plugin.__name__ == \"pytest_a\"\n\n # check that it is not registered twice\n pytestpm.consider_module(mod)\n values = reprec.getcalls(\"pytest_plugin_registered\")\n assert len(values) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_env_fails_to_import_TestPytestPluginManager.test_plugin_skip.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_env_fails_to_import_TestPytestPluginManager.test_plugin_skip.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 258, "end_line": 279, "span_ids": ["TestPytestPluginManager.test_plugin_skip", "TestPytestPluginManager.test_consider_env_fails_to_import"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n\n def test_consider_env_fails_to_import(\n self, monkeypatch: MonkeyPatch, pytestpm: PytestPluginManager\n ) -> None:\n monkeypatch.setenv(\"PYTEST_PLUGINS\", 
\"nonexisting\", prepend=\",\")\n with pytest.raises(ImportError):\n pytestpm.consider_env()\n\n @pytest.mark.filterwarnings(\"always\")\n def test_plugin_skip(self, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n p = pytester.makepyfile(\n skipping1=\"\"\"\n import pytest\n pytest.skip(\"hello\", allow_module_level=True)\n \"\"\"\n )\n shutil.copy(p, p.with_name(\"skipping2.py\"))\n monkeypatch.setenv(\"PYTEST_PLUGINS\", \"skipping2\")\n result = pytester.runpytest(\"-p\", \"skipping1\", syspathinsert=True)\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stdout.fnmatch_lines(\n [\"*skipped plugin*skipping1*hello*\", \"*skipped plugin*skipping2*hello*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_env_plugin_instantiation_TestPytestPluginManager.test_consider_env_plugin_instantiation.assert_l2_l3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_env_plugin_instantiation_TestPytestPluginManager.test_consider_env_plugin_instantiation.assert_l2_l3", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 281, "end_line": 297, "span_ids": ["TestPytestPluginManager.test_consider_env_plugin_instantiation"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n\n def test_consider_env_plugin_instantiation(\n self,\n pytester: Pytester,\n monkeypatch: MonkeyPatch,\n pytestpm: PytestPluginManager,\n ) -> None:\n pytester.syspathinsert()\n pytester.makepyfile(xy123=\"#\")\n monkeypatch.setitem(os.environ, \"PYTEST_PLUGINS\", \"xy123\")\n l1 = len(pytestpm.get_plugins())\n pytestpm.consider_env()\n l2 = len(pytestpm.get_plugins())\n assert l2 == l1 + 1\n assert pytestpm.get_plugin(\"xy123\")\n pytestpm.consider_env()\n l3 = len(pytestpm.get_plugins())\n assert l2 == l3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_pluginmanager_ENV_startup_TestPytestPluginManager.test_pluginmanager_ENV_startup.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_pluginmanager_ENV_startup_TestPytestPluginManager.test_pluginmanager_ENV_startup.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 299, "end_line": 314, "span_ids": ["TestPytestPluginManager.test_pluginmanager_ENV_startup"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n\n def test_pluginmanager_ENV_startup(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n pytester.makepyfile(pytest_x500=\"#\")\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_hello(pytestconfig):\n plugin = pytestconfig.pluginmanager.get_plugin('pytest_x500')\n assert plugin is not None\n \"\"\"\n )\n monkeypatch.setenv(\"PYTEST_PLUGINS\", \"pytest_x500\", prepend=\",\")\n result = pytester.runpytest(p, syspathinsert=True)\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_import_plugin_importname_TestPytestPluginManager.test_import_plugin_importname.assert_plugin2_is_plugin1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_import_plugin_importname_TestPytestPluginManager.test_import_plugin_importname.assert_plugin2_is_plugin1", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 316, "end_line": 333, "span_ids": ["TestPytestPluginManager.test_import_plugin_importname"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n\n def test_import_plugin_importname(\n self, pytester: Pytester, pytestpm: PytestPluginManager\n ) -> None:\n pytest.raises(ImportError, pytestpm.import_plugin, \"qweqwex.y\")\n pytest.raises(ImportError, pytestpm.import_plugin, \"pytest_qweqwx.y\")\n\n pytester.syspathinsert()\n pluginname = \"pytest_hello\"\n pytester.makepyfile(**{pluginname: \"\"})\n pytestpm.import_plugin(\"pytest_hello\")\n len1 = len(pytestpm.get_plugins())\n pytestpm.import_plugin(\"pytest_hello\")\n len2 = len(pytestpm.get_plugins())\n assert len1 == len2\n plugin1 = pytestpm.get_plugin(\"pytest_hello\")\n assert plugin1.__name__.endswith(\"pytest_hello\")\n plugin2 = pytestpm.get_plugin(\"pytest_hello\")\n assert plugin2 is plugin1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_import_plugin_dotted_name_TestPytestPluginManager.test_consider_conftest_deps.with_pytest_raises_Import.pytestpm_consider_conftes": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_import_plugin_dotted_name_TestPytestPluginManager.test_consider_conftest_deps.with_pytest_raises_Import.pytestpm_consider_conftes", "embedding": null, "metadata": 
{"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 335, "end_line": 355, "span_ids": ["TestPytestPluginManager.test_import_plugin_dotted_name", "TestPytestPluginManager.test_consider_conftest_deps"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n\n def test_import_plugin_dotted_name(\n self, pytester: Pytester, pytestpm: PytestPluginManager\n ) -> None:\n pytest.raises(ImportError, pytestpm.import_plugin, \"qweqwex.y\")\n pytest.raises(ImportError, pytestpm.import_plugin, \"pytest_qweqwex.y\")\n\n pytester.syspathinsert()\n pytester.mkpydir(\"pkg\").joinpath(\"plug.py\").write_text(\"x=3\")\n pluginname = \"pkg.plug\"\n pytestpm.import_plugin(pluginname)\n mod = pytestpm.get_plugin(\"pkg.plug\")\n assert mod.x == 3\n\n def test_consider_conftest_deps(\n self,\n pytester: Pytester,\n pytestpm: PytestPluginManager,\n ) -> None:\n mod = import_path(pytester.makepyfile(\"pytest_plugins='xyz'\"))\n with pytest.raises(ImportError):\n pytestpm.consider_conftest(mod)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming_TestPytestPluginManagerBootstrapming.test_preparse_args.with_pytest_raises_UsageE.pytestpm_consider_prepars": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming_TestPytestPluginManagerBootstrapming.test_preparse_args.with_pytest_raises_UsageE.pytestpm_consider_prepars", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 358, "end_line": 374, "span_ids": ["TestPytestPluginManagerBootstrapming.test_preparse_args", "TestPytestPluginManagerBootstrapming"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManagerBootstrapming:\n def test_preparse_args(self, pytestpm: PytestPluginManager) -> None:\n pytest.raises(\n ImportError, lambda: pytestpm.consider_preparse([\"xyz\", \"-p\", \"hello123\"])\n )\n\n # Handles -p without space (#3532).\n with pytest.raises(ImportError) as excinfo:\n pytestpm.consider_preparse([\"-phello123\"])\n assert '\"hello123\"' in excinfo.value.args[0]\n pytestpm.consider_preparse([\"-pno:hello123\"])\n\n # Handles -p without following arg (when used without argparse).\n pytestpm.consider_preparse([\"-p\"])\n\n with pytest.raises(UsageError, match=\"^plugin main cannot be disabled$\"):\n pytestpm.consider_preparse([\"-p\", \"no:main\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_unregistered_alredy_registered.assert_42_not_in_l2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_unregistered_alredy_registered.assert_42_not_in_l2", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 376, "end_line": 392, "span_ids": ["TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_unregistered_alredy_registered", "TestPytestPluginManagerBootstrapming.test_plugin_prevent_register"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManagerBootstrapming:\n\n def test_plugin_prevent_register(self, pytestpm: PytestPluginManager) -> None:\n pytestpm.consider_preparse([\"xyz\", \"-p\", \"no:abc\"])\n l1 = pytestpm.get_plugins()\n pytestpm.register(42, name=\"abc\")\n l2 = pytestpm.get_plugins()\n assert len(l2) == len(l1)\n assert 42 not in l2\n\n def test_plugin_prevent_register_unregistered_alredy_registered(\n self, pytestpm: PytestPluginManager\n ) -> None:\n pytestpm.register(42, name=\"abc\")\n l1 = pytestpm.get_plugins()\n assert 42 in l1\n pytestpm.consider_preparse([\"xyz\", \"-p\", \"no:abc\"])\n l2 = pytestpm.get_plugins()\n assert 42 not in l2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_stepwise_on_cacheprovider_unregister_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_stepwise_on_cacheprovider_unregister_", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 394, "end_line": 418, "span_ids": ["TestPytestPluginManagerBootstrapming.test_plugin_prevent_register_stepwise_on_cacheprovider_unregister", "TestPytestPluginManagerBootstrapming.test_blocked_plugin_can_be_used"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManagerBootstrapming:\n\n def test_plugin_prevent_register_stepwise_on_cacheprovider_unregister(\n self, pytestpm: PytestPluginManager\n ) -> None:\n \"\"\"From PR #4304: The only way to unregister a 
module is documented at\n the end of https://docs.pytest.org/en/stable/plugins.html.\n\n When unregister cacheprovider, then unregister stepwise too.\n \"\"\"\n pytestpm.register(42, name=\"cacheprovider\")\n pytestpm.register(43, name=\"stepwise\")\n l1 = pytestpm.get_plugins()\n assert 42 in l1\n assert 43 in l1\n pytestpm.consider_preparse([\"xyz\", \"-p\", \"no:cacheprovider\"])\n l2 = pytestpm.get_plugins()\n assert 42 not in l2\n assert 43 not in l2\n\n def test_blocked_plugin_can_be_used(self, pytestpm: PytestPluginManager) -> None:\n pytestpm.consider_preparse([\"xyz\", \"-p\", \"no:abc\", \"-p\", \"abc\"])\n\n assert pytestpm.has_plugin(\"abc\")\n assert not pytestpm.is_blocked(\"abc\")\n assert not pytestpm.is_blocked(\"pytest_abc\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_hookrecorder_basic_test_hookrecorder_basic.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_hookrecorder_basic_test_hookrecorder_basic.None_2", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 187, "end_line": 199, "span_ids": ["test_hookrecorder_basic"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"holder\", make_holder())\ndef test_hookrecorder_basic(holder) -> None:\n pm = PytestPluginManager()\n pm.add_hookspecs(holder)\n rec = HookRecorder(pm)\n pm.hook.pytest_xyz(arg=123)\n call = rec.popcall(\"pytest_xyz\")\n assert call.arg == 123\n assert call._name == \"pytest_xyz\"\n pytest.raises(pytest.fail.Exception, rec.popcall, \"abc\")\n pm.hook.pytest_xyz_noarg()\n call = rec.popcall(\"pytest_xyz_noarg\")\n assert call._name == \"pytest_xyz_noarg\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup_TestInlineRunModulesCleanup.spy_factory.return.SysModulesSnapshotSpy": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup_TestInlineRunModulesCleanup.spy_factory.return.SysModulesSnapshotSpy", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 220, "end_line": 244, "span_ids": ["TestInlineRunModulesCleanup.spy_factory.SysModulesSnapshotSpy:2", "TestInlineRunModulesCleanup.spy_factory.SysModulesSnapshotSpy", "TestInlineRunModulesCleanup.test_inline_run_test_module_not_cleaned_up", "TestInlineRunModulesCleanup", "TestInlineRunModulesCleanup.spy_factory"], "tokens": 235}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInlineRunModulesCleanup:\n def test_inline_run_test_module_not_cleaned_up(self, pytester: Pytester) -> None:\n test_mod = pytester.makepyfile(\"def test_foo(): assert True\")\n result = pytester.inline_run(str(test_mod))\n assert result.ret == ExitCode.OK\n # rewrite module, now test should fail if module was re-imported\n test_mod.write_text(\"def test_foo(): assert False\")\n result2 = pytester.inline_run(str(test_mod))\n assert result2.ret == ExitCode.TESTS_FAILED\n\n def spy_factory(self):\n class SysModulesSnapshotSpy:\n instances: List[\"SysModulesSnapshotSpy\"] = [] # noqa: F821\n\n def __init__(self, preserve=None) -> None:\n SysModulesSnapshotSpy.instances.append(self)\n self._spy_restore_count = 0\n self._spy_preserve = preserve\n self.__snapshot = SysModulesSnapshot(preserve=preserve)\n\n def restore(self):\n self._spy_restore_count += 1\n return self.__snapshot.restore()\n\n return SysModulesSnapshotSpy", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup.test_inline_run_taking_and_restoring_a_sys_modules_snapshot_TestInlineRunModulesCleanup.test_inline_run_taking_and_restoring_a_sys_modules_snapshot.assert_all_sys_modules_x_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup.test_inline_run_taking_and_restoring_a_sys_modules_snapshot_TestInlineRunModulesCleanup.test_inline_run_taking_and_restoring_a_sys_modules_snapshot.assert_all_sys_modules_x_", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 246, "end_line": 265, "span_ids": ["TestInlineRunModulesCleanup.test_inline_run_taking_and_restoring_a_sys_modules_snapshot"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInlineRunModulesCleanup:\n\n def test_inline_run_taking_and_restoring_a_sys_modules_snapshot(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n spy_factory = self.spy_factory()\n monkeypatch.setattr(pytester_mod, \"SysModulesSnapshot\", spy_factory)\n pytester.syspathinsert()\n original = dict(sys.modules)\n pytester.makepyfile(import1=\"# you son of a silly person\")\n pytester.makepyfile(import2=\"# my hovercraft is full of eels\")\n test_mod = pytester.makepyfile(\n \"\"\"\n import import1\n def test_foo(): import import2\"\"\"\n )\n pytester.inline_run(str(test_mod))\n assert len(spy_factory.instances) == 1\n spy = spy_factory.instances[0]\n assert spy._spy_restore_count == 1\n assert sys.modules == original\n assert all(sys.modules[x] is original[x] for x in sys.modules)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup.test_inline_run_sys_modules_snapshot_restore_preserving_modules_TestInlineRunModulesCleanup.test_external_test_module_imports_not_cleaned_up.assert_imported_data_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestInlineRunModulesCleanup.test_inline_run_sys_modules_snapshot_restore_preserving_modules_TestInlineRunModulesCleanup.test_external_test_module_imports_not_cleaned_up.assert_imported_data_4", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 267, "end_line": 294, "span_ids": ["TestInlineRunModulesCleanup.test_external_test_module_imports_not_cleaned_up", "TestInlineRunModulesCleanup.test_inline_run_sys_modules_snapshot_restore_preserving_modules"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInlineRunModulesCleanup:\n\n def test_inline_run_sys_modules_snapshot_restore_preserving_modules(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n spy_factory = self.spy_factory()\n monkeypatch.setattr(pytester_mod, \"SysModulesSnapshot\", spy_factory)\n test_mod = pytester.makepyfile(\"def test_foo(): pass\")\n pytester.inline_run(str(test_mod))\n spy = spy_factory.instances[0]\n assert not spy._spy_preserve(\"black_knight\")\n assert spy._spy_preserve(\"zope\")\n assert spy._spy_preserve(\"zope.interface\")\n assert spy._spy_preserve(\"zopelicious\")\n\n def test_external_test_module_imports_not_cleaned_up(\n self, pytester: Pytester\n ) -> None:\n pytester.syspathinsert()\n pytester.makepyfile(imported=\"data = 'you son of a silly person'\")\n import imported\n\n test_mod = pytester.makepyfile(\n \"\"\"\n def test_foo():\n import imported\n imported.data = 42\"\"\"\n )\n pytester.inline_run(str(test_mod))\n assert imported.data == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_assert_outcomes_after_pytest_error_test_cwd_snapshot.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_assert_outcomes_after_pytest_error_test_cwd_snapshot.None_1", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 297, "end_line": 313, "span_ids": ["test_assert_outcomes_after_pytest_error", "test_cwd_snapshot"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_assert_outcomes_after_pytest_error(pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_foo(): assert True\")\n\n result = pytester.runpytest(\"--unexpected-argument\")\n 
with pytest.raises(ValueError, match=\"Pytest terminal summary report not found\"):\n result.assert_outcomes(passed=0)\n\n\ndef test_cwd_snapshot(pytester: Pytester) -> None:\n foo = pytester.mkdir(\"foo\")\n bar = pytester.mkdir(\"bar\")\n os.chdir(foo)\n snapshot = CwdSnapshot()\n os.chdir(bar)\n assert Path().absolute() == bar\n snapshot.restore()\n assert Path().absolute() == foo", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysModulesSnapshot_TestSysModulesSnapshot.test_restore_reloaded.assert_sys_modules_ori": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysModulesSnapshot_TestSysModulesSnapshot.test_restore_reloaded.assert_sys_modules_ori", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 316, "end_line": 347, "span_ids": ["TestSysModulesSnapshot.test_restore_reloaded", "TestSysModulesSnapshot", "TestSysModulesSnapshot.test_remove_added", "TestSysModulesSnapshot.test_add_removed"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSysModulesSnapshot:\n key = \"my-test-module\"\n\n def test_remove_added(self) -> None:\n original = dict(sys.modules)\n assert self.key not in sys.modules\n snapshot = SysModulesSnapshot()\n sys.modules[self.key] = ModuleType(\"something\")\n assert self.key in sys.modules\n snapshot.restore()\n assert sys.modules == original\n\n def test_add_removed(self, monkeypatch: MonkeyPatch) -> None:\n assert self.key not in sys.modules\n monkeypatch.setitem(sys.modules, self.key, ModuleType(\"something\"))\n assert self.key in sys.modules\n original = dict(sys.modules)\n snapshot = SysModulesSnapshot()\n del sys.modules[self.key]\n assert self.key not in sys.modules\n snapshot.restore()\n assert sys.modules == original\n\n def test_restore_reloaded(self, monkeypatch: MonkeyPatch) -> None:\n assert self.key not in sys.modules\n monkeypatch.setitem(sys.modules, self.key, ModuleType(\"something\"))\n assert self.key in sys.modules\n original = dict(sys.modules)\n snapshot = SysModulesSnapshot()\n sys.modules[self.key] = ModuleType(\"something else\")\n snapshot.restore()\n assert sys.modules == original", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysModulesSnapshot.test_preserve_modules_TestSysModulesSnapshot.test_preserve_container.assert_sys_modules_ori": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysModulesSnapshot.test_preserve_modules_TestSysModulesSnapshot.test_preserve_container.assert_sys_modules_ori", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 349, "end_line": 
376, "span_ids": ["TestSysModulesSnapshot.test_preserve_modules", "TestSysModulesSnapshot.test_preserve_container"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSysModulesSnapshot:\n\n def test_preserve_modules(self, monkeypatch: MonkeyPatch) -> None:\n key = [self.key + str(i) for i in range(3)]\n assert not any(k in sys.modules for k in key)\n for i, k in enumerate(key):\n mod = ModuleType(\"something\" + str(i))\n monkeypatch.setitem(sys.modules, k, mod)\n original = dict(sys.modules)\n\n def preserve(name):\n return name in (key[0], key[1], \"some-other-key\")\n\n snapshot = SysModulesSnapshot(preserve=preserve)\n sys.modules[key[0]] = original[key[0]] = ModuleType(\"something else0\")\n sys.modules[key[1]] = original[key[1]] = ModuleType(\"something else1\")\n sys.modules[key[2]] = ModuleType(\"something else2\")\n snapshot.restore()\n assert sys.modules == original\n\n def test_preserve_container(self, monkeypatch: MonkeyPatch) -> None:\n original = dict(sys.modules)\n assert self.key not in original\n replacement = dict(sys.modules)\n replacement[self.key] = ModuleType(\"life of brian\")\n snapshot = SysModulesSnapshot()\n monkeypatch.setattr(sys, \"modules\", replacement)\n snapshot.restore()\n assert sys.modules is replacement\n assert sys.modules == original", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysPathsSnapshot_TestSysPathsSnapshot.test_restore.assert_getattr_sys_other": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysPathsSnapshot_TestSysPathsSnapshot.test_restore.assert_getattr_sys_other", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 379, "end_line": 408, "span_ids": ["TestSysPathsSnapshot.path", "TestSysPathsSnapshot.test_restore", "TestSysPathsSnapshot"], "tokens": 353}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"path_type\", (\"path\", \"meta_path\"))\nclass TestSysPathsSnapshot:\n other_path = {\"path\": \"meta_path\", \"meta_path\": \"path\"}\n\n @staticmethod\n def path(n: int) -> str:\n return \"my-dirty-little-secret-\" + str(n)\n\n def test_restore(self, monkeypatch: MonkeyPatch, path_type) -> None:\n other_path_type = self.other_path[path_type]\n for i in range(10):\n assert self.path(i) not in getattr(sys, path_type)\n sys_path = [self.path(i) for i in range(6)]\n monkeypatch.setattr(sys, path_type, sys_path)\n original = list(sys_path)\n original_other = list(getattr(sys, other_path_type))\n snapshot = SysPathsSnapshot()\n transformation = {\"source\": (0, 1, 2, 3, 4, 5), \"target\": (6, 2, 9, 7, 5, 8)}\n assert sys_path == [self.path(x) for x 
in transformation[\"source\"]]\n sys_path[1] = self.path(6)\n sys_path[3] = self.path(7)\n sys_path.append(self.path(8))\n del sys_path[4]\n sys_path[3:3] = [self.path(9)]\n del sys_path[0]\n assert sys_path == [self.path(x) for x in transformation[\"target\"]]\n snapshot.restore()\n assert getattr(sys, path_type) is sys_path\n assert getattr(sys, path_type) == original\n assert getattr(sys, other_path_type) == original_other", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarningsRecorderChecker_TestWarningsRecorderChecker.test_recording.with_rec_.pytest_raises_AssertionEr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarningsRecorderChecker_TestWarningsRecorderChecker.test_recording.with_rec_.pytest_raises_AssertionEr", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 30, "end_line": 45, "span_ids": ["TestWarningsRecorderChecker.test_recording", "TestWarningsRecorderChecker"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarningsRecorderChecker:\n def test_recording(self) -> None:\n rec = WarningsRecorder(_ispytest=True)\n with rec:\n assert not rec.list\n warnings.warn_explicit(\"hello\", UserWarning, \"xyz\", 13)\n assert len(rec.list) == 1\n warnings.warn(DeprecationWarning(\"hello\"))\n assert len(rec.list) == 2\n warn = rec.pop()\n assert str(warn.message) == \"hello\"\n values = rec.list\n rec.clear()\n assert len(rec.list) == 0\n assert values is rec.list\n pytest.raises(AssertionError, rec.pop)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_modes_TestDeprecatedCall.test_deprecated_call_modes.if_mode_call_.else_.with_pytest_deprecated_ca.assert_f_10": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_modes_TestDeprecatedCall.test_deprecated_call_modes.if_mode_call_.else_.with_pytest_deprecated_ca.assert_f_10", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 140, "end_line": 162, "span_ids": ["TestDeprecatedCall.test_deprecated_call_modes"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecatedCall:\n\n @pytest.mark.parametrize(\n \"warning_type\", [PendingDeprecationWarning, DeprecationWarning]\n )\n 
@pytest.mark.parametrize(\"mode\", [\"context_manager\", \"call\"])\n @pytest.mark.parametrize(\"call_f_first\", [True, False])\n @pytest.mark.filterwarnings(\"ignore\")\n def test_deprecated_call_modes(self, warning_type, mode, call_f_first) -> None:\n \"\"\"Ensure deprecated_call() captures a deprecation warning as expected inside its\n block/function.\n \"\"\"\n\n def f():\n warnings.warn(warning_type(\"hi\"))\n return 10\n\n # ensure deprecated_call() can capture the warning even if it has already been triggered\n if call_f_first:\n assert f() == 10\n if mode == \"call\":\n assert pytest.deprecated_call(f) == 10\n else:\n with pytest.deprecated_call():\n assert f() == 10", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_exception_is_raised_TestDeprecatedCall.test_deprecated_call_supports_match.with_pytest_raises_pytest.with_pytest_deprecated_ca.warnings_warn_this_is_no": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_exception_is_raised_TestDeprecatedCall.test_deprecated_call_supports_match.with_pytest_raises_pytest.with_pytest_deprecated_ca.warnings_warn_this_is_no", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 164, "end_line": 207, "span_ids": ["TestDeprecatedCall.test_deprecated_call_supports_match", "TestDeprecatedCall.test_deprecated_call_exception_is_raised", "TestDeprecatedCall.test_deprecated_call_specificity"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecatedCall:\n\n @pytest.mark.parametrize(\"mode\", [\"context_manager\", \"call\"])\n def test_deprecated_call_exception_is_raised(self, mode) -> None:\n \"\"\"If the block of the code being tested by deprecated_call() raises an exception,\n it must raise the exception undisturbed.\n \"\"\"\n\n def f():\n raise ValueError(\"some exception\")\n\n with pytest.raises(ValueError, match=\"some exception\"):\n if mode == \"call\":\n pytest.deprecated_call(f)\n else:\n with pytest.deprecated_call():\n f()\n\n def test_deprecated_call_specificity(self) -> None:\n other_warnings = [\n Warning,\n UserWarning,\n SyntaxWarning,\n RuntimeWarning,\n FutureWarning,\n ImportWarning,\n UnicodeWarning,\n ]\n for warning in other_warnings:\n\n def f():\n warnings.warn(warning(\"hi\"))\n\n with pytest.raises(pytest.fail.Exception):\n pytest.deprecated_call(f)\n with pytest.raises(pytest.fail.Exception):\n with pytest.deprecated_call():\n f()\n\n def test_deprecated_call_supports_match(self) -> None:\n with pytest.deprecated_call(match=r\"must be \\d+$\"):\n warnings.warn(\"value must be 42\", DeprecationWarning)\n\n with pytest.raises(pytest.fail.Exception):\n with pytest.deprecated_call(match=r\"must be \\d+$\"):\n warnings.warn(\"this is not here\", DeprecationWarning)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_as_contextmanager_TestWarns.test_as_contextmanager.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_as_contextmanager_TestWarns.test_as_contextmanager.None_3", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 243, "end_line": 290, "span_ids": ["TestWarns.test_as_contextmanager"], "tokens": 405}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarns:\n\n def test_as_contextmanager(self) -> None:\n with pytest.warns(RuntimeWarning):\n warnings.warn(\"runtime\", RuntimeWarning)\n\n with pytest.warns(UserWarning):\n warnings.warn(\"user\", UserWarning)\n\n with pytest.raises(pytest.fail.Exception) as excinfo:\n with pytest.warns(RuntimeWarning):\n warnings.warn(\"user\", UserWarning)\n excinfo.match(\n r\"DID NOT WARN. No warnings of type \\(.+RuntimeWarning.+,\\) was emitted. \"\n r\"The list of emitted warnings is: \\[UserWarning\\('user',?\\)\\].\"\n )\n\n with pytest.raises(pytest.fail.Exception) as excinfo:\n with pytest.warns(UserWarning):\n warnings.warn(\"runtime\", RuntimeWarning)\n excinfo.match(\n r\"DID NOT WARN. No warnings of type \\(.+UserWarning.+,\\) was emitted. \"\n r\"The list of emitted warnings is: \\[RuntimeWarning\\('runtime',?\\)\\].\"\n )\n\n with pytest.raises(pytest.fail.Exception) as excinfo:\n with pytest.warns(UserWarning):\n pass\n excinfo.match(\n r\"DID NOT WARN. No warnings of type \\(.+UserWarning.+,\\) was emitted. \"\n r\"The list of emitted warnings is: \\[\\].\"\n )\n\n warning_classes = (UserWarning, FutureWarning)\n with pytest.raises(pytest.fail.Exception) as excinfo:\n with pytest.warns(warning_classes) as warninfo:\n warnings.warn(\"runtime\", RuntimeWarning)\n warnings.warn(\"import\", ImportWarning)\n\n message_template = (\n \"DID NOT WARN. No warnings of type {0} was emitted. 
\"\n \"The list of emitted warnings is: {1}.\"\n )\n excinfo.match(\n re.escape(\n message_template.format(\n warning_classes, [each.message for each in warninfo]\n )\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_record_TestWarns.test_record_only.assert_str_record_1_mess": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_record_TestWarns.test_record_only.assert_str_record_1_mess", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 292, "end_line": 306, "span_ids": ["TestWarns.test_record", "TestWarns.test_record_only"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarns:\n\n def test_record(self) -> None:\n with pytest.warns(UserWarning) as record:\n warnings.warn(\"user\", UserWarning)\n\n assert len(record) == 1\n assert str(record[0].message) == \"user\"\n\n def test_record_only(self) -> None:\n with pytest.warns(None) as record:\n warnings.warn(\"user\", UserWarning)\n warnings.warn(\"runtime\", RuntimeWarning)\n\n assert len(record) == 2\n assert str(record[0].message) == \"user\"\n assert str(record[1].message) == \"runtime\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_record_by_subclass_TestWarns.test_record_by_subclass.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_record_by_subclass_TestWarns.test_record_by_subclass.None_5", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 308, "end_line": 329, "span_ids": ["TestWarns.test_record_by_subclass.MyUserWarning:2", "TestWarns.test_record_by_subclass", "TestWarns.test_record_by_subclass.MyRuntimeWarning", "TestWarns.test_record_by_subclass.MyRuntimeWarning:2", "TestWarns.test_record_by_subclass.MyUserWarning"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarns:\n\n def test_record_by_subclass(self) -> None:\n with pytest.warns(Warning) as record:\n warnings.warn(\"user\", UserWarning)\n warnings.warn(\"runtime\", RuntimeWarning)\n\n assert len(record) == 2\n assert str(record[0].message) == \"user\"\n assert str(record[1].message) == \"runtime\"\n\n class MyUserWarning(UserWarning):\n pass\n\n class MyRuntimeWarning(RuntimeWarning):\n pass\n\n with pytest.warns((UserWarning, RuntimeWarning)) as record:\n warnings.warn(\"user\", MyUserWarning)\n 
warnings.warn(\"runtime\", MyRuntimeWarning)\n\n assert len(record) == 2\n assert str(record[0].message) == \"user\"\n assert str(record[1].message) == \"runtime\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_one_from_multiple_warns_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_one_from_multiple_warns_", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 359, "end_line": 387, "span_ids": ["TestWarns.test_one_from_multiple_warns", "TestWarns.test_none_of_multiple_warns", "TestWarns.test_warns_context_manager_with_kwargs", "TestWarns.test_can_capture_previously_warned"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarns:\n\n def test_one_from_multiple_warns(self) -> None:\n with pytest.warns(UserWarning, match=r\"aaa\"):\n warnings.warn(\"cccccccccc\", UserWarning)\n warnings.warn(\"bbbbbbbbbb\", UserWarning)\n warnings.warn(\"aaaaaaaaaa\", UserWarning)\n\n def test_none_of_multiple_warns(self) -> None:\n with pytest.raises(pytest.fail.Exception):\n with pytest.warns(UserWarning, match=r\"aaa\"):\n warnings.warn(\"bbbbbbbbbb\", UserWarning)\n warnings.warn(\"cccccccccc\", UserWarning)\n\n @pytest.mark.filterwarnings(\"ignore\")\n def test_can_capture_previously_warned(self) -> None:\n def f() -> int:\n warnings.warn(UserWarning(\"ohai\"))\n return 10\n\n assert f() == 10\n assert pytest.warns(UserWarning, f) == 10\n assert pytest.warns(UserWarning, f) == 10\n assert pytest.warns(UserWarning, f) != \"10\" # type: ignore[comparison-overlap]\n\n def test_warns_context_manager_with_kwargs(self) -> None:\n with pytest.raises(TypeError) as excinfo:\n with pytest.warns(UserWarning, foo=\"bar\"): # type: ignore\n pass\n assert \"Unexpected keyword arguments\" in str(excinfo.value)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_xdist_report_longrepr_reprcrash_130_TestReportSerialization.test_xdist_report_longrepr_reprcrash_130.assert_added_section_in_a": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_xdist_report_longrepr_reprcrash_130_TestReportSerialization.test_xdist_report_longrepr_reprcrash_130.assert_added_section_in_a", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 39, "end_line": 76, "span_ids": ["TestReportSerialization.test_xdist_report_longrepr_reprcrash_130"], "tokens": 424}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_xdist_report_longrepr_reprcrash_130(self, pytester: Pytester) -> None:\n \"\"\"Regarding issue pytest-xdist#130\n\n This test came originally from test_remote.py in xdist (ca03269).\n \"\"\"\n reprec = pytester.inline_runsource(\n \"\"\"\n def test_fail():\n assert False, 'Expected Message'\n \"\"\"\n )\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 3\n rep = reports[1]\n added_section = (\"Failure Metadata\", \"metadata metadata\", \"*\")\n assert isinstance(rep.longrepr, ExceptionRepr)\n rep.longrepr.sections.append(added_section)\n d = rep._to_json()\n a = TestReport._from_json(d)\n assert isinstance(a.longrepr, ExceptionRepr)\n # Check assembled == rep\n assert a.__dict__.keys() == rep.__dict__.keys()\n for key in rep.__dict__.keys():\n if key != \"longrepr\":\n assert getattr(a, key) == getattr(rep, key)\n assert rep.longrepr.reprcrash is not None\n assert a.longrepr.reprcrash is not None\n assert rep.longrepr.reprcrash.lineno == a.longrepr.reprcrash.lineno\n assert rep.longrepr.reprcrash.message == a.longrepr.reprcrash.message\n assert rep.longrepr.reprcrash.path == a.longrepr.reprcrash.path\n assert rep.longrepr.reprtraceback.entrysep == a.longrepr.reprtraceback.entrysep\n assert (\n rep.longrepr.reprtraceback.extraline == a.longrepr.reprtraceback.extraline\n )\n assert rep.longrepr.reprtraceback.style == a.longrepr.reprtraceback.style\n assert rep.longrepr.sections == a.longrepr.sections\n # Missing section attribute PR171\n assert added_section in a.longrepr.sections", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_reprentries_serialization_196_TestReportSerialization.test_reprentries_serialization_196.for_i_in_range_len_a_entr.assert_rep_entries_i_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_reprentries_serialization_196_TestReportSerialization.test_reprentries_serialization_196.for_i_in_range_len_a_entr.assert_rep_entries_i_lin", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 124, "end_line": 151, "span_ids": ["TestReportSerialization.test_reprentries_serialization_196"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_reprentries_serialization_196(self, pytester: Pytester) -> None:\n \"\"\"Regarding issue pytest-xdist#196\n\n This test came originally from test_remote.py in xdist (ca03269).\n \"\"\"\n from _pytest._code.code import ReprEntryNative\n\n reprec = pytester.inline_runsource(\n \"\"\"\n def test_repr_entry_native():\n x = 0\n assert x\n \"\"\",\n \"--tb=native\",\n )\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 3\n rep = reports[1]\n assert 
isinstance(rep.longrepr, ExceptionRepr)\n d = rep._to_json()\n a = TestReport._from_json(d)\n assert isinstance(a.longrepr, ExceptionRepr)\n\n rep_entries = rep.longrepr.reprtraceback.reprentries\n a_entries = a.longrepr.reprtraceback.reprentries\n for i in range(len(a_entries)):\n assert isinstance(rep_entries[i], ReprEntryNative)\n assert rep_entries[i].lines == a_entries[i].lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_itemreport_outcomes_TestReportSerialization.test_itemreport_outcomes.for_rep_in_reports_.if_rep_failed_.assert_newrep_longreprtex": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_itemreport_outcomes_TestReportSerialization.test_itemreport_outcomes.for_rep_in_reports_.if_rep_failed_.assert_newrep_longreprtex", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 153, "end_line": 185, "span_ids": ["TestReportSerialization.test_itemreport_outcomes"], "tokens": 320}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_itemreport_outcomes(self, pytester: Pytester) -> None:\n # This test came originally from test_remote.py in xdist (ca03269).\n reprec = pytester.inline_runsource(\n \"\"\"\n import pytest\n def test_pass(): pass\n def test_fail(): 0/0\n @pytest.mark.skipif(\"True\")\n def test_skip(): pass\n def test_skip_imperative():\n pytest.skip(\"hello\")\n @pytest.mark.xfail(\"True\")\n def test_xfail(): 0/0\n def test_xfail_imperative():\n pytest.xfail(\"hello\")\n \"\"\"\n )\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 17 # with setup/teardown \"passed\" reports\n for rep in reports:\n d = rep._to_json()\n newrep = TestReport._from_json(d)\n assert newrep.passed == rep.passed\n assert newrep.failed == rep.failed\n assert newrep.skipped == rep.skipped\n if newrep.skipped and not hasattr(newrep, \"wasxfail\"):\n assert isinstance(newrep.longrepr, tuple)\n assert len(newrep.longrepr) == 3\n assert newrep.outcome == rep.outcome\n assert newrep.when == rep.when\n assert newrep.keywords == rep.keywords\n if rep.failed:\n assert newrep.longreprtext == rep.longreprtext", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_collectreport_passed_TestReportSerialization.test_collectreport_passed.for_rep_in_reports_.assert_newrep_skipped_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_collectreport_passed_TestReportSerialization.test_collectreport_passed.for_rep_in_reports_.assert_newrep_skipped_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", 
"file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 187, "end_line": 196, "span_ids": ["TestReportSerialization.test_collectreport_passed"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_collectreport_passed(self, pytester: Pytester) -> None:\n \"\"\"This test came originally from test_remote.py in xdist (ca03269).\"\"\"\n reprec = pytester.inline_runsource(\"def test_func(): pass\")\n reports = reprec.getreports(\"pytest_collectreport\")\n for rep in reports:\n d = rep._to_json()\n newrep = CollectReport._from_json(d)\n assert newrep.passed == rep.passed\n assert newrep.failed == rep.failed\n assert newrep.skipped == rep.skipped", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_collectreport_fail_TestReportSerialization.test_collectreport_fail.for_rep_in_reports_.if_rep_failed_.assert_newrep_longrepr_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_collectreport_fail_TestReportSerialization.test_collectreport_fail.for_rep_in_reports_.if_rep_failed_.assert_newrep_longrepr_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 198, "end_line": 210, "span_ids": ["TestReportSerialization.test_collectreport_fail"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_collectreport_fail(self, pytester: Pytester) -> None:\n \"\"\"This test came originally from test_remote.py in xdist (ca03269).\"\"\"\n reprec = pytester.inline_runsource(\"qwe abc\")\n reports = reprec.getreports(\"pytest_collectreport\")\n assert reports\n for rep in reports:\n d = rep._to_json()\n newrep = CollectReport._from_json(d)\n assert newrep.passed == rep.passed\n assert newrep.failed == rep.failed\n assert newrep.skipped == rep.skipped\n if rep.failed:\n assert newrep.longrepr == str(rep.longrepr)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_extended_report_deserialization_TestReportSerialization.test_extended_report_deserialization.for_rep_in_reports_.if_rep_failed_.assert_newrep_longrepr_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_extended_report_deserialization_TestReportSerialization.test_extended_report_deserialization.for_rep_in_reports_.if_rep_failed_.assert_newrep_longrepr_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 212, "end_line": 226, "span_ids": ["TestReportSerialization.test_extended_report_deserialization"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_extended_report_deserialization(self, pytester: Pytester) -> None:\n \"\"\"This test came originally from test_remote.py in xdist (ca03269).\"\"\"\n reprec = pytester.inline_runsource(\"qwe abc\")\n reports = reprec.getreports(\"pytest_collectreport\")\n assert reports\n for rep in reports:\n rep.extra = True # type: ignore[attr-defined]\n d = rep._to_json()\n newrep = CollectReport._from_json(d)\n assert newrep.extra\n assert newrep.passed == rep.passed\n assert newrep.failed == rep.failed\n assert newrep.skipped == rep.skipped\n if rep.failed:\n assert newrep.longrepr == str(rep.longrepr)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_paths_support_TestReportSerialization.test_paths_support.assert_data_path2_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_paths_support_TestReportSerialization.test_paths_support.assert_data_path2_s", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 228, "end_line": 244, "span_ids": ["TestReportSerialization.test_paths_support"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_paths_support(self, pytester: Pytester) -> None:\n \"\"\"Report attributes which are py.path or pathlib objects should become strings.\"\"\"\n pytester.makepyfile(\n \"\"\"\n def test_a():\n assert False\n \"\"\"\n )\n reprec = pytester.inline_run()\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 3\n test_a_call = reports[1]\n test_a_call.path1 = py.path.local(pytester.path) # type: ignore[attr-defined]\n test_a_call.path2 = pytester.path # type: ignore[attr-defined]\n data = test_a_call._to_json()\n assert data[\"path1\"] == str(pytester.path)\n assert data[\"path2\"] == str(pytester.path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, 
"__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks_TestHooks.test_test_report.for_rep_in_reports_.assert_new_rep_outcome_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks_TestHooks.test_test_report.for_rep_in_reports_.assert_new_rep_outcome_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 413, "end_line": 436, "span_ids": ["TestHooks.test_test_report", "TestHooks"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHooks:\n \"\"\"Test that the hooks are working correctly for plugins\"\"\"\n\n def test_test_report(self, pytester: Pytester, pytestconfig: Config) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_a(): assert False\n def test_b(): pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 6\n for rep in reports:\n data = pytestconfig.hook.pytest_report_to_serializable(\n config=pytestconfig, report=rep\n )\n assert data[\"$report_type\"] == \"TestReport\"\n new_rep = pytestconfig.hook.pytest_report_from_serializable(\n config=pytestconfig, data=data\n )\n assert new_rep.nodeid == rep.nodeid\n assert new_rep.when == rep.when\n assert new_rep.outcome == rep.outcome", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks.test_collect_report_TestHooks.test_collect_report.for_rep_in_reports_.assert_new_rep_outcome_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks.test_collect_report_TestHooks.test_collect_report.for_rep_in_reports_.assert_new_rep_outcome_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 438, "end_line": 458, "span_ids": ["TestHooks.test_collect_report"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHooks:\n\n def test_collect_report(self, pytester: Pytester, pytestconfig: Config) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_a(): assert False\n def test_b(): pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reports = reprec.getreports(\"pytest_collectreport\")\n assert len(reports) == 2\n for rep in reports:\n data = pytestconfig.hook.pytest_report_to_serializable(\n config=pytestconfig, report=rep\n )\n assert data[\"$report_type\"] == \"CollectReport\"\n new_rep = pytestconfig.hook.pytest_report_from_serializable(\n config=pytestconfig, data=data\n )\n assert new_rep.nodeid == rep.nodeid\n assert new_rep.when == \"collect\"\n assert new_rep.outcome == rep.outcome", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks.test_invalid_report_types_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestHooks.test_invalid_report_types_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 460, "end_line": 483, "span_ids": ["TestHooks.test_invalid_report_types"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHooks:\n\n @pytest.mark.parametrize(\n \"hook_name\", [\"pytest_runtest_logreport\", \"pytest_collectreport\"]\n )\n def test_invalid_report_types(\n self, pytester: Pytester, pytestconfig: Config, hook_name: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_a(): pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reports = reprec.getreports(hook_name)\n assert reports\n rep = reports[0]\n data = pytestconfig.hook.pytest_report_to_serializable(\n config=pytestconfig, report=rep\n )\n data[\"$report_type\"] = \"Unknown\"\n with pytest.raises(AssertionError):\n _ = pytestconfig.hook.pytest_report_from_serializable(\n config=pytestconfig, data=data\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_one_fails_TestSetupState.test_teardown_multiple_one_fails.assert_r_fin3_fin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_one_fails_TestSetupState.test_teardown_multiple_one_fails.assert_r_fin3_fin", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 56, "end_line": 77, "span_ids": ["TestSetupState.test_teardown_multiple_one_fails"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSetupState:\n\n def test_teardown_multiple_one_fails(self, pytester: Pytester) -> None:\n r = []\n\n def fin1():\n r.append(\"fin1\")\n\n def fin2():\n raise Exception(\"oops\")\n\n def fin3():\n r.append(\"fin3\")\n\n item = pytester.getitem(\"def test_func(): pass\")\n ss = item.session._setupstate\n ss.setup(item)\n ss.addfinalizer(fin1, item)\n ss.addfinalizer(fin2, item)\n ss.addfinalizer(fin3, item)\n with pytest.raises(Exception) as err:\n ss.teardown_exact(None)\n assert err.value.args == (\"oops\",)\n assert r == [\"fin3\", \"fin1\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_fail_TestSetupState.test_teardown_multiple_fail.assert_err_value_args_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_fail_TestSetupState.test_teardown_multiple_fail.assert_err_value_args_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 79, "end_line": 95, "span_ids": ["TestSetupState.test_teardown_multiple_fail"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSetupState:\n\n def test_teardown_multiple_fail(self, pytester: Pytester) -> None:\n # Ensure the first exception is the one which is re-raised.\n # Ideally both would be reported however.\n def fin1():\n raise Exception(\"oops1\")\n\n def fin2():\n raise Exception(\"oops2\")\n\n item = pytester.getitem(\"def test_func(): pass\")\n ss = item.session._setupstate\n ss.setup(item)\n ss.addfinalizer(fin1, item)\n ss.addfinalizer(fin2, item)\n with pytest.raises(Exception) as err:\n ss.teardown_exact(None)\n assert err.value.args == (\"oops2\",)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests_BaseFunctionalTests.test_failfunction._assert_isinstance_rep_l": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests_BaseFunctionalTests.test_failfunction._assert_isinstance_rep_l", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 117, "end_line": 144, "span_ids": ["BaseFunctionalTests.test_passfunction", "BaseFunctionalTests.test_failfunction", "BaseFunctionalTests"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n def test_passfunction(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n def test_func():\n pass\n \"\"\"\n )\n rep = reports[1]\n assert rep.passed\n assert not rep.failed\n assert rep.outcome == \"passed\"\n assert not rep.longrepr\n\n def test_failfunction(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n def test_func():\n assert 0\n \"\"\"\n )\n rep = reports[1]\n assert not rep.passed\n assert not rep.skipped\n assert rep.failed\n assert rep.when == \"call\"\n assert rep.outcome == \"failed\"\n # assert isinstance(rep.longrepr, ReprExceptionInfo)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_skipfunction_BaseFunctionalTests.test_skipfunction._assert_not_rep_skipped_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_skipfunction_BaseFunctionalTests.test_skipfunction._assert_not_rep_skipped_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 146, "end_line": 164, "span_ids": ["BaseFunctionalTests.test_skipfunction"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_skipfunction(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n import pytest\n def test_func():\n pytest.skip(\"hello\")\n \"\"\"\n )\n rep = reports[1]\n assert not rep.failed\n assert not rep.passed\n assert rep.skipped\n assert rep.outcome == \"skipped\"\n # assert rep.skipped.when == \"call\"\n # assert rep.skipped.when == \"call\"\n # assert rep.skipped == \"%sreason == \"hello\"\n # assert rep.skipped.location.lineno == 3\n # assert rep.skipped.location.path\n # assert not rep.skipped.failurerepr", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_skip_in_setup_function_BaseFunctionalTests.test_skip_in_setup_function._teardown": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_skip_in_setup_function_BaseFunctionalTests.test_skip_in_setup_function._teardown", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 166, "end_line": 185, "span_ids": ["BaseFunctionalTests.test_skip_in_setup_function"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_skip_in_setup_function(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n import pytest\n def setup_function(func):\n pytest.skip(\"hello\")\n def test_func():\n pass\n \"\"\"\n )\n print(reports)\n rep = reports[0]\n assert not rep.failed\n assert not rep.passed\n assert rep.skipped\n # assert rep.skipped.reason == \"hello\"\n # assert rep.skipped.location.lineno == 3\n # assert rep.skipped.location.lineno == 3\n assert len(reports) == 2\n assert reports[1].passed # teardown", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": 
"1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_failure_in_setup_function_BaseFunctionalTests.test_failure_in_teardown_function.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_failure_in_setup_function_BaseFunctionalTests.test_failure_in_teardown_function.None_1", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 187, "end_line": 222, "span_ids": ["BaseFunctionalTests.test_failure_in_teardown_function", "BaseFunctionalTests.test_failure_in_setup_function"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_failure_in_setup_function(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n import pytest\n def setup_function(func):\n raise ValueError(42)\n def test_func():\n pass\n \"\"\"\n )\n rep = reports[0]\n assert not rep.skipped\n assert not rep.passed\n assert rep.failed\n assert rep.when == \"setup\"\n assert len(reports) == 2\n\n def test_failure_in_teardown_function(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n import pytest\n def teardown_function(func):\n raise ValueError(42)\n def test_func():\n pass\n \"\"\"\n )\n print(reports)\n assert len(reports) == 3\n rep = reports[2]\n assert not rep.skipped\n assert not rep.passed\n assert rep.failed\n assert rep.when == \"teardown\"\n # assert rep.longrepr.reprcrash.lineno == 3\n # assert rep.longrepr.reprtraceback.reprentries", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_custom_failure_repr_BaseFunctionalTests.test_custom_failure_repr._assert_rep_failed_failu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_custom_failure_repr_BaseFunctionalTests.test_custom_failure_repr._assert_rep_failed_failu", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 224, "end_line": 247, "span_ids": ["BaseFunctionalTests.test_custom_failure_repr"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_custom_failure_repr(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n conftest=\"\"\"\n import pytest\n class Function(pytest.Function):\n def repr_failure(self, excinfo):\n return \"hello\"\n \"\"\"\n )\n reports = pytester.runitem(\n \"\"\"\n import pytest\n def test_func():\n assert 0\n \"\"\"\n )\n rep = reports[1]\n assert not rep.skipped\n assert not rep.passed\n assert rep.failed\n 
# assert rep.outcome.when == \"call\"\n # assert rep.failed.where.lineno == 3\n # assert rep.failed.where.path.basename == \"test_func.py\"\n # assert rep.failed.failurerepr == \"hello\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_teardown_final_returncode_BaseFunctionalTests.test_logstart_logfinish_hooks.for_rep_in_reps_.assert_rep_location_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_teardown_final_returncode_BaseFunctionalTests.test_logstart_logfinish_hooks.for_rep_in_reps_.assert_rep_location_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 249, "end_line": 275, "span_ids": ["BaseFunctionalTests.test_logstart_logfinish_hooks", "BaseFunctionalTests.test_teardown_final_returncode"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_teardown_final_returncode(self, pytester: Pytester) -> None:\n rec = pytester.inline_runsource(\n \"\"\"\n def test_func():\n pass\n def teardown_function(func):\n raise ValueError(42)\n \"\"\"\n )\n assert rec.ret == 1\n\n def test_logstart_logfinish_hooks(self, pytester: Pytester) -> None:\n rec = pytester.inline_runsource(\n \"\"\"\n import pytest\n def test_func():\n pass\n \"\"\"\n )\n reps = rec.getcalls(\"pytest_runtest_logstart pytest_runtest_logfinish\")\n assert [x._name for x in reps] == [\n \"pytest_runtest_logstart\",\n \"pytest_runtest_logfinish\",\n ]\n for rep in reps:\n assert rep.nodeid == \"test_logstart_logfinish_hooks.py::test_func\"\n assert rep.location == (\"test_logstart_logfinish_hooks.py\", 1, \"test_func\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_exact_teardown_issue90_BaseFunctionalTests.test_exact_teardown_issue90.assert_reps_5_failed": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_exact_teardown_issue90_BaseFunctionalTests.test_exact_teardown_issue90.assert_reps_5_failed", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 277, "end_line": 315, "span_ids": ["BaseFunctionalTests.test_exact_teardown_issue90"], "tokens": 304}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_exact_teardown_issue90(self, pytester: Pytester) 
-> None:\n rec = pytester.inline_runsource(\n \"\"\"\n import pytest\n\n class TestClass(object):\n def test_method(self):\n pass\n def teardown_class(cls):\n raise Exception()\n\n def test_func():\n import sys\n # on python2 exc_info is keept till a function exits\n # so we would end up calling test functions while\n # sys.exc_info would return the indexerror\n # from guessing the lastitem\n excinfo = sys.exc_info()\n import traceback\n assert excinfo[0] is None, \\\n traceback.format_exception(*excinfo)\n def teardown_function(func):\n raise ValueError(42)\n \"\"\"\n )\n reps = rec.getreports(\"pytest_runtest_logreport\")\n print(reps)\n for i in range(2):\n assert reps[i].nodeid.endswith(\"test_method\")\n assert reps[i].passed\n assert reps[2].when == \"teardown\"\n assert reps[2].failed\n assert len(reps) == 6\n for i in range(3, 5):\n assert reps[i].nodeid.endswith(\"test_func\")\n assert reps[i].passed\n assert reps[5].when == \"teardown\"\n assert reps[5].nodeid.endswith(\"test_func\")\n assert reps[5].failed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_failure_in_setup_function_ignores_custom_repr_BaseFunctionalTests.test_failure_in_setup_function_ignores_custom_repr._assert_instanace_rep_fa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_failure_in_setup_function_ignores_custom_repr_BaseFunctionalTests.test_failure_in_setup_function_ignores_custom_repr._assert_instanace_rep_fa", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 355, "end_line": 383, "span_ids": ["BaseFunctionalTests.test_failure_in_setup_function_ignores_custom_repr"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_failure_in_setup_function_ignores_custom_repr(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n conftest=\"\"\"\n import pytest\n class Function(pytest.Function):\n def repr_failure(self, excinfo):\n assert 0\n \"\"\"\n )\n reports = pytester.runitem(\n \"\"\"\n def setup_function(func):\n raise ValueError(42)\n def test_func():\n pass\n \"\"\"\n )\n assert len(reports) == 2\n rep = reports[0]\n print(rep)\n assert not rep.skipped\n assert not rep.passed\n assert rep.failed\n # assert rep.outcome.when == \"setup\"\n # assert rep.outcome.where.lineno == 3\n # assert rep.outcome.where.path.basename == \"test_func.py\"\n # assert instanace(rep.failed.failurerepr, PythonFailureRepr)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSessionReports_TestSessionReports.test_collect_result.assert_res_1_name_Te": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSessionReports_TestSessionReports.test_collect_result.assert_res_1_name_Te", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 435, "end_line": 456, "span_ids": ["TestSessionReports.test_collect_result", "TestSessionReports"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSessionReports:\n def test_collect_result(self, pytester: Pytester) -> None:\n col = pytester.getmodulecol(\n \"\"\"\n def test_func1():\n pass\n class TestClass(object):\n pass\n \"\"\"\n )\n rep = runner.collect_one_node(col)\n assert not rep.failed\n assert not rep.skipped\n assert rep.passed\n locinfo = rep.location\n assert locinfo[0] == col.fspath.basename\n assert not locinfo[1]\n assert locinfo[2] == col.fspath.basename\n res = rep.result\n assert len(res) == 2\n assert res[0].name == \"test_func1\"\n assert res[1].name == \"TestClass\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_reporttypes_test_report_extra_parameters.assert_report_newthing_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_reporttypes_test_report_extra_parameters.assert_report_newthing_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 459, "end_line": 473, "span_ids": ["impl", "test_report_extra_parameters"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "reporttypes: List[Type[reports.BaseReport]] = [\n reports.BaseReport,\n reports.TestReport,\n reports.CollectReport,\n]\n\n\n@pytest.mark.parametrize(\n \"reporttype\", reporttypes, ids=[x.__name__ for x in reporttypes]\n)\ndef test_report_extra_parameters(reporttype: Type[reports.BaseReport]) -> None:\n args = list(inspect.signature(reporttype.__init__).parameters.keys())[1:]\n basekw: Dict[str, List[object]] = dict.fromkeys(args, [])\n report = reporttype(newthing=1, **basekw)\n assert report.newthing == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py__design_question_do_we__test_runtest_in_module_ordering.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py__design_question_do_we__test_runtest_in_module_ordering.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": 
"text/x-python", "category": "test", "start_line": 500, "end_line": 534, "span_ids": ["test_callinfo", "test_runtest_in_module_ordering"], "tokens": 273}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# design question: do we want general hooks in python files?\n# then something like the following functional tests makes sense\n\n\n@pytest.mark.xfail\ndef test_runtest_in_module_ordering(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_runtest_setup(item): # runs after class-level!\n item.function.mylist.append(\"module\")\n class TestClass(object):\n def pytest_runtest_setup(self, item):\n assert not hasattr(item.function, 'mylist')\n item.function.mylist = ['class']\n @pytest.fixture\n def mylist(self, request):\n return request.function.mylist\n @pytest.hookimpl(hookwrapper=True)\n def pytest_runtest_call(self, item):\n try:\n (yield).get_result()\n except ValueError:\n pass\n def test_hello1(self, mylist):\n assert mylist == ['class', 'module'], mylist\n raise ValueError()\n def test_hello2(self, mylist):\n assert mylist == ['class', 'module'], mylist\n def pytest_runtest_teardown(item):\n del item.function.mylist\n \"\"\"\n )\n result = pytester.runpytest(p1)\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_exit_returncode_test_pytest_exit_returncode.assert_result_ret_98": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_exit_returncode_test_pytest_exit_returncode.assert_result_ret_98", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 586, "end_line": 614, "span_ids": ["test_pytest_exit_returncode"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_exit_returncode(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\\\n import pytest\n def test_foo():\n pytest.exit(\"some exit msg\", 99)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*! *Exit: some exit msg !*\"])\n\n assert _strip_resource_warnings(result.stderr.lines) == []\n assert result.ret == 99\n\n # It prints to stderr also in case of exit during pytest_sessionstart.\n pytester.makeconftest(\n \"\"\"\\\n import pytest\n\n def pytest_sessionstart():\n pytest.exit(\"during_sessionstart\", 98)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*! 
*Exit: during_sessionstart !*\"])\n assert _strip_resource_warnings(result.stderr.lines) == [\n \"Exit: during_sessionstart\"\n ]\n assert result.ret == 98", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_no_tests_collected_exit_status_test_exception_printing_skip.try_.except_pytest_skip_Except.assert_s_startswith_Skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_no_tests_collected_exit_status_test_exception_printing_skip.try_.except_pytest_skip_Except.assert_s_startswith_Skip", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 666, "end_line": 695, "span_ids": ["test_pytest_no_tests_collected_exit_status", "test_exception_printing_skip"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_no_tests_collected_exit_status(pytester: Pytester) -> None:\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*collected 0 items*\"])\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n\n pytester.makepyfile(\n test_foo=\"\"\"\n def test_foo():\n assert 1\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*collected 1 item*\"])\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n assert result.ret == ExitCode.OK\n\n result = pytester.runpytest(\"-k nonmatch\")\n result.stdout.fnmatch_lines([\"*collected 1 item*\"])\n result.stdout.fnmatch_lines([\"*1 deselected*\"])\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n\n\ndef test_exception_printing_skip() -> None:\n assert pytest.skip.Exception == pytest.skip.Exception\n try:\n pytest.skip(\"hello\")\n except pytest.skip.Exception:\n excinfo = ExceptionInfo.from_current()\n s = excinfo.exconly(tryshort=True)\n assert s.startswith(\"Skipped\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_failure_in_setup_test_makereport_getsource.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_failure_in_setup_test_makereport_getsource.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 821, "end_line": 844, "span_ids": ["test_makereport_getsource", "test_failure_in_setup"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_failure_in_setup(pytester: Pytester) -> None:\n 
pytester.makepyfile(\n \"\"\"\n def setup_module():\n 0/0\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--tb=line\")\n result.stdout.no_fnmatch_line(\"*def setup_module*\")\n\n\ndef test_makereport_getsource(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n if False: pass\n else: assert False\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*INTERNALERROR*\")\n result.stdout.fnmatch_lines([\"*else: assert False*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_makereport_getsource_dynamic_code_test_makereport_getsource_dynamic_code.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_makereport_getsource_dynamic_code_test_makereport_getsource_dynamic_code.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 847, "end_line": 877, "span_ids": ["test_makereport_getsource_dynamic_code"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_makereport_getsource_dynamic_code(\n pytester: Pytester, monkeypatch: MonkeyPatch\n) -> None:\n \"\"\"Test that exception in dynamically generated code doesn't break getting the source line.\"\"\"\n import inspect\n\n original_findsource = inspect.findsource\n\n def findsource(obj):\n # Can be triggered by dynamically created functions\n if obj.__name__ == \"foo\":\n raise IndexError()\n return original_findsource(obj)\n\n monkeypatch.setattr(inspect, \"findsource\", findsource)\n\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def foo(missing):\n pass\n\n def test_fix(foo):\n assert False\n \"\"\"\n )\n result = pytester.runpytest(\"-vv\")\n result.stdout.no_fnmatch_line(\"*INTERNALERROR*\")\n result.stdout.fnmatch_lines([\"*test_fix*\", \"*fixture*'missing'*not found*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_current_test_env_var_test_current_test_env_var.assert_PYTEST_CURRENT_TE": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_current_test_env_var_test_current_test_env_var.assert_PYTEST_CURRENT_TE", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 910, "end_line": 939, "span_ids": ["test_current_test_env_var"], "tokens": 254}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def test_current_test_env_var(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n pytest_current_test_vars: List[Tuple[str, str]] = []\n monkeypatch.setattr(\n sys, \"pytest_current_test_vars\", pytest_current_test_vars, raising=False\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n import sys\n import os\n\n @pytest.fixture\n def fix():\n sys.pytest_current_test_vars.append(('setup', os.environ['PYTEST_CURRENT_TEST']))\n yield\n sys.pytest_current_test_vars.append(('teardown', os.environ['PYTEST_CURRENT_TEST']))\n\n def test(fix):\n sys.pytest_current_test_vars.append(('call', os.environ['PYTEST_CURRENT_TEST']))\n \"\"\"\n )\n result = pytester.runpytest_inprocess()\n assert result.ret == 0\n test_id = \"test_current_test_env_var.py::test\"\n assert pytest_current_test_vars == [\n (\"setup\", test_id + \" (setup)\"),\n (\"call\", test_id + \" (call)\"),\n (\"teardown\", test_id + \" (teardown)\"),\n ]\n assert \"PYTEST_CURRENT_TEST\" not in os.environ", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_class_setup_test_class_setup.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_class_setup_test_class_setup.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 80, "end_line": 103, "span_ids": ["test_class_setup"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_class_setup(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n class TestSimpleClassSetup(object):\n clslevel = []\n def setup_class(cls):\n cls.clslevel.append(23)\n\n def teardown_class(cls):\n cls.clslevel.pop()\n\n def test_classlevel(self):\n assert self.clslevel[0] == 23\n\n class TestInheritedClassSetupStillWorks(TestSimpleClassSetup):\n def test_classlevel_anothertime(self):\n assert self.clslevel == [23]\n\n def test_cleanup():\n assert not TestSimpleClassSetup.clslevel\n assert not TestInheritedClassSetupStillWorks.clslevel\n \"\"\"\n )\n reprec.assertoutcome(passed=1 + 2 + 1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_class_setup_failure_no_teardown_test_class_setup_failure_no_teardown.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_class_setup_failure_no_teardown_test_class_setup_failure_no_teardown.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 106, "end_line": 124, "span_ids": ["test_class_setup_failure_no_teardown"], "tokens": 111}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_class_setup_failure_no_teardown(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n class TestSimpleClassSetup(object):\n clslevel = []\n def setup_class(cls):\n 0/0\n\n def teardown_class(cls):\n cls.clslevel.append(1)\n\n def test_classlevel(self):\n pass\n\n def test_cleanup():\n assert not TestSimpleClassSetup.clslevel\n \"\"\"\n )\n reprec.assertoutcome(failed=1, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_test_method_setup.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_test_method_setup.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 127, "end_line": 143, "span_ids": ["test_method_setup"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_method_setup(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n class TestSetupMethod(object):\n def setup_method(self, meth):\n self.methsetup = meth\n def teardown_method(self, meth):\n del self.methsetup\n\n def test_some(self):\n assert self.methsetup == self.test_some\n\n def test_other(self):\n assert self.methsetup == self.test_other\n \"\"\"\n )\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_failure_no_teardown_test_method_setup_failure_no_teardown.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_failure_no_teardown_test_method_setup_failure_no_teardown.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 146, "end_line": 165, "span_ids": ["test_method_setup_failure_no_teardown"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_method_setup_failure_no_teardown(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n 
class TestMethodSetup(object):\n clslevel = []\n def setup_method(self, method):\n self.clslevel.append(1)\n 0/0\n\n def teardown_method(self, method):\n self.clslevel.append(2)\n\n def test_method(self):\n pass\n\n def test_cleanup():\n assert TestMethodSetup.clslevel == [1]\n \"\"\"\n )\n reprec.assertoutcome(failed=1, passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_uses_fresh_instances_test_setup_fails_again_on_all_tests.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_method_setup_uses_fresh_instances_test_setup_fails_again_on_all_tests.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 168, "end_line": 212, "span_ids": ["test_setup_fails_again_on_all_tests", "test_method_setup_uses_fresh_instances", "test_setup_that_skips_calledagain"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_method_setup_uses_fresh_instances(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n class TestSelfState1(object):\n memory = []\n def test_hello(self):\n self.memory.append(self)\n\n def test_afterhello(self):\n assert self != self.memory[0]\n \"\"\"\n )\n reprec.assertoutcome(passed=2, failed=0)\n\n\ndef test_setup_that_skips_calledagain(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def setup_module(mod):\n pytest.skip(\"x\")\n def test_function1():\n pass\n def test_function2():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(p)\n reprec.assertoutcome(skipped=2)\n\n\ndef test_setup_fails_again_on_all_tests(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def setup_module(mod):\n raise ValueError(42)\n def test_function1():\n pass\n def test_function2():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(p)\n reprec.assertoutcome(failed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_teardown_function_level_with_optional_argument_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_teardown_function_level_with_optional_argument_", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 243, "end_line": 298, "span_ids": ["test_setup_teardown_function_level_with_optional_argument"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"arg\", [\"\", \"arg\"])\ndef test_setup_teardown_function_level_with_optional_argument(\n pytester: Pytester,\n monkeypatch,\n arg: str,\n) -> None:\n \"\"\"Parameter to setup/teardown xunit-style functions parameter is now optional (#1728).\"\"\"\n import sys\n\n trace_setups_teardowns: List[str] = []\n monkeypatch.setattr(\n sys, \"trace_setups_teardowns\", trace_setups_teardowns, raising=False\n )\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n import sys\n\n trace = sys.trace_setups_teardowns.append\n\n def setup_module({arg}): trace('setup_module')\n def teardown_module({arg}): trace('teardown_module')\n\n def setup_function({arg}): trace('setup_function')\n def teardown_function({arg}): trace('teardown_function')\n\n def test_function_1(): pass\n def test_function_2(): pass\n\n class Test(object):\n def setup_method(self, {arg}): trace('setup_method')\n def teardown_method(self, {arg}): trace('teardown_method')\n\n def test_method_1(self): pass\n def test_method_2(self): pass\n \"\"\".format(\n arg=arg\n )\n )\n result = pytester.inline_run(p)\n result.assertoutcome(passed=4)\n\n expected = [\n \"setup_module\",\n \"setup_function\",\n \"teardown_function\",\n \"setup_function\",\n \"teardown_function\",\n \"setup_method\",\n \"teardown_method\",\n \"setup_method\",\n \"teardown_method\",\n \"teardown_module\",\n ]\n assert trace_setups_teardowns == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_nested_import_error_SessionTests.test_nested_import_error.assert_out_find_does_not": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_nested_import_error_SessionTests.test_nested_import_error.assert_out_find_does_not", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 40, "end_line": 56, "span_ids": ["SessionTests.test_nested_import_error"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SessionTests:\n\n def test_nested_import_error(self, pytester: Pytester) -> None:\n tfile = pytester.makepyfile(\n \"\"\"\n import import_fails\n def test_this():\n assert import_fails.a == 1\n \"\"\",\n import_fails=\"\"\"\n import does_not_work\n a = 1\n \"\"\",\n )\n reprec = pytester.inline_run(tfile)\n values = reprec.getfailedcollections()\n assert len(values) == 1\n out = str(values[0].longrepr)\n assert out.find(\"does_not_work\") != -1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_broken_repr_with_showlocals_verbose_SessionTests.test_broken_repr_with_showlocals_verbose.assert_repr_locals_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_broken_repr_with_showlocals_verbose_SessionTests.test_broken_repr_with_showlocals_verbose.assert_repr_locals_lines_", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 135, "end_line": 157, "span_ids": ["SessionTests.test_broken_repr_with_showlocals_verbose"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SessionTests:\n\n def test_broken_repr_with_showlocals_verbose(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n class ObjWithErrorInRepr:\n def __repr__(self):\n raise NotImplementedError\n\n def test_repr_error():\n x = ObjWithErrorInRepr()\n assert x == \"value\"\n \"\"\"\n )\n reprec = pytester.inline_run(\"--showlocals\", \"-vv\", p)\n passed, skipped, failed = reprec.listoutcomes()\n assert (len(passed), len(skipped), len(failed)) == (0, 0, 1)\n entries = failed[0].longrepr.reprtraceback.reprentries # type: ignore[union-attr]\n assert len(entries) == 1\n repr_locals = entries[0].reprlocals\n assert repr_locals.lines\n assert len(repr_locals.lines) == 1\n assert repr_locals.lines[0].startswith(\n \"x = <[NotImplementedError() raised in repr()] ObjWithErrorInRepr\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_skip_file_by_conftest_SessionTests.test_skip_file_by_conftest.assert_reports_0_skipped": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_skip_file_by_conftest_SessionTests.test_skip_file_by_conftest.assert_reports_0_skipped", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 159, "end_line": 176, "span_ids": ["SessionTests.test_skip_file_by_conftest"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SessionTests:\n\n def test_skip_file_by_conftest(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n conftest=\"\"\"\n import pytest\n def pytest_collect_file():\n pytest.skip(\"intentional\")\n \"\"\",\n test_file=\"\"\"\n def test_one(): pass\n \"\"\",\n )\n try:\n reprec = pytester.inline_run(pytester.path)\n except pytest.skip.Exception: # pragma: no cover\n pytest.fail(\"wrong skipped caught\")\n reports = reprec.getreports(\"pytest_collectreport\")\n assert len(reports) == 1\n assert reports[0].skipped", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession_TestNewSession.test_order_of_execution.assert_passed_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession_TestNewSession.test_order_of_execution.assert_passed_7", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 179, "end_line": 204, "span_ids": ["TestNewSession.test_order_of_execution", "TestNewSession"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewSession(SessionTests):\n def test_order_of_execution(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n values = []\n def test_1():\n values.append(1)\n def test_2():\n values.append(2)\n def test_3():\n assert values == [1,2]\n class Testmygroup(object):\n reslist = values\n def test_1(self):\n self.reslist.append(1)\n def test_2(self):\n self.reslist.append(2)\n def test_3(self):\n self.reslist.append(3)\n def test_4(self):\n assert self.reslist == [1,2,1,2,3]\n \"\"\"\n )\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == skipped == 0\n assert passed == 7", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession.test_collect_only_with_various_situations_TestNewSession.test_collect_only_with_various_situations.assert_len_colfail_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession.test_collect_only_with_various_situations_TestNewSession.test_collect_only_with_various_situations.assert_len_colfail_1", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 206, "end_line": 232, "span_ids": ["TestNewSession.test_collect_only_with_various_situations"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewSession(SessionTests):\n\n def test_collect_only_with_various_situations(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n test_one=\"\"\"\n def test_one():\n raise ValueError()\n\n class TestX(object):\n def test_method_one(self):\n pass\n\n class TestY(TestX):\n pass\n \"\"\",\n test_three=\"xxxdsadsadsadsa\",\n __init__=\"\",\n )\n reprec = pytester.inline_run(\"--collect-only\", p.parent)\n\n itemstarted = reprec.getcalls(\"pytest_itemcollected\")\n assert len(itemstarted) == 3\n assert not 
reprec.getreports(\"pytest_runtest_logreport\")\n started = reprec.getcalls(\"pytest_collectstart\")\n finished = reprec.getreports(\"pytest_collectreport\")\n assert len(started) == len(finished)\n assert len(started) == 8\n colfail = [x for x in finished if x.failed]\n assert len(colfail) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession.test_minus_x_import_error_TestNewSession.test_minus_x_overridden_by_maxfail.assert_len_colfail_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_TestNewSession.test_minus_x_import_error_TestNewSession.test_minus_x_overridden_by_maxfail.assert_len_colfail_2", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 234, "end_line": 248, "span_ids": ["TestNewSession.test_minus_x_import_error", "TestNewSession.test_minus_x_overridden_by_maxfail"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewSession(SessionTests):\n\n def test_minus_x_import_error(self, pytester: Pytester) -> None:\n pytester.makepyfile(__init__=\"\")\n pytester.makepyfile(test_one=\"xxxx\", test_two=\"yyyy\")\n reprec = pytester.inline_run(\"-x\", pytester.path)\n finished = reprec.getreports(\"pytest_collectreport\")\n colfail = [x for x in finished if x.failed]\n assert len(colfail) == 1\n\n def test_minus_x_overridden_by_maxfail(self, pytester: Pytester) -> None:\n pytester.makepyfile(__init__=\"\")\n pytester.makepyfile(test_one=\"xxxx\", test_two=\"yyyy\", test_third=\"zzz\")\n reprec = pytester.inline_run(\"-x\", \"--maxfail=2\", pytester.path)\n finished = reprec.getreports(\"pytest_collectreport\")\n colfail = [x for x in finished if x.failed]\n assert len(colfail) == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_plugin_specify_test_exclude.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_plugin_specify_test_exclude.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 251, "end_line": 274, "span_ids": ["test_plugin_already_exists", "test_exclude", "test_plugin_specify"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_plugin_specify(pytester: Pytester) -> None:\n with pytest.raises(ImportError):\n pytester.parseconfig(\"-p\", 
\"nqweotexistent\")\n # pytest.raises(ImportError,\n # \"config.do_configure(config)\"\n # )\n\n\ndef test_plugin_already_exists(pytester: Pytester) -> None:\n config = pytester.parseconfig(\"-p\", \"terminal\")\n assert config.option.plugins == [\"terminal\"]\n config._do_configure()\n config._ensure_unconfigure()\n\n\ndef test_exclude(pytester: Pytester) -> None:\n hellodir = pytester.mkdir(\"hello\")\n hellodir.joinpath(\"test_hello.py\").write_text(\"x y syntaxerror\")\n hello2dir = pytester.mkdir(\"hello2\")\n hello2dir.joinpath(\"test_hello2.py\").write_text(\"x y syntaxerror\")\n pytester.makepyfile(test_ok=\"def test_pass(): pass\")\n result = pytester.runpytest(\"--ignore=hello\", \"--ignore=hello2\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_exclude_glob_test_exclude_glob.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_exclude_glob_test_exclude_glob.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 277, "end_line": 289, "span_ids": ["test_exclude_glob"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_exclude_glob(pytester: Pytester) -> None:\n hellodir = pytester.mkdir(\"hello\")\n hellodir.joinpath(\"test_hello.py\").write_text(\"x y syntaxerror\")\n hello2dir = pytester.mkdir(\"hello2\")\n hello2dir.joinpath(\"test_hello2.py\").write_text(\"x y syntaxerror\")\n hello3dir = pytester.mkdir(\"hallo3\")\n hello3dir.joinpath(\"test_hello3.py\").write_text(\"x y syntaxerror\")\n subdir = pytester.mkdir(\"sub\")\n subdir.joinpath(\"test_hello4.py\").write_text(\"x y syntaxerror\")\n pytester.makepyfile(test_ok=\"def test_pass(): pass\")\n result = pytester.runpytest(\"--ignore-glob=*h[ea]llo*\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_rootdir_option_arg_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_rootdir_option_arg_", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 338, "end_line": 370, "span_ids": ["test_rootdir_wrong_option_arg", "test_rootdir_option_arg"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "@pytest.mark.parametrize(\"path\", [\"root\", \"{relative}/root\", \"{environment}/root\"])\ndef test_rootdir_option_arg(\n pytester: Pytester, monkeypatch: MonkeyPatch, path: str\n) -> None:\n monkeypatch.setenv(\"PY_ROOTDIR_PATH\", str(pytester.path))\n path = path.format(relative=str(pytester.path), environment=\"$PY_ROOTDIR_PATH\")\n\n rootdir = pytester.path / \"root\" / \"tests\"\n rootdir.mkdir(parents=True)\n pytester.makepyfile(\n \"\"\"\n import os\n def test_one():\n assert 1\n \"\"\"\n )\n\n result = pytester.runpytest(f\"--rootdir={path}\")\n result.stdout.fnmatch_lines(\n [\n f\"*rootdir: {pytester.path}/root\",\n \"root/test_rootdir_option_arg.py *\",\n \"*1 passed*\",\n ]\n )\n\n\ndef test_rootdir_wrong_option_arg(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--rootdir=wrong_dir\")\n result.stderr.fnmatch_lines(\n [\"*Directory *wrong_dir* not found. Check your '--rootdir' option.*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail_TestXFail.test_xfail_simple.assert_callreport_wasxfai": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail_TestXFail.test_xfail_simple.assert_callreport_wasxfai", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 290, "end_line": 306, "span_ids": ["TestXFail", "TestXFail.test_xfail_simple"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n @pytest.mark.parametrize(\"strict\", [True, False])\n def test_xfail_simple(self, pytester: Pytester, strict: bool) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.xfail(strict=%s)\n def test_func():\n assert 0\n \"\"\"\n % strict\n )\n reports = runtestprotocol(item, log=False)\n assert len(reports) == 3\n callreport = reports[1]\n assert callreport.skipped\n assert callreport.wasxfail == \"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_not_run_xfail_reporting_TestXFail.test_xfail_not_run_xfail_reporting.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_not_run_xfail_reporting_TestXFail.test_xfail_not_run_xfail_reporting.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 426, "end_line": 450, "span_ids": ["TestXFail.test_xfail_not_run_xfail_reporting"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_not_run_xfail_reporting(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n test_one=\"\"\"\n import pytest\n @pytest.mark.xfail(run=False, reason=\"noway\")\n def test_this():\n assert 0\n @pytest.mark.xfail(\"True\", run=False)\n def test_this_true():\n assert 0\n @pytest.mark.xfail(\"False\", run=False, reason=\"huh\")\n def test_this_false():\n assert 1\n \"\"\"\n )\n result = pytester.runpytest(p, \"-rx\")\n result.stdout.fnmatch_lines(\n [\n \"*test_one*test_this*\",\n \"*NOTRUN*noway\",\n \"*test_one*test_this_true*\",\n \"*NOTRUN*condition:*True*\",\n \"*1 passed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_not_run_no_setup_run_TestXFail.test_xfail_xpass.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_not_run_no_setup_run_TestXFail.test_xfail_xpass.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 452, "end_line": 479, "span_ids": ["TestXFail.test_xfail_xpass", "TestXFail.test_xfail_not_run_no_setup_run"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_not_run_no_setup_run(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n test_one=\"\"\"\n import pytest\n @pytest.mark.xfail(run=False, reason=\"hello\")\n def test_this():\n assert 0\n def setup_module(mod):\n raise ValueError(42)\n \"\"\"\n )\n result = pytester.runpytest(p, \"-rx\")\n result.stdout.fnmatch_lines(\n [\"*test_one*test_this*\", \"*NOTRUN*hello\", \"*1 xfailed*\"]\n )\n\n def test_xfail_xpass(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n test_one=\"\"\"\n import pytest\n @pytest.mark.xfail\n def test_that():\n assert 1\n \"\"\"\n )\n result = pytester.runpytest(p, \"-rX\")\n result.stdout.fnmatch_lines([\"*XPASS*test_that*\", \"*1 xpassed*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_imperative_TestXFail.test_xfail_imperative.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_imperative_TestXFail.test_xfail_imperative.None_2", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 481, "end_line": 494, "span_ids": ["TestXFail.test_xfail_imperative"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_imperative(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_this():\n pytest.xfail(\"hello\")\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*1 xfailed*\"])\n result = pytester.runpytest(p, \"-rx\")\n result.stdout.fnmatch_lines([\"*XFAIL*test_this*\", \"*reason:*hello*\"])\n result = pytester.runpytest(p, \"--runxfail\")\n result.stdout.fnmatch_lines([\"*1 pass*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_imperative_in_setup_function_TestXFail.test_xfail_imperative_in_setup_function.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_imperative_in_setup_function_TestXFail.test_xfail_imperative_in_setup_function.None_2", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 496, "end_line": 517, "span_ids": ["TestXFail.test_xfail_imperative_in_setup_function"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_imperative_in_setup_function(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def setup_function(function):\n pytest.xfail(\"hello\")\n\n def test_this():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*1 xfailed*\"])\n result = pytester.runpytest(p, \"-rx\")\n result.stdout.fnmatch_lines([\"*XFAIL*test_this*\", \"*reason:*hello*\"])\n result = pytester.runpytest(p, \"--runxfail\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *def test_this*\n *1 fail*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_raises_TestXFail.test_xfail_raises.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_raises_TestXFail.test_xfail_raises.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 589, "end_line": 611, "span_ids": ["TestXFail.test_xfail_raises"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n @pytest.mark.parametrize(\n \"expected, actual, matchline\",\n [\n (\"TypeError\", \"TypeError\", \"*1 xfailed*\"),\n (\"(AttributeError, TypeError)\", \"TypeError\", \"*1 xfailed*\"),\n (\"TypeError\", \"IndexError\", \"*1 failed*\"),\n (\"(AttributeError, TypeError)\", \"IndexError\", \"*1 failed*\"),\n ],\n )\n def test_xfail_raises(\n self, expected, actual, matchline, pytester: Pytester\n ) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail(raises=%s)\n def test_raises():\n raise %s()\n \"\"\"\n % (expected, actual)\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([matchline])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_sanity_TestXFail.test_strict_sanity.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_sanity_TestXFail.test_strict_sanity.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 613, "end_line": 626, "span_ids": ["TestXFail.test_strict_sanity"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_strict_sanity(self, pytester: Pytester) -> None:\n \"\"\"Sanity check for xfail(strict=True): a failing test should behave\n exactly like a normal xfail.\"\"\"\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail(reason='unsupported feature', strict=True)\n def test_foo():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p, \"-rxX\")\n result.stdout.fnmatch_lines([\"*XFAIL*\", \"*unsupported feature*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_condition_TestXFail.test_strict_xfail_condition.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_condition_TestXFail.test_strict_xfail_condition.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 655, "end_line": 669, "span_ids": ["TestXFail.test_strict_xfail_condition"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n @pytest.mark.parametrize(\"strict\", [True, 
False])\n def test_strict_xfail_condition(self, pytester: Pytester, strict: bool) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.xfail(False, reason='unsupported feature', strict=%s)\n def test_foo():\n pass\n \"\"\"\n % strict\n )\n result = pytester.runpytest(p, \"-rxX\")\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_condition_keyword_TestXFail.test_xfail_condition_keyword.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_condition_keyword_TestXFail.test_xfail_condition_keyword.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 671, "end_line": 685, "span_ids": ["TestXFail.test_xfail_condition_keyword"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n @pytest.mark.parametrize(\"strict\", [True, False])\n def test_xfail_condition_keyword(self, pytester: Pytester, strict: bool) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.xfail(condition=False, reason='unsupported feature', strict=%s)\n def test_foo():\n pass\n \"\"\"\n % strict\n )\n result = pytester.runpytest(p, \"-rxX\")\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_default_from_file_TestXFail.test_strict_xfail_default_from_file.assert_result_ret_1_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_default_from_file_TestXFail.test_strict_xfail_default_from_file.assert_result_ret_1_i", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 687, "end_line": 709, "span_ids": ["TestXFail.test_strict_xfail_default_from_file"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n @pytest.mark.parametrize(\"strict_val\", [\"true\", \"false\"])\n def test_strict_xfail_default_from_file(\n self, pytester: Pytester, strict_val\n ) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n xfail_strict = %s\n \"\"\"\n % strict_val\n )\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail(reason='unsupported 
feature')\n def test_foo():\n pass\n \"\"\"\n )\n result = pytester.runpytest(p, \"-rxX\")\n strict = strict_val == \"true\"\n result.stdout.fnmatch_lines([\"*1 failed*\" if strict else \"*1 xpassed*\"])\n assert result.ret == (1 if strict else 0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFailwithSetupTeardown_TestXFailwithSetupTeardown.test_failing_teardown_issue9.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFailwithSetupTeardown_TestXFailwithSetupTeardown.test_failing_teardown_issue9.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 739, "end_line": 768, "span_ids": ["TestXFailwithSetupTeardown.test_failing_teardown_issue9", "TestXFailwithSetupTeardown.test_failing_setup_issue9", "TestXFailwithSetupTeardown"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFailwithSetupTeardown:\n def test_failing_setup_issue9(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def setup_function(func):\n assert 0\n\n @pytest.mark.xfail\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 xfail*\"])\n\n def test_failing_teardown_issue9(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def teardown_function(func):\n assert 0\n\n @pytest.mark.xfail\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 xfail*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkip_TestSkip.test_skip_with_reason.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkip_TestSkip.test_skip_with_reason.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 771, "end_line": 836, "span_ids": ["TestSkip.test_skip_with_reason", "TestSkip", "TestSkip.test_skips_on_false_string", "TestSkip.test_arg_as_reason", "TestSkip.test_skip_no_reason", "TestSkip.test_skip_class"], "tokens": 401}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSkip:\n def test_skip_class(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip\n class 
TestSomething(object):\n def test_foo(self):\n pass\n def test_bar(self):\n pass\n\n def test_baz():\n pass\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(skipped=2, passed=1)\n\n def test_skips_on_false_string(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip('False')\n def test_foo():\n pass\n \"\"\"\n )\n rec = pytester.inline_run()\n rec.assertoutcome(skipped=1)\n\n def test_arg_as_reason(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip('testing stuff')\n def test_bar():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines([\"*testing stuff*\", \"*1 skipped*\"])\n\n def test_skip_no_reason(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip\n def test_foo():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines([\"*unconditional skip*\", \"*1 skipped*\"])\n\n def test_skip_with_reason(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip(reason=\"for lolz\")\n def test_bar():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines([\"*for lolz*\", \"*1 skipped*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkip.test_only_skips_marked_test_TestSkip.test_strict_and_skip.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkip.test_only_skips_marked_test_TestSkip.test_strict_and_skip.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 838, "end_line": 865, "span_ids": ["TestSkip.test_strict_and_skip", "TestSkip.test_only_skips_marked_test"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSkip:\n\n def test_only_skips_marked_test(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip\n def test_foo():\n pass\n @pytest.mark.skip(reason=\"nothing in particular\")\n def test_bar():\n pass\n def test_baz():\n assert True\n \"\"\"\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines([\"*nothing in particular*\", \"*1 passed*2 skipped*\"])\n\n def test_strict_and_skip(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip\n def test_hello():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines([\"*unconditional skip*\", \"*1 skipped*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkipif_TestSkipif.test_skipif_reporting.assert_result_ret_0": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkipif_TestSkipif.test_skipif_reporting.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 868, "end_line": 896, "span_ids": ["TestSkipif.test_skipif_conditional", "TestSkipif", "TestSkipif.test_skipif_reporting"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSkipif:\n def test_skipif_conditional(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.skipif(\"hasattr(os, 'sep')\")\n def test_func():\n pass\n \"\"\"\n )\n x = pytest.raises(pytest.skip.Exception, lambda: pytest_runtest_setup(item))\n assert x.value.msg == \"condition: hasattr(os, 'sep')\"\n\n @pytest.mark.parametrize(\n \"params\", [\"\\\"hasattr(sys, 'platform')\\\"\", 'True, reason=\"invalid platform\"']\n )\n def test_skipif_reporting(self, pytester: Pytester, params) -> None:\n p = pytester.makepyfile(\n test_foo=\"\"\"\n import pytest\n @pytest.mark.skipif(%(params)s)\n def test_that():\n assert 0\n \"\"\"\n % dict(params=params)\n )\n result = pytester.runpytest(p, \"-s\", \"-rs\")\n result.stdout.fnmatch_lines([\"*SKIP*1*test_foo.py*platform*\", \"*1 skipped*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkipif.test_skipif_using_platform_TestSkipif.test_skipif_reporting_multiple.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestSkipif.test_skipif_using_platform_TestSkipif.test_skipif_reporting_multiple.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 898, "end_line": 931, "span_ids": ["TestSkipif.test_skipif_using_platform", "TestSkipif.test_skipif_reporting_multiple"], "tokens": 277}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSkipif:\n\n def test_skipif_using_platform(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.skipif(\"platform.platform() == platform.platform()\")\n def test_func():\n pass\n \"\"\"\n )\n pytest.raises(pytest.skip.Exception, lambda: pytest_runtest_setup(item))\n\n @pytest.mark.parametrize(\n \"marker, msg1, msg2\",\n [(\"skipif\", \"SKIP\", \"skipped\"), (\"xfail\", \"XPASS\", \"xpassed\")],\n )\n def test_skipif_reporting_multiple(\n self, pytester: Pytester, marker, msg1, msg2\n ) -> None:\n pytester.makepyfile(\n test_foo=\"\"\"\n import pytest\n @pytest.mark.{marker}(False, reason='first_condition')\n 
@pytest.mark.{marker}(True, reason='second_condition')\n def test_foobar():\n assert 1\n \"\"\".format(\n marker=marker\n )\n )\n result = pytester.runpytest(\"-s\", \"-rsxX\")\n result.stdout.fnmatch_lines(\n [f\"*{msg1}*test_foo.py*second_condition*\", f\"*1 {msg2}*\"]\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skip_not_report_default_test_skipif_class.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skip_not_report_default_test_skipif_class.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 934, "end_line": 965, "span_ids": ["test_skipif_class", "test_skip_not_report_default"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_skip_not_report_default(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n test_one=\"\"\"\n import pytest\n def test_this():\n pytest.skip(\"hello\")\n \"\"\"\n )\n result = pytester.runpytest(p, \"-v\")\n result.stdout.fnmatch_lines(\n [\n # \"*HINT*use*-r*\",\n \"*1 skipped*\"\n ]\n )\n\n\ndef test_skipif_class(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n class TestClass(object):\n pytestmark = pytest.mark.skipif(\"True\")\n def test_that(self):\n assert 0\n def test_though(self):\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*2 skipped*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skipped_reasons_functional_test_skipped_reasons_functional.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skipped_reasons_functional_test_skipped_reasons_functional.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 968, "end_line": 1002, "span_ids": ["test_skipped_reasons_functional"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_skipped_reasons_functional(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_one=\"\"\"\n import pytest\n from conftest import doskip\n\n def setup_function(func):\n doskip()\n\n def test_func():\n pass\n\n class TestClass(object):\n def test_method(self):\n doskip()\n\n @pytest.mark.skip(\"via_decorator\")\n def test_deco(self):\n assert 
0\n \"\"\",\n conftest=\"\"\"\n import pytest, sys\n def doskip():\n assert sys._getframe().f_lineno == 3\n pytest.skip('test')\n \"\"\",\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines_random(\n [\n \"SKIPPED [[]2[]] conftest.py:4: test\",\n \"SKIPPED [[]1[]] test_one.py:14: via_decorator\",\n ]\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skipped_folding_test_skipped_folding.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_skipped_folding_test_skipped_folding.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1005, "end_line": 1021, "span_ids": ["test_skipped_folding"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_skipped_folding(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_one=\"\"\"\n import pytest\n pytestmark = pytest.mark.skip(\"Folding\")\n def setup_function(func):\n pass\n def test_func():\n pass\n class TestClass(object):\n def test_method(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines([\"*SKIP*2*test_one.py: Folding\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_reportchars_test_reportchars_error.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_reportchars_test_reportchars_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1024, "end_line": 1058, "span_ids": ["test_reportchars", "test_reportchars_error"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_reportchars(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_1():\n assert 0\n @pytest.mark.xfail\n def test_2():\n assert 0\n @pytest.mark.xfail\n def test_3():\n pass\n def test_4():\n pytest.skip(\"four\")\n \"\"\"\n )\n result = pytester.runpytest(\"-rfxXs\")\n result.stdout.fnmatch_lines(\n [\"FAIL*test_1*\", \"XFAIL*test_2*\", \"XPASS*test_3*\", \"SKIP*four*\"]\n )\n\n\ndef test_reportchars_error(pytester: Pytester) -> None:\n pytester.makepyfile(\n conftest=\"\"\"\n def pytest_runtest_teardown():\n assert 0\n \"\"\",\n test_simple=\"\"\"\n def 
test_foo():\n pass\n \"\"\",\n )\n result = pytester.runpytest(\"-rE\")\n result.stdout.fnmatch_lines([\"ERROR*test_foo*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_reportchars_all_test_reportchars_all_error.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_reportchars_all_test_reportchars_all_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1061, "end_line": 1106, "span_ids": ["test_reportchars_all", "test_reportchars_all_error"], "tokens": 268}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_reportchars_all(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_1():\n assert 0\n @pytest.mark.xfail\n def test_2():\n assert 0\n @pytest.mark.xfail\n def test_3():\n pass\n def test_4():\n pytest.skip(\"four\")\n @pytest.fixture\n def fail():\n assert 0\n def test_5(fail):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-ra\")\n result.stdout.fnmatch_lines(\n [\n \"SKIP*four*\",\n \"XFAIL*test_2*\",\n \"XPASS*test_3*\",\n \"ERROR*test_5*\",\n \"FAIL*test_1*\",\n ]\n )\n\n\ndef test_reportchars_all_error(pytester: Pytester) -> None:\n pytester.makepyfile(\n conftest=\"\"\"\n def pytest_runtest_teardown():\n assert 0\n \"\"\",\n test_simple=\"\"\"\n def test_foo():\n pass\n \"\"\",\n )\n result = pytester.runpytest(\"-ra\")\n result.stdout.fnmatch_lines([\"ERROR*test_foo*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_errors_in_xfail_skip_expressions_test_errors_in_xfail_skip_expressions.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_errors_in_xfail_skip_expressions_test_errors_in_xfail_skip_expressions.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1109, "end_line": 1143, "span_ids": ["test_errors_in_xfail_skip_expressions"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_errors_in_xfail_skip_expressions(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skipif(\"asd\")\n def test_nameerror():\n pass\n @pytest.mark.xfail(\"syntax error\")\n def test_syntax():\n pass\n\n def test_func():\n pass\n \"\"\"\n )\n result = 
pytester.runpytest()\n markline = \" ^\"\n pypy_version_info = getattr(sys, \"pypy_version_info\", None)\n if pypy_version_info is not None and pypy_version_info < (6,):\n markline = markline[5:]\n elif sys.version_info >= (3, 8) or hasattr(sys, \"pypy_version_info\"):\n markline = markline[4:]\n result.stdout.fnmatch_lines(\n [\n \"*ERROR*test_nameerror*\",\n \"*evaluating*skipif*condition*\",\n \"*asd*\",\n \"*ERROR*test_syntax*\",\n \"*evaluating*xfail*condition*\",\n \" syntax error\",\n markline,\n \"SyntaxError: invalid syntax\",\n \"*1 pass*2 errors*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_xfail_skipif_with_globals_test_xfail_skipif_with_globals.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_xfail_skipif_with_globals_test_xfail_skipif_with_globals.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1146, "end_line": 1160, "span_ids": ["test_xfail_skipif_with_globals"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_xfail_skipif_with_globals(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n x = 3\n @pytest.mark.skipif(\"x == 3\")\n def test_skip1():\n pass\n @pytest.mark.xfail(\"x == 3\")\n def test_boolean():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"-rsx\")\n result.stdout.fnmatch_lines([\"*SKIP*x == 3*\", \"*XFAIL*test_boolean*\", \"*x == 3*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_imperativeskip_on_xfail_test_test_imperativeskip_on_xfail_test.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_imperativeskip_on_xfail_test_test_imperativeskip_on_xfail_test.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1194, "end_line": 1221, "span_ids": ["test_imperativeskip_on_xfail_test"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_imperativeskip_on_xfail_test(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test_that_fails():\n assert 0\n\n @pytest.mark.skipif(\"True\")\n def test_hello():\n pass\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n def 
pytest_runtest_setup(item):\n pytest.skip(\"abc\")\n \"\"\"\n )\n result = pytester.runpytest(\"-rsxX\")\n result.stdout.fnmatch_lines_random(\n \"\"\"\n *SKIP*abc*\n *SKIP*condition: True*\n *2 skipped*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_xfail_item_test_xfail_item.assert_xfailed": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_xfail_item_test_xfail_item.assert_xfailed", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1279, "end_line": 1298, "span_ids": ["test_xfail_item"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_xfail_item(pytester: Pytester) -> None:\n # Ensure pytest.xfail works with non-Python Item\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n class MyItem(pytest.Item):\n nodeid = 'foo'\n def runtest(self):\n pytest.xfail(\"Expected Failure\")\n\n def pytest_collect_file(path, parent):\n return MyItem.from_parent(name=\"foo\", parent=parent)\n \"\"\"\n )\n result = pytester.inline_run()\n passed, skipped, failed = result.listoutcomes()\n assert not failed\n xfailed = [r for r in skipped if hasattr(r, \"wasxfail\")]\n assert xfailed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_module_level_skip_error_test_invalid_skip_keyword_parameter.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_module_level_skip_error_test_invalid_skip_keyword_parameter.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1301, "end_line": 1345, "span_ids": ["test_module_level_skip_with_allow_module_level", "test_module_level_skip_error", "test_invalid_skip_keyword_parameter"], "tokens": 287}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_module_level_skip_error(pytester: Pytester) -> None:\n \"\"\"Verify that using pytest.skip at module level causes a collection error.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n pytest.skip(\"skip_module_level\")\n\n def test_func():\n assert True\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"*Using pytest.skip outside of a test is not allowed*\"]\n )\n\n\ndef test_module_level_skip_with_allow_module_level(pytester: Pytester) -> None:\n \"\"\"Verify that using 
pytest.skip(allow_module_level=True) is allowed.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n pytest.skip(\"skip_module_level\", allow_module_level=True)\n\n def test_func():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"-rxs\")\n result.stdout.fnmatch_lines([\"*SKIP*skip_module_level\"])\n\n\ndef test_invalid_skip_keyword_parameter(pytester: Pytester) -> None:\n \"\"\"Verify that using pytest.skip() with unknown parameter raises an error.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n pytest.skip(\"skip_module_level\", unknown=1)\n\n def test_func():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*TypeError:*['unknown']*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_mark_xfail_item_test_mark_xfail_item.assert_xfailed": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_mark_xfail_item_test_mark_xfail_item.assert_xfailed", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1348, "end_line": 1372, "span_ids": ["test_mark_xfail_item"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_mark_xfail_item(pytester: Pytester) -> None:\n # Ensure pytest.mark.xfail works with non-Python Item\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n class MyItem(pytest.Item):\n nodeid = 'foo'\n def setup(self):\n marker = pytest.mark.xfail(\"1 == 2\", reason=\"Expected failure - false\")\n self.add_marker(marker)\n marker = pytest.mark.xfail(True, reason=\"Expected failure - true\")\n self.add_marker(marker)\n def runtest(self):\n assert False\n\n def pytest_collect_file(path, parent):\n return MyItem.from_parent(name=\"foo\", parent=parent)\n \"\"\"\n )\n result = pytester.inline_run()\n passed, skipped, failed = result.listoutcomes()\n assert not failed\n xfailed = [r for r in skipped if hasattr(r, \"wasxfail\")]\n assert xfailed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_fail_and_continue_with_stepwise_test_fail_and_continue_with_stepwise.None_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_fail_and_continue_with_stepwise_test_fail_and_continue_with_stepwise.None_7", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 114, "end_line": 135, "span_ids": ["test_fail_and_continue_with_stepwise"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fail_and_continue_with_stepwise(stepwise_pytester: Pytester) -> None:\n # Run the tests with a failing second test.\n result = stepwise_pytester.runpytest(\n \"-v\", \"--strict-markers\", \"--stepwise\", \"--fail\"\n )\n assert _strip_resource_warnings(result.stderr.lines) == []\n\n stdout = result.stdout.str()\n # Make sure we stop after first failing test.\n assert \"test_success_before_fail PASSED\" in stdout\n assert \"test_fail_on_flag FAILED\" in stdout\n assert \"test_success_after_fail\" not in stdout\n\n # \"Fix\" the test that failed in the last run and run it again.\n result = stepwise_pytester.runpytest(\"-v\", \"--strict-markers\", \"--stepwise\")\n assert _strip_resource_warnings(result.stderr.lines) == []\n\n stdout = result.stdout.str()\n # Make sure the latest failing test runs and then continues.\n assert \"test_success_before_fail\" not in stdout\n assert \"test_fail_on_flag PASSED\" in stdout\n assert \"test_success_after_fail PASSED\" in stdout", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_plugin_nameversion_test_plugin_nameversion.assert_result_expected": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_plugin_nameversion_test_plugin_nameversion.assert_result_expected", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 57, "end_line": 75, "span_ids": ["test_plugin_nameversion"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"input,expected\",\n [\n ([DistInfo(project_name=\"test\", version=1)], [\"test-1\"]),\n ([DistInfo(project_name=\"pytest-test\", version=1)], [\"test-1\"]),\n (\n [\n DistInfo(project_name=\"test\", version=1),\n DistInfo(project_name=\"test\", version=1),\n ],\n [\"test-1\"],\n ),\n ],\n ids=[\"normal\", \"prefix-strip\", \"deduplicate\"],\n)\ndef test_plugin_nameversion(input, expected):\n pluginlist = [(None, x) for x in input]\n result = _plugin_nameversions(pluginlist)\n assert result == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal_TestTerminal.test_pass_skip_fail.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal_TestTerminal.test_pass_skip_fail.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 82, "end_line": 110, "span_ids": ["TestTerminal", "TestTerminal.test_pass_skip_fail"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n def test_pass_skip_fail(self, pytester: Pytester, option) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_ok():\n pass\n def test_skip():\n pytest.skip(\"xx\")\n def test_func():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(*option.args)\n if option.verbosity > 0:\n result.stdout.fnmatch_lines(\n [\n \"*test_pass_skip_fail.py::test_ok PASS*\",\n \"*test_pass_skip_fail.py::test_skip SKIP*\",\n \"*test_pass_skip_fail.py::test_func FAIL*\",\n ]\n )\n elif option.verbosity == 0:\n result.stdout.fnmatch_lines([\"*test_pass_skip_fail.py .sF*\"])\n else:\n result.stdout.fnmatch_lines([\".sF*\"])\n result.stdout.fnmatch_lines(\n [\" def test_func():\", \"> assert 0\", \"E assert 0\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_internalerror_TestTerminal.test_writeline.assert_lines_2_hello": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_internalerror_TestTerminal.test_writeline.assert_lines_2_hello", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 112, "end_line": 128, "span_ids": ["TestTerminal.test_internalerror", "TestTerminal.test_writeline"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_internalerror(self, pytester: Pytester, linecomp) -> None:\n modcol = pytester.getmodulecol(\"def test_one(): pass\")\n rep = TerminalReporter(modcol.config, file=linecomp.stringio)\n with pytest.raises(ValueError) as excinfo:\n raise ValueError(\"hello\")\n rep.pytest_internalerror(excinfo.getrepr())\n linecomp.assert_contains_lines([\"INTERNALERROR> *ValueError*hello*\"])\n\n def test_writeline(self, pytester: Pytester, linecomp) -> None:\n modcol = pytester.getmodulecol(\"def test_one(): pass\")\n rep = TerminalReporter(modcol.config, file=linecomp.stringio)\n rep.write_fspath_result(modcol.nodeid, \".\")\n rep.write_line(\"hello world\")\n lines = linecomp.stringio.getvalue().split(\"\\n\")\n assert not lines[0]\n assert lines[1].endswith(modcol.name + \" .\")\n assert lines[2] == \"hello world\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_show_runtest_logstart_TestTerminal.test_runtest_location_shown_before_test_starts.child_kill_15_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_show_runtest_logstart_TestTerminal.test_runtest_location_shown_before_test_starts.child_kill_15_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 130, "end_line": 153, "span_ids": ["TestTerminal.test_runtest_location_shown_before_test_starts", "TestTerminal.test_show_runtest_logstart"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_show_runtest_logstart(self, pytester: Pytester, linecomp) -> None:\n item = pytester.getitem(\"def test_func(): pass\")\n tr = TerminalReporter(item.config, file=linecomp.stringio)\n item.config.pluginmanager.register(tr)\n location = item.reportinfo()\n tr.config.hook.pytest_runtest_logstart(\n nodeid=item.nodeid, location=location, fspath=str(item.fspath)\n )\n linecomp.assert_contains_lines([\"*test_show_runtest_logstart.py*\"])\n\n def test_runtest_location_shown_before_test_starts(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_1():\n import time\n time.sleep(20)\n \"\"\"\n )\n child = pytester.spawn_pytest(\"\")\n child.expect(\".*test_runtest_location.*py\")\n child.sendeof()\n child.kill(15)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_keyboard_interrupt_TestTerminal.test_keyboard_interrupt.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_keyboard_interrupt_TestTerminal.test_keyboard_interrupt.None_2", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 255, "end_line": 285, "span_ids": ["TestTerminal.test_keyboard_interrupt"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n @pytest.mark.parametrize(\"fulltrace\", (\"\", \"--fulltrace\"))\n def test_keyboard_interrupt(self, pytester: Pytester, fulltrace) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foobar():\n assert 0\n def test_spamegg():\n import py; pytest.skip('skip me please!')\n def test_interrupt_me():\n raise KeyboardInterrupt # simulating the user\n \"\"\"\n )\n\n result = pytester.runpytest(fulltrace, no_reraise_ctrlc=True)\n result.stdout.fnmatch_lines(\n [\n \" def test_foobar():\",\n \"> assert 0\",\n \"E assert 0\",\n \"*_keyboard_interrupt.py:6: KeyboardInterrupt*\",\n ]\n )\n if fulltrace:\n result.stdout.fnmatch_lines(\n [\"*raise KeyboardInterrupt # simulating the user*\"]\n )\n else:\n result.stdout.fnmatch_lines(\n [\"(to show a full traceback on KeyboardInterrupt use 
--full-trace)\"]\n )\n result.stdout.fnmatch_lines([\"*KeyboardInterrupt*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_keyboard_in_sessionstart_TestTerminal.test_rewrite.assert_f_getvalue_h": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_keyboard_in_sessionstart_TestTerminal.test_rewrite.assert_f_getvalue_h", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 287, "end_line": 324, "span_ids": ["TestTerminal.test_keyboard_in_sessionstart", "TestTerminal.test_collect_single_item", "TestTerminal.test_rewrite"], "tokens": 285}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_keyboard_in_sessionstart(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_sessionstart():\n raise KeyboardInterrupt\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_foobar():\n pass\n \"\"\"\n )\n\n result = pytester.runpytest(no_reraise_ctrlc=True)\n assert result.ret == 2\n result.stdout.fnmatch_lines([\"*KeyboardInterrupt*\"])\n\n def test_collect_single_item(self, pytester: Pytester) -> None:\n \"\"\"Use singular 'item' when reporting a single test item\"\"\"\n pytester.makepyfile(\n \"\"\"\n def test_foobar():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 1 item\"])\n\n def test_rewrite(self, pytester: Pytester, monkeypatch) -> None:\n config = pytester.parseconfig()\n f = StringIO()\n monkeypatch.setattr(f, \"isatty\", lambda *args: True)\n tr = TerminalReporter(config, f)\n tr._tw.fullwidth = 10\n tr.write(\"hello\")\n tr.rewrite(\"hey\", erase=True)\n assert f.getvalue() == \"hello\" + \"\\r\" + \"hey\" + (6 * \" \")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_simple_TestCollectonly.test_collectonly_simple.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_simple_TestCollectonly.test_collectonly_simple.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 475, "end_line": 495, "span_ids": ["TestCollectonly.test_collectonly_simple"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectonly:\n\n 
def test_collectonly_simple(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test_func1():\n pass\n class TestClass(object):\n def test_method(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\", p)\n # assert stderr.startswith(\"inserting into sys.path\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"*\",\n \"* \",\n \"* \",\n \"* \",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_error_TestCollectonly.test_collectonly_more_quiet.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_error_TestCollectonly.test_collectonly_more_quiet.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 497, "end_line": 529, "span_ids": ["TestCollectonly.test_collectonly_quiet", "TestCollectonly.test_collectonly_more_quiet", "TestCollectonly.test_collectonly_missing_path", "TestCollectonly.test_collectonly_error"], "tokens": 322}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectonly:\n\n def test_collectonly_error(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\"import Errlkjqweqwe\")\n result = pytester.runpytest(\"--collect-only\", p)\n assert result.ret == 2\n result.stdout.fnmatch_lines(\n textwrap.dedent(\n \"\"\"\\\n *ERROR*\n *ImportError*\n *No module named *Errlk*\n *1 error*\n \"\"\"\n ).strip()\n )\n\n def test_collectonly_missing_path(self, pytester: Pytester) -> None:\n \"\"\"Issue 115: failure in parseargs will cause session not to\n have the items attribute.\"\"\"\n result = pytester.runpytest(\"--collect-only\", \"uhm_missing_path\")\n assert result.ret == 4\n result.stderr.fnmatch_lines(\n [\"*ERROR: file or directory not found: uhm_missing_path\"]\n )\n\n def test_collectonly_quiet(self, pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_foo(): pass\")\n result = pytester.runpytest(\"--collect-only\", \"-q\")\n result.stdout.fnmatch_lines([\"*test_foo*\"])\n\n def test_collectonly_more_quiet(self, pytester: Pytester) -> None:\n pytester.makepyfile(test_fun=\"def test_foo(): pass\")\n result = pytester.runpytest(\"--collect-only\", \"-qq\")\n result.stdout.fnmatch_lines([\"*test_fun.py: 1*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting_TestFixtureReporting.test_setup_fixture_error.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting_TestFixtureReporting.test_setup_fixture_error.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": 
"test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 574, "end_line": 595, "span_ids": ["TestFixtureReporting.test_setup_fixture_error", "TestFixtureReporting"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureReporting:\n def test_setup_fixture_error(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def setup_function(function):\n print(\"setup func\")\n assert 0\n def test_nada():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*ERROR at setup of test_nada*\",\n \"*setup_function(function):*\",\n \"*setup func*\",\n \"*assert 0*\",\n \"*1 error*\",\n ]\n )\n assert result.ret != 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_teardown_fixture_error_TestFixtureReporting.test_teardown_fixture_error.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_teardown_fixture_error_TestFixtureReporting.test_teardown_fixture_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 597, "end_line": 617, "span_ids": ["TestFixtureReporting.test_teardown_fixture_error"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureReporting:\n\n def test_teardown_fixture_error(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_nada():\n pass\n def teardown_function(function):\n print(\"teardown func\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*ERROR at teardown*\",\n \"*teardown_function(function):*\",\n \"*assert 0*\",\n \"*Captured stdout*\",\n \"*teardown func*\",\n \"*1 passed*1 error*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_teardown_fixture_error_and_test_failure_TestFixtureReporting.test_teardown_fixture_error_and_test_failure.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_teardown_fixture_error_and_test_failure_TestFixtureReporting.test_teardown_fixture_error_and_test_failure.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 619, 
"end_line": 643, "span_ids": ["TestFixtureReporting.test_teardown_fixture_error_and_test_failure"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureReporting:\n\n def test_teardown_fixture_error_and_test_failure(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_fail():\n assert 0, \"failingfunc\"\n\n def teardown_function(function):\n print(\"teardown func\")\n assert False\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*ERROR at teardown of test_fail*\",\n \"*teardown_function(function):*\",\n \"*assert False*\",\n \"*Captured stdout*\",\n \"*teardown func*\",\n \"*test_fail*\",\n \"*def test_fail():\",\n \"*failingfunc*\",\n \"*1 failed*1 error*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_setup_teardown_output_and_test_failure_TestFixtureReporting.test_setup_teardown_output_and_test_failure.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestFixtureReporting.test_setup_teardown_output_and_test_failure_TestFixtureReporting.test_setup_teardown_output_and_test_failure.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 645, "end_line": 671, "span_ids": ["TestFixtureReporting.test_setup_teardown_output_and_test_failure"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureReporting:\n\n def test_setup_teardown_output_and_test_failure(self, pytester: Pytester) -> None:\n \"\"\"Test for issue #442.\"\"\"\n pytester.makepyfile(\n \"\"\"\n def setup_function(function):\n print(\"setup func\")\n\n def test_fail():\n assert 0, \"failingfunc\"\n\n def teardown_function(function):\n print(\"teardown func\")\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*test_fail*\",\n \"*def test_fail():\",\n \"*failingfunc*\",\n \"*Captured stdout setup*\",\n \"*setup func*\",\n \"*Captured stdout teardown*\",\n \"*teardown func*\",\n \"*1 failed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional_TestTerminalFunctional.test_deselected.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional_TestTerminalFunctional.test_deselected.assert_result_ret_0", "embedding": null, "metadata": {"file_path": 
"testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 674, "end_line": 690, "span_ids": ["TestTerminalFunctional.test_deselected", "TestTerminalFunctional"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n def test_deselected(self, pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n def test_one():\n pass\n def test_two():\n pass\n def test_three():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-k\", \"test_two:\", testpath)\n result.stdout.fnmatch_lines(\n [\"collected 3 items / 1 deselected / 2 selected\", \"*test_deselected.py ..*\"]\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_deselected_with_hookwrapper_TestTerminalFunctional.test_deselected_with_hookwrapper.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_deselected_with_hookwrapper_TestTerminalFunctional.test_deselected_with_hookwrapper.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 692, "end_line": 721, "span_ids": ["TestTerminalFunctional.test_deselected_with_hookwrapper"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_deselected_with_hookwrapper(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.hookimpl(hookwrapper=True)\n def pytest_collection_modifyitems(config, items):\n yield\n deselected = items.pop()\n config.hook.pytest_deselected(items=[deselected])\n \"\"\"\n )\n testpath = pytester.makepyfile(\n \"\"\"\n def test_one():\n pass\n def test_two():\n pass\n def test_three():\n pass\n \"\"\"\n )\n result = pytester.runpytest(testpath)\n result.stdout.fnmatch_lines(\n [\n \"collected 3 items / 1 deselected / 2 selected\",\n \"*= 2 passed, 1 deselected in*\",\n ]\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_show_deselected_items_using_markexpr_before_test_execution_TestTerminalFunctional.test_show_deselected_items_using_markexpr_before_test_execution.assert_result_ret_0": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_show_deselected_items_using_markexpr_before_test_execution_TestTerminalFunctional.test_show_deselected_items_using_markexpr_before_test_execution.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 723, "end_line": 751, "span_ids": ["TestTerminalFunctional.test_show_deselected_items_using_markexpr_before_test_execution"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_show_deselected_items_using_markexpr_before_test_execution(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n test_show_deselected=\"\"\"\n import pytest\n\n @pytest.mark.foo\n def test_foobar():\n pass\n\n @pytest.mark.bar\n def test_bar():\n pass\n\n def test_pass():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-m\", \"not foo\")\n result.stdout.fnmatch_lines(\n [\n \"collected 3 items / 1 deselected / 2 selected\",\n \"*test_show_deselected.py ..*\",\n \"*= 2 passed, 1 deselected in * =*\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*= 1 deselected =*\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_skip_summary_if_failure_TestTerminalFunctional.test_passes.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_skip_summary_if_failure_TestTerminalFunctional.test_passes.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 753, "end_line": 786, "span_ids": ["TestTerminalFunctional.test_no_skip_summary_if_failure", "TestTerminalFunctional.test_passes"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_no_skip_summary_if_failure(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_ok():\n pass\n def test_fail():\n assert 0\n def test_skip():\n pytest.skip(\"dontshow\")\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.stdout.str().find(\"skip test summary\") == -1\n assert result.ret == 1\n\n def test_passes(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_passes():\n pass\n class TestClass(object):\n def test_method(self):\n pass\n \"\"\"\n )\n old = p1.parent\n pytester.chdir()\n try:\n result = pytester.runpytest()\n finally:\n os.chdir(old)\n result.stdout.fnmatch_lines([\"test_passes.py ..*\", \"* 2 pass*\"])\n 
assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_trailer_info_TestTerminalFunctional.test_header_trailer_info.if_request_config_pluginm.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_trailer_info_TestTerminalFunctional.test_header_trailer_info.if_request_config_pluginm.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 788, "end_line": 816, "span_ids": ["TestTerminalFunctional.test_header_trailer_info"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_header_trailer_info(\n self, monkeypatch: MonkeyPatch, pytester: Pytester, request\n ) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\")\n pytester.makepyfile(\n \"\"\"\n def test_passes():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n verinfo = \".\".join(map(str, sys.version_info[:3]))\n result.stdout.fnmatch_lines(\n [\n \"*===== test session starts ====*\",\n \"platform %s -- Python %s*pytest-%s*py-%s*pluggy-%s\"\n % (\n sys.platform,\n verinfo,\n pytest.__version__,\n py.__version__,\n pluggy.__version__,\n ),\n \"*test_header_trailer_info.py .*\",\n \"=* 1 passed*in *.[0-9][0-9]s *=\",\n ]\n )\n if request.config.pluginmanager.list_plugin_distinfo():\n result.stdout.fnmatch_lines([\"plugins: *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_TestTerminalFunctional.test_header.None_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_TestTerminalFunctional.test_header.None_7", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 843, "end_line": 870, "span_ids": ["TestTerminalFunctional.test_header"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_header(self, pytester: Pytester) -> None:\n pytester.path.joinpath(\"tests\").mkdir()\n pytester.path.joinpath(\"gui\").mkdir()\n\n # no ini file\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"rootdir: *test_header0\"])\n\n # with configfile\n pytester.makeini(\"\"\"[pytest]\"\"\")\n result = 
pytester.runpytest()\n result.stdout.fnmatch_lines([\"rootdir: *test_header0, configfile: tox.ini\"])\n\n # with testpaths option, and not passing anything in the command-line\n pytester.makeini(\n \"\"\"\n [pytest]\n testpaths = tests gui\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"rootdir: *test_header0, configfile: tox.ini, testpaths: tests, gui\"]\n )\n\n # with testpaths option, passing directory in command-line: do not show testpaths then\n result = pytester.runpytest(\"tests\")\n result.stdout.fnmatch_lines([\"rootdir: *test_header0, configfile: tox.ini\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_verbose_reporting_xdist_TestTerminalFunctional.test_verbose_reporting_xdist.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_verbose_reporting_xdist_TestTerminalFunctional.test_verbose_reporting_xdist.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 996, "end_line": 1013, "span_ids": ["TestTerminalFunctional.test_verbose_reporting_xdist"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_verbose_reporting_xdist(\n self,\n verbose_testfile,\n monkeypatch: MonkeyPatch,\n pytester: Pytester,\n pytestconfig,\n ) -> None:\n if not pytestconfig.pluginmanager.get_plugin(\"xdist\"):\n pytest.skip(\"xdist plugin not installed\")\n\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\")\n result = pytester.runpytest(\n verbose_testfile, \"-v\", \"-n 1\", \"-Walways::pytest.PytestWarning\"\n )\n result.stdout.fnmatch_lines(\n [\"*FAIL*test_verbose_reporting_xdist.py::test_fail*\"]\n )\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_quiet_reporting_TestTerminalFunctional.test_more_quiet_reporting.assert_passed_not_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_quiet_reporting_TestTerminalFunctional.test_more_quiet_reporting.assert_passed_not_in_s", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1015, "end_line": 1031, "span_ids": ["TestTerminalFunctional.test_quiet_reporting", "TestTerminalFunctional.test_more_quiet_reporting"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_quiet_reporting(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"def test_pass(): pass\")\n result = pytester.runpytest(p1, \"-q\")\n s = result.stdout.str()\n assert \"test session starts\" not in s\n assert p1.name not in s\n assert \"===\" not in s\n assert \"passed\" in s\n\n def test_more_quiet_reporting(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"def test_pass(): pass\")\n result = pytester.runpytest(p1, \"-qq\")\n s = result.stdout.str()\n assert \"test session starts\" not in s\n assert p1.name not in s\n assert \"===\" not in s\n assert \"passed\" not in s", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_report_collectionfinish_hook_TestTerminalFunctional.test_report_collectionfinish_hook.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_report_collectionfinish_hook_TestTerminalFunctional.test_report_collectionfinish_hook.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1033, "end_line": 1052, "span_ids": ["TestTerminalFunctional.test_report_collectionfinish_hook"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n @pytest.mark.parametrize(\n \"params\", [(), (\"--collect-only\",)], ids=[\"no-params\", \"collect-only\"]\n )\n def test_report_collectionfinish_hook(self, pytester: Pytester, params) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_report_collectionfinish(config, startpath, startdir, items):\n return ['hello from hook: {0} items'.format(len(items))]\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(3))\n def test(i):\n pass\n \"\"\"\n )\n result = pytester.runpytest(*params)\n result.stdout.fnmatch_lines([\"collected 3 items\", \"hello from hook: 3 items\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_pass_output_reporting_test_pass_output_reporting.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_pass_output_reporting_test_pass_output_reporting.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1118, "end_line": 1155, "span_ids": ["test_pass_output_reporting"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pass_output_reporting(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def setup_module():\n print(\"setup_module\")\n\n def teardown_module():\n print(\"teardown_module\")\n\n def test_pass_has_output():\n print(\"Four score and seven years ago...\")\n\n def test_pass_no_output():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n s = result.stdout.str()\n assert \"test_pass_has_output\" not in s\n assert \"Four score and seven years ago...\" not in s\n assert \"test_pass_no_output\" not in s\n result = pytester.runpytest(\"-rPp\")\n result.stdout.fnmatch_lines(\n [\n \"*= PASSES =*\",\n \"*_ test_pass_has_output _*\",\n \"*- Captured stdout setup -*\",\n \"setup_module\",\n \"*- Captured stdout call -*\",\n \"Four score and seven years ago...\",\n \"*- Captured stdout teardown -*\",\n \"teardown_module\",\n \"*= short test summary info =*\",\n \"PASSED test_pass_output_reporting.py::test_pass_has_output\",\n \"PASSED test_pass_output_reporting.py::test_pass_no_output\",\n \"*= 2 passed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminalreporter_reportopt_addopts_test_terminalreporter_reportopt_addopts.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminalreporter_reportopt_addopts_test_terminalreporter_reportopt_addopts.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1306, "end_line": 1322, "span_ids": ["test_terminalreporter_reportopt_addopts"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_terminalreporter_reportopt_addopts(pytester: Pytester) -> None:\n pytester.makeini(\"[pytest]\\naddopts=-rs\")\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def tr(request):\n tr = request.config.pluginmanager.getplugin(\"terminalreporter\")\n return tr\n def test_opt(tr):\n assert tr.hasopt('skipped')\n assert not tr.hasopt('qwe')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting_TestGenericReporting.test_maxfailures.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting_TestGenericReporting.test_maxfailures.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": 
"test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1355, "end_line": 1385, "span_ids": ["TestGenericReporting.test_collect_fail", "TestGenericReporting", "TestGenericReporting.test_maxfailures"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGenericReporting:\n \"\"\"Test class which can be subclassed with a different option provider to\n run e.g. distributed tests.\"\"\"\n\n def test_collect_fail(self, pytester: Pytester, option) -> None:\n pytester.makepyfile(\"import xyz\\n\")\n result = pytester.runpytest(*option.args)\n result.stdout.fnmatch_lines(\n [\"ImportError while importing*\", \"*No module named *xyz*\", \"*1 error*\"]\n )\n\n def test_maxfailures(self, pytester: Pytester, option) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_1():\n assert 0\n def test_2():\n assert 0\n def test_3():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"--maxfail=2\", *option.args)\n result.stdout.fnmatch_lines(\n [\n \"*def test_1():*\",\n \"*def test_2():*\",\n \"*! stopping after 2 failures !*\",\n \"*2 failed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_tb_option_TestGenericReporting.test_tb_option.for_tbopt_in_long_sh.if_tbopt_no_.else_.assert_IndexError_not_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_tb_option_TestGenericReporting.test_tb_option.for_tbopt_in_long_sh.if_tbopt_no_.else_.assert_IndexError_not_i", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1406, "end_line": 1431, "span_ids": ["TestGenericReporting.test_tb_option"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGenericReporting:\n\n def test_tb_option(self, pytester: Pytester, option) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def g():\n raise IndexError\n def test_func():\n print(6*7)\n g() # --calling--\n \"\"\"\n )\n for tbopt in [\"long\", \"short\", \"no\"]:\n print(\"testing --tb=%s...\" % tbopt)\n result = pytester.runpytest(\"-rN\", \"--tb=%s\" % tbopt)\n s = result.stdout.str()\n if tbopt == \"long\":\n assert \"print(6*7)\" in s\n else:\n assert \"print(6*7)\" not in s\n if tbopt != \"no\":\n assert \"--calling--\" in s\n assert \"IndexError\" in s\n else:\n assert \"FAILURES\" not in s\n assert \"--calling--\" not in s\n assert \"IndexError\" not in s", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_tb_crashline_TestGenericReporting.test_tb_crashline.assert_def_test_func2_n": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_tb_crashline_TestGenericReporting.test_tb_crashline.assert_def_test_func2_n", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1433, "end_line": 1452, "span_ids": ["TestGenericReporting.test_tb_crashline"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGenericReporting:\n\n def test_tb_crashline(self, pytester: Pytester, option) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def g():\n raise IndexError\n def test_func1():\n print(6*7)\n g() # --calling--\n def test_func2():\n assert 0, \"hello\"\n \"\"\"\n )\n result = pytester.runpytest(\"--tb=line\")\n bn = p.name\n result.stdout.fnmatch_lines(\n [\"*%s:3: IndexError*\" % bn, \"*%s:8: AssertionError: hello*\" % bn]\n )\n s = result.stdout.str()\n assert \"def test_func2\" not in s", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_pytest_report_header_TestGenericReporting.test_pytest_report_header.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_pytest_report_header_TestGenericReporting.test_pytest_report_header.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1454, "end_line": 1470, "span_ids": ["TestGenericReporting.test_pytest_report_header"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGenericReporting:\n\n def test_pytest_report_header(self, pytester: Pytester, option) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_sessionstart(session):\n session.config._somevalue = 42\n def pytest_report_header(config):\n return \"hello: %s\" % config._somevalue\n \"\"\"\n )\n pytester.mkdir(\"a\").joinpath(\"conftest.py\").write_text(\n \"\"\"\ndef pytest_report_header(config, startdir, startpath):\n return [\"line1\", str(startpath)]\n\"\"\"\n )\n result = pytester.runpytest(\"a\")\n result.stdout.fnmatch_lines([\"*hello: 42*\", \"line1\", str(pytester.path)])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_show_capture_TestGenericReporting.test_show_capture.None_11": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_show_capture_TestGenericReporting.test_show_capture.None_11", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1472, "end_line": 1521, "span_ids": ["TestGenericReporting.test_show_capture"], "tokens": 417}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGenericReporting:\n\n def test_show_capture(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n import logging\n def test_one():\n sys.stdout.write('!This is stdout!')\n sys.stderr.write('!This is stderr!')\n logging.warning('!This is a warning log msg!')\n assert False, 'Something failed'\n \"\"\"\n )\n\n result = pytester.runpytest(\"--tb=short\")\n result.stdout.fnmatch_lines(\n [\n \"!This is stdout!\",\n \"!This is stderr!\",\n \"*WARNING*!This is a warning log msg!\",\n ]\n )\n\n result = pytester.runpytest(\"--show-capture=all\", \"--tb=short\")\n result.stdout.fnmatch_lines(\n [\n \"!This is stdout!\",\n \"!This is stderr!\",\n \"*WARNING*!This is a warning log msg!\",\n ]\n )\n\n stdout = pytester.runpytest(\"--show-capture=stdout\", \"--tb=short\").stdout.str()\n assert \"!This is stderr!\" not in stdout\n assert \"!This is stdout!\" in stdout\n assert \"!This is a warning log msg!\" not in stdout\n\n stdout = pytester.runpytest(\"--show-capture=stderr\", \"--tb=short\").stdout.str()\n assert \"!This is stdout!\" not in stdout\n assert \"!This is stderr!\" in stdout\n assert \"!This is a warning log msg!\" not in stdout\n\n stdout = pytester.runpytest(\"--show-capture=log\", \"--tb=short\").stdout.str()\n assert \"!This is stdout!\" not in stdout\n assert \"!This is stderr!\" not in stdout\n assert \"!This is a warning log msg!\" in stdout\n\n stdout = pytester.runpytest(\"--show-capture=no\", \"--tb=short\").stdout.str()\n assert \"!This is stdout!\" not in stdout\n assert \"!This is stderr!\" not in stdout\n assert \"!This is a warning log msg!\" not in stdout", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_show_capture_with_teardown_logs_TestGenericReporting.test_show_capture_with_teardown_logs.None_11": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_show_capture_with_teardown_logs_TestGenericReporting.test_show_capture_with_teardown_logs.None_11", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1523, "end_line": 1561, "span_ids": ["TestGenericReporting.test_show_capture_with_teardown_logs"], "tokens": 308}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGenericReporting:\n\n def test_show_capture_with_teardown_logs(self, pytester: Pytester) -> None:\n \"\"\"Ensure that the capturing of teardown logs honor --show-capture setting\"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n import sys\n import pytest\n\n @pytest.fixture(scope=\"function\", autouse=\"True\")\n def hook_each_test(request):\n yield\n sys.stdout.write(\"!stdout!\")\n sys.stderr.write(\"!stderr!\")\n logging.warning(\"!log!\")\n\n def test_func():\n assert False\n \"\"\"\n )\n\n result = pytester.runpytest(\"--show-capture=stdout\", \"--tb=short\").stdout.str()\n assert \"!stdout!\" in result\n assert \"!stderr!\" not in result\n assert \"!log!\" not in result\n\n result = pytester.runpytest(\"--show-capture=stderr\", \"--tb=short\").stdout.str()\n assert \"!stdout!\" not in result\n assert \"!stderr!\" in result\n assert \"!log!\" not in result\n\n result = pytester.runpytest(\"--show-capture=log\", \"--tb=short\").stdout.str()\n assert \"!stdout!\" not in result\n assert \"!stderr!\" not in result\n assert \"!log!\" in result\n\n result = pytester.runpytest(\"--show-capture=no\", \"--tb=short\").stdout.str()\n assert \"!stdout!\" not in result\n assert \"!stderr!\" not in result\n assert \"!log!\" not in result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fdopen_kept_alive_issue124_test_fdopen_kept_alive_issue124.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fdopen_kept_alive_issue124_test_fdopen_kept_alive_issue124.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1564, "end_line": 1580, "span_ids": ["test_fdopen_kept_alive_issue124"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.xfail(\"not hasattr(os, 'dup')\")\ndef test_fdopen_kept_alive_issue124(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import os, sys\n k = []\n def test_open_file_and_keep_alive(capfd):\n stdout = os.fdopen(1, 'w', 1)\n k.append(stdout)\n\n def test_close_kept_alive_file():\n stdout = k.pop()\n stdout.close()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_warnings_are_displayed_test_terminal_summary_warnings_are_displayed.assert_stdout_count_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_warnings_are_displayed_test_terminal_summary_warnings_are_displayed.assert_stdout_count_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1621, "end_line": 1655, "span_ids": ["test_terminal_summary_warnings_are_displayed"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_terminal_summary_warnings_are_displayed(pytester: Pytester) -> None:\n \"\"\"Test that warnings emitted during pytest_terminal_summary are displayed.\n (#1305).\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import warnings\n def pytest_terminal_summary(terminalreporter):\n warnings.warn(UserWarning('internal warning'))\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_failure():\n import warnings\n warnings.warn(\"warning_from_\" + \"test\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"-ra\")\n result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"*warning_from_test*\",\n \"*= short test summary info =*\",\n \"*= warnings summary (final) =*\",\n \"*conftest.py:3:*internal warning\",\n \"*== 1 failed, 2 warnings in *\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*None*\")\n stdout = result.stdout.str()\n assert stdout.count(\"warning_from_test\") == 1\n assert stdout.count(\"=== warnings summary \") == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestClassicOutputStyle.test_verbose_TestClassicOutputStyle.test_quiet.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestClassicOutputStyle.test_verbose_TestClassicOutputStyle.test_quiet.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1906, "end_line": 1921, "span_ids": ["TestClassicOutputStyle.test_verbose", "TestClassicOutputStyle.test_quiet"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestClassicOutputStyle:\n\n def test_verbose(self, pytester: Pytester, test_files) -> None:\n result = pytester.runpytest(\"-o\", \"console_output_style=classic\", \"-v\")\n result.stdout.fnmatch_lines(\n [\n \"test_one.py::test_one PASSED\",\n \"test_two.py::test_two FAILED\",\n f\"sub{os.sep}test_three.py::test_three_1 PASSED\",\n f\"sub{os.sep}test_three.py::test_three_2 FAILED\",\n f\"sub{os.sep}test_three.py::test_three_3 PASSED\",\n \"*2 failed, 3 passed in*\",\n ]\n )\n\n def test_quiet(self, pytester: Pytester, test_files) -> None:\n result = 
pytester.runpytest(\"-o\", \"console_output_style=classic\", \"-q\")\n result.stdout.fnmatch_lines([\".F.F.\", \"*2 failed, 3 passed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_verbose_TestProgressOutputStyle.test_verbose.output_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_verbose_TestProgressOutputStyle.test_verbose.output_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2041, "end_line": 2049, "span_ids": ["TestProgressOutputStyle.test_verbose"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n\n def test_verbose(self, many_tests_files, pytester: Pytester) -> None:\n output = pytester.runpytest(\"-v\")\n output.stdout.re_match_lines(\n [\n r\"test_bar.py::test_bar\\[0\\] PASSED \\s+ \\[ 5%\\]\",\n r\"test_foo.py::test_foo\\[4\\] PASSED \\s+ \\[ 75%\\]\",\n r\"test_foobar.py::test_foobar\\[4\\] PASSED \\s+ \\[100%\\]\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_verbose_count_TestProgressOutputStyle.test_verbose_count.output_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_verbose_count_TestProgressOutputStyle.test_verbose_count.output_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2051, "end_line": 2065, "span_ids": ["TestProgressOutputStyle.test_verbose_count"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n\n def test_verbose_count(self, many_tests_files, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n console_output_style = count\n \"\"\"\n )\n output = pytester.runpytest(\"-v\")\n output.stdout.re_match_lines(\n [\n r\"test_bar.py::test_bar\\[0\\] PASSED \\s+ \\[ 1/20\\]\",\n r\"test_foo.py::test_foo\\[4\\] PASSED \\s+ \\[15/20\\]\",\n r\"test_foobar.py::test_foobar\\[4\\] PASSED \\s+ \\[20/20\\]\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_xdist_normal_TestProgressOutputStyle.test_xdist_normal_count.output_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_xdist_normal_TestProgressOutputStyle.test_xdist_normal_count.output_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2067, "end_line": 2087, "span_ids": ["TestProgressOutputStyle.test_xdist_normal", "TestProgressOutputStyle.test_xdist_normal_count"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n\n def test_xdist_normal(\n self, many_tests_files, pytester: Pytester, monkeypatch\n ) -> None:\n pytest.importorskip(\"xdist\")\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n output = pytester.runpytest(\"-n2\")\n output.stdout.re_match_lines([r\"\\.{20} \\s+ \\[100%\\]\"])\n\n def test_xdist_normal_count(\n self, many_tests_files, pytester: Pytester, monkeypatch\n ) -> None:\n pytest.importorskip(\"xdist\")\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n pytester.makeini(\n \"\"\"\n [pytest]\n console_output_style = count\n \"\"\"\n )\n output = pytester.runpytest(\"-n2\")\n output.stdout.re_match_lines([r\"\\.{20} \\s+ \\[20/20\\]\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown.test_teardown_simple_TestProgressWithTeardown.test_teardown_many.output_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown.test_teardown_simple_TestProgressWithTeardown.test_teardown_many.output_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2162, "end_line": 2196, "span_ids": ["TestProgressWithTeardown.test_teardown_with_test_also_failing", "TestProgressWithTeardown.test_teardown_simple", "TestProgressWithTeardown.test_teardown_many"], "tokens": 315}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressWithTeardown:\n\n def test_teardown_simple(\n self, pytester: Pytester, contest_with_teardown_fixture\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo(fail_teardown):\n pass\n \"\"\"\n )\n output = pytester.runpytest()\n output.stdout.re_match_lines([r\"test_teardown_simple.py \\.E\\s+\\[100%\\]\"])\n\n def test_teardown_with_test_also_failing(\n self, pytester: Pytester, contest_with_teardown_fixture\n ) -> None:\n 
pytester.makepyfile(\n \"\"\"\n def test_foo(fail_teardown):\n assert 0\n \"\"\"\n )\n output = pytester.runpytest(\"-rfE\")\n output.stdout.re_match_lines(\n [\n r\"test_teardown_with_test_also_failing.py FE\\s+\\[100%\\]\",\n \"FAILED test_teardown_with_test_also_failing.py::test_foo - assert 0\",\n \"ERROR test_teardown_with_test_also_failing.py::test_foo - assert False\",\n ]\n )\n\n def test_teardown_many(self, pytester: Pytester, many_files) -> None:\n output = pytester.runpytest()\n output.stdout.re_match_lines(\n [r\"test_bar.py (\\.E){5}\\s+\\[ 25%\\]\", r\"test_foo.py (\\.E){15}\\s+\\[100%\\]\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown.test_teardown_many_verbose_TestProgressWithTeardown.test_xdist_normal.output_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown.test_teardown_many_verbose_TestProgressWithTeardown.test_xdist_normal.output_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2198, "end_line": 2219, "span_ids": ["TestProgressWithTeardown.test_xdist_normal", "TestProgressWithTeardown.test_teardown_many_verbose"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressWithTeardown:\n\n def test_teardown_many_verbose(\n self, pytester: Pytester, many_files, color_mapping\n ) -> None:\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n color_mapping.format_for_fnmatch(\n [\n \"test_bar.py::test_bar[0] PASSED * [ 5%]\",\n \"test_bar.py::test_bar[0] ERROR * [ 5%]\",\n \"test_bar.py::test_bar[4] PASSED * [ 25%]\",\n \"test_foo.py::test_foo[14] PASSED * [100%]\",\n \"test_foo.py::test_foo[14] ERROR * [100%]\",\n \"=* 20 passed, 20 errors in *\",\n ]\n )\n )\n\n def test_xdist_normal(self, many_files, pytester: Pytester, monkeypatch) -> None:\n pytest.importorskip(\"xdist\")\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n output = pytester.runpytest(\"-n2\")\n output.stdout.re_match_lines([r\"[\\.E]{40} \\s+ \\[100%\\]\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestTempdirHandler_TestTempdirHandler.test_tmppath_relative_basetemp_absolute.assert_t_getbasetemp_re": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestTempdirHandler_TestTempdirHandler.test_tmppath_relative_basetemp_absolute.assert_t_getbasetemp_re", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 49, "end_line": 68, "span_ids": ["TestTempdirHandler.test_tmppath_relative_basetemp_absolute", "TestTempdirHandler", 
"TestTempdirHandler.test_mktemp"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTempdirHandler:\n def test_mktemp(self, tmp_path):\n config = cast(Config, FakeConfig(tmp_path))\n t = TempdirFactory(\n TempPathFactory.from_config(config, _ispytest=True), _ispytest=True\n )\n tmp = t.mktemp(\"world\")\n assert tmp.relto(t.getbasetemp()) == \"world0\"\n tmp = t.mktemp(\"this\")\n assert tmp.relto(t.getbasetemp()).startswith(\"this\")\n tmp2 = t.mktemp(\"this\")\n assert tmp2.relto(t.getbasetemp()).startswith(\"this\")\n assert tmp2 != tmp\n\n def test_tmppath_relative_basetemp_absolute(self, tmp_path, monkeypatch):\n \"\"\"#4425\"\"\"\n monkeypatch.chdir(tmp_path)\n config = cast(Config, FakeConfig(\"hello\"))\n t = TempPathFactory.from_config(config, _ispytest=True)\n assert t.getbasetemp().resolve() == (tmp_path / \"hello\").resolve()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_always_is_realpath_test_tmpdir_always_is_realpath.assert_not_result_ret": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_always_is_realpath_test_tmpdir_always_is_realpath.assert_not_result_ret", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 122, "end_line": 140, "span_ids": ["test_tmpdir_always_is_realpath"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_tmpdir_always_is_realpath(pytester: Pytester) -> None:\n # the reason why tmpdir should be a realpath is that\n # when you cd to it and do \"os.getcwd()\" you will anyway\n # get the realpath. 
Using the symlinked path can thus\n # easily result in path-inequality\n # XXX if that proves to be a problem, consider using\n # os.environ[\"PWD\"]\n realtemp = pytester.mkdir(\"myrealtemp\")\n linktemp = pytester.path.joinpath(\"symlinktemp\")\n attempt_symlink_to(linktemp, str(realtemp))\n p = pytester.makepyfile(\n \"\"\"\n def test_1(tmpdir):\n import os\n assert os.path.realpath(str(tmpdir)) == str(tmpdir)\n \"\"\"\n )\n result = pytester.runpytest(\"-s\", p, \"--basetemp=%s/bt\" % linktemp)\n assert not result.ret", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmp_path_always_is_realpath_test_tmp_path_always_is_realpath.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmp_path_always_is_realpath_test_tmp_path_always_is_realpath.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 143, "end_line": 156, "span_ids": ["test_tmp_path_always_is_realpath"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_tmp_path_always_is_realpath(pytester: Pytester, monkeypatch) -> None:\n # for reasoning see: test_tmpdir_always_is_realpath test-case\n realtemp = pytester.mkdir(\"myrealtemp\")\n linktemp = pytester.path.joinpath(\"symlinktemp\")\n attempt_symlink_to(linktemp, str(realtemp))\n monkeypatch.setenv(\"PYTEST_DEBUG_TEMPROOT\", str(linktemp))\n pytester.makepyfile(\n \"\"\"\n def test_1(tmp_path):\n assert tmp_path.resolve() == tmp_path\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_too_long_on_parametrization_test_tmpdir_factory.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_too_long_on_parametrization_test_tmpdir_factory.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 159, "end_line": 184, "span_ids": ["test_tmpdir_factory", "test_tmpdir_too_long_on_parametrization"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_tmpdir_too_long_on_parametrization(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize(\"arg\", [\"1\"*1000])\n def test_some(arg, tmpdir):\n 
tmpdir.ensure(\"hello\")\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n\ndef test_tmpdir_factory(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope='session')\n def session_dir(tmpdir_factory):\n return tmpdir_factory.mktemp('data', numbered=False)\n def test_some(session_dir):\n assert session_dir.isdir()\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_fallback_tox_env_break_getuser.for_envvar_in_LOGNAME_.monkeypatch_delenv_envvar": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_fallback_tox_env_break_getuser.for_envvar_in_LOGNAME_.monkeypatch_delenv_envvar", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 187, "end_line": 208, "span_ids": ["test_tmpdir_fallback_tox_env", "break_getuser"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_tmpdir_fallback_tox_env(pytester: Pytester, monkeypatch) -> None:\n \"\"\"Test that tmpdir works even if environment variables required by getpass\n module are missing (#1010).\n \"\"\"\n monkeypatch.delenv(\"USER\", raising=False)\n monkeypatch.delenv(\"USERNAME\", raising=False)\n pytester.makepyfile(\n \"\"\"\n def test_some(tmpdir):\n assert tmpdir.isdir()\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n\n@pytest.fixture\ndef break_getuser(monkeypatch):\n monkeypatch.setattr(\"os.getuid\", lambda: -1)\n # taken from python 2.7/3.4\n for envvar in (\"LOGNAME\", \"USER\", \"LNAME\", \"USERNAME\"):\n monkeypatch.delenv(envvar, raising=False)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_fallback_uid_not_found_test_tmpdir_fallback_uid_not_found.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_tmpdir_fallback_uid_not_found_test_tmpdir_fallback_uid_not_found.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 211, "end_line": 225, "span_ids": ["test_tmpdir_fallback_uid_not_found"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"break_getuser\")\n@pytest.mark.skipif(sys.platform.startswith(\"win\"), 
reason=\"no os.getuid on windows\")\ndef test_tmpdir_fallback_uid_not_found(pytester: Pytester) -> None:\n \"\"\"Test that tmpdir works even if the current process's user id does not\n correspond to a valid user.\n \"\"\"\n\n pytester.makepyfile(\n \"\"\"\n def test_some(tmpdir):\n assert tmpdir.isdir()\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_get_user_uid_not_found_test_get_user.assert_get_user_is_None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_get_user_uid_not_found_test_get_user.assert_get_user_is_None", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 221, "end_line": 239, "span_ids": ["test_get_user_uid_not_found", "test_get_user"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"break_getuser\")\n@pytest.mark.skipif(sys.platform.startswith(\"win\"), reason=\"no os.getuid on windows\")\ndef test_get_user_uid_not_found():\n \"\"\"Test that get_user() function works even if the current process's\n user id does not correspond to a valid user (e.g. running pytest in a\n Docker container with 'docker run -u'.\n \"\"\"\n assert get_user() is None\n\n\n@pytest.mark.skipif(not sys.platform.startswith(\"win\"), reason=\"win only\")\ndef test_get_user(monkeypatch):\n \"\"\"Test that get_user() function works even if environment variables\n required by getpass module are missing from the environment on Windows\n (#1010).\n \"\"\"\n monkeypatch.delenv(\"USER\", raising=False)\n monkeypatch.delenv(\"USERNAME\", raising=False)\n assert get_user() is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_test_setup.assert_rep_failed_and_42": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_test_setup.assert_rep_failed_and_42", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 65, "end_line": 85, "span_ids": ["test_setup"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n def setUp(self):\n self.foo = 1\n def setup_method(self, method):\n self.foo2 = 1\n def test_both(self):\n self.assertEqual(1, 
self.foo)\n assert self.foo2 == 1\n def teardown_method(self, method):\n assert 0, \"42\"\n\n \"\"\"\n )\n reprec = pytester.inline_run(\"-s\", testpath)\n assert reprec.matchreport(\"test_both\", when=\"call\").passed\n rep = reprec.matchreport(\"test_both\", when=\"teardown\")\n assert rep.failed and \"42\" in str(rep.longrepr)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setUpModule_test_setUpModule_failing_no_teardown.assert_not_call_item_modu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setUpModule_test_setUpModule_failing_no_teardown.assert_not_call_item_modu", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 88, "end_line": 128, "span_ids": ["test_setUpModule", "test_setUpModule_failing_no_teardown"], "tokens": 219}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setUpModule(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n values = []\n\n def setUpModule():\n values.append(1)\n\n def tearDownModule():\n del values[0]\n\n def test_hello():\n assert values == [1]\n\n def test_world():\n assert values == [1]\n \"\"\"\n )\n result = pytester.runpytest(testpath)\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n\n\ndef test_setUpModule_failing_no_teardown(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n values = []\n\n def setUpModule():\n 0/0\n\n def tearDownModule():\n values.append(1)\n\n def test_hello():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(passed=0, failed=1)\n call = reprec.getcalls(\"pytest_runtest_setup\")[0]\n assert not call.item.module.values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_teardown_issue1649_test_teardown_issue1649.for_obj_in_gc_get_objects.assert_type_obj___name__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_teardown_issue1649_test_teardown_issue1649.for_obj_in_gc_get_objects.assert_type_obj___name__", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 191, "end_line": 213, "span_ids": ["test_teardown_issue1649"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_teardown_issue1649(pytester: Pytester) -> None:\n \"\"\"\n Are TestCase objects cleaned up? 
Often unittest TestCase objects set\n attributes that are large and expensive during setUp.\n\n The TestCase will not be cleaned up if the test fails, because it\n would then exist in the stackframe.\n \"\"\"\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class TestCaseObjectsShouldBeCleanedUp(unittest.TestCase):\n def setUp(self):\n self.an_expensive_object = 1\n def test_demo(self):\n pass\n\n \"\"\"\n )\n pytester.inline_run(\"-s\", testpath)\n gc.collect()\n for obj in gc.get_objects():\n assert type(obj).__name__ != \"TestCaseObjectsShouldBeCleanedUp\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_setUpClass_test_setup_setUpClass.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_setUpClass_test_setup_setUpClass.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 280, "end_line": 302, "span_ids": ["test_setup_setUpClass"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_setUpClass(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n import pytest\n class MyTestCase(unittest.TestCase):\n x = 0\n @classmethod\n def setUpClass(cls):\n cls.x += 1\n def test_func1(self):\n assert self.x == 1\n def test_func2(self):\n assert self.x == 1\n @classmethod\n def tearDownClass(cls):\n cls.x -= 1\n def test_teareddown():\n assert MyTestCase.x == 0\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_class_test_setup_class.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_class_test_setup_class.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 305, "end_line": 325, "span_ids": ["test_setup_class"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_class(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n import pytest\n class MyTestCase(unittest.TestCase):\n x = 0\n def setup_class(cls):\n cls.x += 1\n def test_func1(self):\n assert self.x == 1\n def test_func2(self):\n assert self.x == 1\n def 
teardown_class(cls):\n cls.x -= 1\n def test_teareddown():\n assert MyTestCase.x == 0\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_custom_exception_info_test_testcase_custom_exception_info.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_custom_exception_info_test_testcase_custom_exception_info.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 352, "end_line": 386, "span_ids": ["test_testcase_custom_exception_info"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"type\", [\"Error\", \"Failure\"])\ndef test_testcase_custom_exception_info(pytester: Pytester, type: str) -> None:\n pytester.makepyfile(\n \"\"\"\n from unittest import TestCase\n import py, pytest\n import _pytest._code\n class MyTestCase(TestCase):\n def run(self, result):\n excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)\n # we fake an incompatible exception info\n from _pytest.monkeypatch import MonkeyPatch\n mp = MonkeyPatch()\n def t(*args):\n mp.undo()\n raise TypeError()\n mp.setattr(_pytest._code, 'ExceptionInfo', t)\n try:\n excinfo = excinfo._excinfo\n result.add%(type)s(self, excinfo)\n finally:\n mp.undo()\n def test_hello(self):\n pass\n \"\"\"\n % locals()\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"NOTE: Incompatible Exception Representation*\",\n \"*ZeroDivisionError*\",\n \"*1 failed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_totally_incompatible_exception_info_test_module_level_pytestmark.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_totally_incompatible_exception_info_test_module_level_pytestmark.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 389, "end_line": 419, "span_ids": ["test_testcase_totally_incompatible_exception_info", "test_module_level_pytestmark"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_testcase_totally_incompatible_exception_info(pytester: Pytester) -> None:\n import _pytest.unittest\n\n (item,) 
= pytester.getitems(\n \"\"\"\n from unittest import TestCase\n class MyTestCase(TestCase):\n def test_hello(self):\n pass\n \"\"\"\n )\n assert isinstance(item, _pytest.unittest.TestCaseFunction)\n item.addError(None, 42) # type: ignore[arg-type]\n excinfo = item._excinfo\n assert excinfo is not None\n assert \"ERROR: Unknown Incompatible\" in str(excinfo.pop(0).getrepr())\n\n\ndef test_module_level_pytestmark(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n import pytest\n pytestmark = pytest.mark.xfail\n class MyTestCase(unittest.TestCase):\n def test_func1(self):\n assert 0\n \"\"\"\n )\n reprec = pytester.inline_run(testpath, \"-s\")\n reprec.assertoutcome(skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest_TestTrialUnittest.test_trial_testcase_runtest_not_collected.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest_TestTrialUnittest.test_trial_testcase_runtest_not_collected.None_3", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 422, "end_line": 451, "span_ids": ["TestTrialUnittest.test_trial_testcase_runtest_not_collected", "TestTrialUnittest.setup_class", "TestTrialUnittest"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTrialUnittest:\n def setup_class(cls):\n cls.ut = pytest.importorskip(\"twisted.trial.unittest\")\n # on windows trial uses a socket for a reactor and apparently doesn't close it properly\n # https://twistedmatrix.com/trac/ticket/9227\n cls.ignore_unclosed_socket_warning = (\"-W\", \"always\")\n\n def test_trial_testcase_runtest_not_collected(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from twisted.trial.unittest import TestCase\n\n class TC(TestCase):\n def test_hello(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)\n reprec.assertoutcome(passed=1)\n pytester.makepyfile(\n \"\"\"\n from twisted.trial.unittest import TestCase\n\n class TC(TestCase):\n def runTest(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_error_TestTrialUnittest.test_trial_error.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_error_TestTrialUnittest.test_trial_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 503, 
"end_line": 563, "span_ids": ["TestTrialUnittest.test_trial_error"], "tokens": 430}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTrialUnittest:\n\n def test_trial_error(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from twisted.trial.unittest import TestCase\n from twisted.internet.defer import Deferred\n from twisted.internet import reactor\n\n class TC(TestCase):\n def test_one(self):\n crash\n\n def test_two(self):\n def f(_):\n crash\n\n d = Deferred()\n d.addCallback(f)\n reactor.callLater(0.3, d.callback, None)\n return d\n\n def test_three(self):\n def f():\n pass # will never get called\n reactor.callLater(0.3, f)\n # will crash at teardown\n\n def test_four(self):\n def f(_):\n reactor.callLater(0.3, f)\n crash\n\n d = Deferred()\n d.addCallback(f)\n reactor.callLater(0.3, d.callback, None)\n return d\n # will crash both at test time and at teardown\n \"\"\"\n )\n result = pytester.runpytest(\"-vv\", \"-oconsole_output_style=classic\")\n result.stdout.fnmatch_lines(\n [\n \"test_trial_error.py::TC::test_four FAILED\",\n \"test_trial_error.py::TC::test_four ERROR\",\n \"test_trial_error.py::TC::test_one FAILED\",\n \"test_trial_error.py::TC::test_three FAILED\",\n \"test_trial_error.py::TC::test_two FAILED\",\n \"*ERRORS*\",\n \"*_ ERROR at teardown of TC.test_four _*\",\n \"*DelayedCalls*\",\n \"*= FAILURES =*\",\n \"*_ TC.test_four _*\",\n \"*NameError*crash*\",\n \"*_ TC.test_one _*\",\n \"*NameError*crash*\",\n \"*_ TC.test_three _*\",\n \"*DelayedCalls*\",\n \"*_ TC.test_two _*\",\n \"*NameError*crash*\",\n \"*= 4 failed, 1 error in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_djangolike_testcase_test_djangolike_testcase.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_djangolike_testcase_test_djangolike_testcase.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 634, "end_line": 689, "span_ids": ["test_djangolike_testcase"], "tokens": 320}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_djangolike_testcase(pytester: Pytester) -> None:\n # contributed from Morten Breekevold\n pytester.makepyfile(\n \"\"\"\n from unittest import TestCase, main\n\n class DjangoLikeTestCase(TestCase):\n\n def setUp(self):\n print(\"setUp()\")\n\n def test_presetup_has_been_run(self):\n print(\"test_thing()\")\n self.assertTrue(hasattr(self, 'was_presetup'))\n\n def tearDown(self):\n print(\"tearDown()\")\n\n def __call__(self, result=None):\n try:\n self._pre_setup()\n except (KeyboardInterrupt, SystemExit):\n raise\n except Exception:\n 
import sys\n result.addError(self, sys.exc_info())\n return\n super(DjangoLikeTestCase, self).__call__(result)\n try:\n self._post_teardown()\n except (KeyboardInterrupt, SystemExit):\n raise\n except Exception:\n import sys\n result.addError(self, sys.exc_info())\n return\n\n def _pre_setup(self):\n print(\"_pre_setup()\")\n self.was_presetup = True\n\n def _post_teardown(self):\n print(\"_post_teardown()\")\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"*_pre_setup()*\",\n \"*setUp()*\",\n \"*test_thing()*\",\n \"*tearDown()*\",\n \"*_post_teardown()*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_setup_interaction_test_unittest_setup_interaction.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_setup_interaction_test_unittest_setup_interaction.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 799, "end_line": 829, "span_ids": ["test_unittest_setup_interaction"], "tokens": 219}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"stmt\", [\"return\", \"yield\"])\ndef test_unittest_setup_interaction(pytester: Pytester, stmt: str) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n import pytest\n class MyTestCase(unittest.TestCase):\n @pytest.fixture(scope=\"class\", autouse=True)\n def perclass(self, request):\n request.cls.hello = \"world\"\n {stmt}\n @pytest.fixture(scope=\"function\", autouse=True)\n def perfunction(self, request):\n request.instance.funcname = request.function.__name__\n {stmt}\n\n def test_method1(self):\n assert self.funcname == \"test_method1\"\n assert self.hello == \"world\"\n\n def test_method2(self):\n assert self.funcname == \"test_method2\"\n\n def test_classattr(self):\n assert self.__class__.hello == \"world\"\n \"\"\".format(\n stmt=stmt\n )\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*3 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_non_unittest_no_setupclass_support_test_non_unittest_no_setupclass_support.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_non_unittest_no_setupclass_support_test_non_unittest_no_setupclass_support.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 832, "end_line": 855, "span_ids": ["test_non_unittest_no_setupclass_support"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_non_unittest_no_setupclass_support(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n class TestFoo(object):\n x = 0\n\n @classmethod\n def setUpClass(cls):\n cls.x = 1\n\n def test_method1(self):\n assert self.x == 0\n\n @classmethod\n def tearDownClass(cls):\n cls.x = 1\n\n def test_not_teareddown():\n assert TestFoo.x == 0\n\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_no_teardown_if_setupclass_failed_test_no_teardown_if_setupclass_failed.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_no_teardown_if_setupclass_failed_test_no_teardown_if_setupclass_failed.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 858, "end_line": 883, "span_ids": ["test_no_teardown_if_setupclass_failed"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_no_teardown_if_setupclass_failed(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n\n class MyTestCase(unittest.TestCase):\n x = 0\n\n @classmethod\n def setUpClass(cls):\n cls.x = 1\n assert False\n\n def test_func1(self):\n cls.x = 10\n\n @classmethod\n def tearDownClass(cls):\n cls.x = 100\n\n def test_notTornDown():\n assert MyTestCase.x == 1\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(passed=1, failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_issue333_result_clearing_test_issue333_result_clearing.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_issue333_result_clearing_test_issue333_result_clearing.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 917, "end_line": 937, "span_ids": ["test_issue333_result_clearing"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_issue333_result_clearing(pytester: 
Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.hookimpl(hookwrapper=True)\n def pytest_runtest_call(item):\n yield\n assert 0\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import unittest\n class TestIt(unittest.TestCase):\n def test_func(self):\n 0/0\n \"\"\"\n )\n\n reprec = pytester.inline_run()\n reprec.assertoutcome(failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_raise_skip_issue748_test_unittest_raise_skip_issue748.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_raise_skip_issue748_test_unittest_raise_skip_issue748.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 940, "end_line": 956, "span_ids": ["test_unittest_raise_skip_issue748"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_unittest_raise_skip_issue748(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_foo=\"\"\"\n import unittest\n\n class MyTestCase(unittest.TestCase):\n def test_one(self):\n raise unittest.SkipTest('skipping due to reasons')\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", \"-rs\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *SKIP*[1]*test_foo.py*skipping due to reasons*\n *1 skipped*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_skip_issue1169_test_class_method_containing_test_issue1558.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_skip_issue1169_test_class_method_containing_test_issue1558.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 959, "end_line": 993, "span_ids": ["test_unittest_skip_issue1169", "test_class_method_containing_test_issue1558"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_unittest_skip_issue1169(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_foo=\"\"\"\n import unittest\n\n class MyTestCase(unittest.TestCase):\n @unittest.skip(\"skipping due to reasons\")\n def test_skip(self):\n self.fail()\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", \"-rs\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *SKIP*[1]*skipping due to reasons*\n *1 skipped*\n \"\"\"\n )\n\n\ndef 
test_class_method_containing_test_issue1558(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_foo=\"\"\"\n import unittest\n\n class MyTestCase(unittest.TestCase):\n def test_should_run(self):\n pass\n def test_should_not_run(self):\n pass\n test_should_not_run.__test__ = False\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_usefixtures_marker_on_unittest_test_usefixtures_marker_on_unittest.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_usefixtures_marker_on_unittest_test_usefixtures_marker_on_unittest.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 996, "end_line": 1056, "span_ids": ["test_usefixtures_marker_on_unittest"], "tokens": 347}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"base\", [\"builtins.object\", \"unittest.TestCase\"])\ndef test_usefixtures_marker_on_unittest(base, pytester: Pytester) -> None:\n \"\"\"#3498\"\"\"\n module = base.rsplit(\".\", 1)[0]\n pytest.importorskip(module)\n pytester.makepyfile(\n conftest=\"\"\"\n import pytest\n\n @pytest.fixture(scope='function')\n def fixture1(request, monkeypatch):\n monkeypatch.setattr(request.instance, 'fixture1', True )\n\n\n @pytest.fixture(scope='function')\n def fixture2(request, monkeypatch):\n monkeypatch.setattr(request.instance, 'fixture2', True )\n\n def node_and_marks(item):\n print(item.nodeid)\n for mark in item.iter_markers():\n print(\" \", mark)\n\n @pytest.fixture(autouse=True)\n def my_marks(request):\n node_and_marks(request.node)\n\n def pytest_collection_modifyitems(items):\n for item in items:\n node_and_marks(item)\n\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n import pytest\n import {module}\n\n class Tests({base}):\n fixture1 = False\n fixture2 = False\n\n @pytest.mark.usefixtures(\"fixture1\")\n def test_one(self):\n assert self.fixture1\n assert not self.fixture2\n\n @pytest.mark.usefixtures(\"fixture1\", \"fixture2\")\n def test_two(self):\n assert self.fixture1\n assert self.fixture2\n\n\n \"\"\".format(\n module=module, base=base\n )\n )\n\n result = pytester.runpytest(\"-s\")\n result.assert_outcomes(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_normal_flow_test_normal_flow.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_normal_flow_test_normal_flow.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 
41, "end_line": 55, "span_ids": ["test_normal_flow"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_normal_flow(pytester: Pytester, pyfile_with_warnings) -> None:\n \"\"\"Check that the warnings section is displayed.\"\"\"\n result = pytester.runpytest(pyfile_with_warnings)\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"test_normal_flow.py::test_func\",\n \"*normal_flow_module.py:3: UserWarning: user warning\",\n '* warnings.warn(UserWarning(\"user warning\"))',\n \"*normal_flow_module.py:4: RuntimeWarning: runtime warning\",\n '* warnings.warn(RuntimeWarning(\"runtime warning\"))',\n \"* 1 passed, 2 warnings*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_setup_teardown_warnings_test_setup_teardown_warnings.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_setup_teardown_warnings_test_setup_teardown_warnings.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 58, "end_line": 85, "span_ids": ["test_setup_teardown_warnings"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"always\")\ndef test_setup_teardown_warnings(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import warnings\n import pytest\n\n @pytest.fixture\n def fix():\n warnings.warn(UserWarning(\"warning during setup\"))\n yield\n warnings.warn(UserWarning(\"warning during teardown\"))\n\n def test_func(fix):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"*test_setup_teardown_warnings.py:6: UserWarning: warning during setup\",\n '*warnings.warn(UserWarning(\"warning during setup\"))',\n \"*test_setup_teardown_warnings.py:8: UserWarning: warning during teardown\",\n '*warnings.warn(UserWarning(\"warning during teardown\"))',\n \"* 1 passed, 2 warnings*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_as_errors_test_as_errors.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_as_errors_test_as_errors.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": 
"test", "start_line": 88, "end_line": 107, "span_ids": ["test_as_errors"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"method\", [\"cmdline\", \"ini\"])\ndef test_as_errors(pytester: Pytester, pyfile_with_warnings, method) -> None:\n args = (\"-W\", \"error\") if method == \"cmdline\" else ()\n if method == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings=error\n \"\"\"\n )\n # Use a subprocess, since changing logging level affects other threads\n # (xdist).\n result = pytester.runpytest_subprocess(*args, pyfile_with_warnings)\n result.stdout.fnmatch_lines(\n [\n \"E UserWarning: user warning\",\n \"as_errors_module.py:3: UserWarning\",\n \"* 1 failed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_ignore_test_ignore.assert_WARNINGS_SUMMARY_H": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_ignore_test_ignore.assert_WARNINGS_SUMMARY_H", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 110, "end_line": 123, "span_ids": ["test_ignore"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"method\", [\"cmdline\", \"ini\"])\ndef test_ignore(pytester: Pytester, pyfile_with_warnings, method) -> None:\n args = (\"-W\", \"ignore\") if method == \"cmdline\" else ()\n if method == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings= ignore\n \"\"\"\n )\n\n result = pytester.runpytest(*args, pyfile_with_warnings)\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])\n assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_unicode_test_unicode.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_unicode_test_unicode.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 126, "end_line": 150, "span_ids": ["test_unicode"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "@pytest.mark.filterwarnings(\"always\")\ndef test_unicode(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import warnings\n import pytest\n\n\n @pytest.fixture\n def fix():\n warnings.warn(\"\u6d4b\u8bd5\")\n yield\n\n def test_func(fix):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"*test_unicode.py:7: UserWarning: \\u6d4b\\u8bd5*\",\n \"* 1 passed, 1 warning*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_works_with_filterwarnings_test_works_with_filterwarnings.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_works_with_filterwarnings_test_works_with_filterwarnings.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 153, "end_line": 174, "span_ids": ["test_works_with_filterwarnings"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_works_with_filterwarnings(pytester: Pytester) -> None:\n \"\"\"Ensure our warnings capture does not mess with pre-installed filters (#2430).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import warnings\n\n class MyWarning(Warning):\n pass\n\n warnings.filterwarnings(\"error\", category=MyWarning)\n\n class TestWarnings(object):\n def test_my_warning(self):\n try:\n warnings.warn(MyWarning(\"warn!\"))\n assert False\n except MyWarning:\n assert True\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*== 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_filterwarnings_mark_test_filterwarnings_mark.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_filterwarnings_mark_test_filterwarnings_mark.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 177, "end_line": 206, "span_ids": ["test_filterwarnings_mark"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"default_config\", [\"ini\", \"cmdline\"])\ndef test_filterwarnings_mark(pytester: Pytester, default_config) -> None:\n \"\"\"Test ``filterwarnings`` mark works and takes precedence over command\n line and ini options.\"\"\"\n if 
default_config == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings = always\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import warnings\n import pytest\n\n @pytest.mark.filterwarnings('ignore::RuntimeWarning')\n def test_ignore_runtime_warning():\n warnings.warn(RuntimeWarning())\n\n @pytest.mark.filterwarnings('error')\n def test_warning_error():\n warnings.warn(RuntimeWarning())\n\n def test_show_warning():\n warnings.warn(RuntimeWarning())\n \"\"\"\n )\n result = pytester.runpytest(\"-W always\" if default_config == \"cmdline\" else \"\")\n result.stdout.fnmatch_lines([\"*= 1 failed, 2 passed, 1 warning in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_non_string_warning_argument_test_filterwarnings_mark_registration.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_non_string_warning_argument_test_filterwarnings_mark_registration.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 209, "end_line": 236, "span_ids": ["test_filterwarnings_mark_registration", "test_non_string_warning_argument"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_non_string_warning_argument(pytester: Pytester) -> None:\n \"\"\"Non-str argument passed to warning breaks pytest (#2956)\"\"\"\n pytester.makepyfile(\n \"\"\"\\\n import warnings\n import pytest\n\n def test():\n warnings.warn(UserWarning(1, 'foo'))\n \"\"\"\n )\n result = pytester.runpytest(\"-W\", \"always\")\n result.stdout.fnmatch_lines([\"*= 1 passed, 1 warning in *\"])\n\n\ndef test_filterwarnings_mark_registration(pytester: Pytester) -> None:\n \"\"\"Ensure filterwarnings mark is registered\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.filterwarnings('error')\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--strict-markers\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_collection_warnings_test_collection_warnings.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_collection_warnings_test_collection_warnings.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 300, "end_line": 321, "span_ids": ["test_collection_warnings"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"always\")\ndef test_collection_warnings(pytester: Pytester) -> None:\n \"\"\"Check that we also capture warnings issued during test collection (#3251).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import warnings\n\n warnings.warn(UserWarning(\"collection warning\"))\n\n def test_foo():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \" *collection_warnings.py:3: UserWarning: collection warning\",\n ' warnings.warn(UserWarning(\"collection warning\"))',\n \"* 1 passed, 1 warning*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_mark_regex_escape_test_mark_regex_escape.assert_WARNINGS_SUMMARY_H": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_mark_regex_escape_test_mark_regex_escape.assert_WARNINGS_SUMMARY_H", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 324, "end_line": 337, "span_ids": ["test_mark_regex_escape"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"always\")\ndef test_mark_regex_escape(pytester: Pytester) -> None:\n \"\"\"@pytest.mark.filterwarnings should not try to escape regex characters (#3936)\"\"\"\n pytester.makepyfile(\n r\"\"\"\n import pytest, warnings\n\n @pytest.mark.filterwarnings(r\"ignore:some \\(warning\\)\")\n def test_foo():\n warnings.warn(UserWarning(\"some (warning)\"))\n \"\"\"\n )\n result = pytester.runpytest()\n assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_hide_pytest_internal_warnings_test_hide_pytest_internal_warnings.None_1.else_.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_hide_pytest_internal_warnings_test_hide_pytest_internal_warnings.None_1.else_.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 340, "end_line": 379, "span_ids": ["test_hide_pytest_internal_warnings"], "tokens": 274}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\n@pytest.mark.parametrize(\"ignore_pytest_warnings\", [\"no\", 
\"ini\", \"cmdline\"])\ndef test_hide_pytest_internal_warnings(\n pytester: Pytester, ignore_pytest_warnings\n) -> None:\n \"\"\"Make sure we can ignore internal pytest warnings using a warnings filter.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n import warnings\n\n warnings.warn(pytest.PytestWarning(\"some internal warning\"))\n\n def test_bar():\n pass\n \"\"\"\n )\n if ignore_pytest_warnings == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings = ignore::pytest.PytestWarning\n \"\"\"\n )\n args = (\n [\"-W\", \"ignore::pytest.PytestWarning\"]\n if ignore_pytest_warnings == \"cmdline\"\n else []\n )\n result = pytester.runpytest(*args)\n if ignore_pytest_warnings != \"no\":\n assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()\n else:\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"*test_hide_pytest_internal_warnings.py:4: PytestWarning: some internal warning\",\n \"* 1 passed, 1 warning *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_option_precedence_cmdline_over_ini_test_option_precedence_cmdline_over_ini.if_ignore_on_cmdline_.else_.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_option_precedence_cmdline_over_ini_test_option_precedence_cmdline_over_ini.if_ignore_on_cmdline_.else_.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 382, "end_line": 405, "span_ids": ["test_option_precedence_cmdline_over_ini"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"ignore_on_cmdline\", [True, False])\ndef test_option_precedence_cmdline_over_ini(\n pytester: Pytester, ignore_on_cmdline\n) -> None:\n \"\"\"Filters defined in the command-line should take precedence over filters in ini files (#3946).\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings = error\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import warnings\n def test():\n warnings.warn(UserWarning('hello'))\n \"\"\"\n )\n args = [\"-W\", \"ignore\"] if ignore_on_cmdline else []\n result = pytester.runpytest(*args)\n if ignore_on_cmdline:\n result.stdout.fnmatch_lines([\"* 1 passed in*\"])\n else:\n result.stdout.fnmatch_lines([\"* 1 failed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_option_precedence_mark_test_option_precedence_mark.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_option_precedence_mark_test_option_precedence_mark.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", 
"file_type": "text/x-python", "category": "test", "start_line": 408, "end_line": 425, "span_ids": ["test_option_precedence_mark"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_option_precedence_mark(pytester: Pytester) -> None:\n \"\"\"Filters defined by marks should always take precedence (#3946).\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings = ignore\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest, warnings\n @pytest.mark.filterwarnings('error')\n def test():\n warnings.warn(UserWarning('hello'))\n \"\"\"\n )\n result = pytester.runpytest(\"-W\", \"ignore\")\n result.stdout.fnmatch_lines([\"* 1 failed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_shown_by_default_TestDeprecationWarningsByDefault.test_shown_by_default.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_shown_by_default_TestDeprecationWarningsByDefault.test_shown_by_default.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 449, "end_line": 469, "span_ids": ["TestDeprecationWarningsByDefault.test_shown_by_default"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecationWarningsByDefault:\n\n @pytest.mark.parametrize(\"customize_filters\", [True, False])\n def test_shown_by_default(self, pytester: Pytester, customize_filters) -> None:\n \"\"\"Show deprecation warnings by default, even if user has customized the warnings filters (#4013).\"\"\"\n self.create_file(pytester)\n if customize_filters:\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings =\n once::UserWarning\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"*test_shown_by_default.py:3: DeprecationWarning: collection\",\n \"*test_shown_by_default.py:7: PendingDeprecationWarning: test run\",\n \"* 1 passed, 2 warnings*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_hidden_by_ini_TestDeprecationWarningsByDefault.test_hidden_by_mark.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_hidden_by_ini_TestDeprecationWarningsByDefault.test_hidden_by_mark.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 471, "end_line": 499, "span_ids": ["TestDeprecationWarningsByDefault.test_hidden_by_mark", "TestDeprecationWarningsByDefault.test_hidden_by_ini"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecationWarningsByDefault:\n\n def test_hidden_by_ini(self, pytester: Pytester) -> None:\n self.create_file(pytester)\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings =\n ignore::DeprecationWarning\n ignore::PendingDeprecationWarning\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()\n\n def test_hidden_by_mark(self, pytester: Pytester) -> None:\n \"\"\"Should hide the deprecation warning from the function, but the warning during collection should\n be displayed normally.\n \"\"\"\n self.create_file(\n pytester,\n mark='@pytest.mark.filterwarnings(\"ignore::PendingDeprecationWarning\")',\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"*test_hidden_by_mark.py:3: DeprecationWarning: collection\",\n \"* 1 passed, 1 warning*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_hidden_by_cmdline_TestDeprecationWarningsByDefault.test_hidden_by_system.assert_WARNINGS_SUMMARY_H": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault.test_hidden_by_cmdline_TestDeprecationWarningsByDefault.test_hidden_by_system.assert_WARNINGS_SUMMARY_H", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 501, "end_line": 515, "span_ids": ["TestDeprecationWarningsByDefault.test_hidden_by_cmdline", "TestDeprecationWarningsByDefault.test_hidden_by_system"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecationWarningsByDefault:\n\n def test_hidden_by_cmdline(self, pytester: Pytester) -> None:\n self.create_file(pytester)\n result = pytester.runpytest_subprocess(\n \"-W\",\n \"ignore::DeprecationWarning\",\n \"-W\",\n \"ignore::PendingDeprecationWarning\",\n )\n assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()\n\n def test_hidden_by_system(self, pytester: Pytester, monkeypatch) -> None:\n self.create_file(pytester)\n 
monkeypatch.setenv(\"PYTHONWARNINGS\", \"once::UserWarning\")\n result = pytester.runpytest_subprocess()\n assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry_TracebackEntry.getfirstlinesource.return.self_frame_code_firstline": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry_TracebackEntry.getfirstlinesource.return.self_frame_code_firstline", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 187, "end_line": 238, "span_ids": ["TracebackEntry.getfirstlinesource", "TracebackEntry.path", "TracebackEntry.__repr__", "TracebackEntry.frame", "TracebackEntry.relline", "TracebackEntry.locals", "TracebackEntry.set_repr_style", "TracebackEntry.lineno", "TracebackEntry.statement", "TracebackEntry"], "tokens": 386}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TracebackEntry:\n \"\"\"A single entry in a Traceback.\"\"\"\n\n __slots__ = (\"_rawentry\", \"_excinfo\", \"_repr_style\")\n\n def __init__(\n self,\n rawentry: TracebackType,\n excinfo: Optional[\"ReferenceType[ExceptionInfo[BaseException]]\"] = None,\n ) -> None:\n self._rawentry = rawentry\n self._excinfo = excinfo\n self._repr_style: Optional['Literal[\"short\", \"long\"]'] = None\n\n @property\n def lineno(self) -> int:\n return self._rawentry.tb_lineno - 1\n\n def set_repr_style(self, mode: \"Literal['short', 'long']\") -> None:\n assert mode in (\"short\", \"long\")\n self._repr_style = mode\n\n @property\n def frame(self) -> Frame:\n return Frame(self._rawentry.tb_frame)\n\n @property\n def relline(self) -> int:\n return self.lineno - self.frame.code.firstlineno\n\n def __repr__(self) -> str:\n return \"\" % (self.frame.code.path, self.lineno + 1)\n\n @property\n def statement(self) -> \"Source\":\n \"\"\"_pytest._code.Source object for the current statement.\"\"\"\n source = self.frame.code.fullsource\n assert source is not None\n return source.getstatement(self.lineno)\n\n @property\n def path(self) -> Union[Path, str]:\n \"\"\"Path to the source code.\"\"\"\n return self.frame.code.path\n\n @property\n def locals(self) -> Dict[str, Any]:\n \"\"\"Locals of underlying frame.\"\"\"\n return self.frame.f_locals\n\n def getfirstlinesource(self) -> int:\n return self.frame.code.firstlineno", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_excinfo_FormattedExcinfo.repr_excinfo.return.ExceptionChainRepr_repr_c": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_excinfo_FormattedExcinfo.repr_excinfo.return.ExceptionChainRepr_repr_c", "embedding": null, "metadata": {"file_path": 
"src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 897, "end_line": 944, "span_ids": ["FormattedExcinfo.repr_excinfo"], "tokens": 431}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n\n def repr_excinfo(\n self, excinfo: ExceptionInfo[BaseException]\n ) -> \"ExceptionChainRepr\":\n repr_chain: List[\n Tuple[ReprTraceback, Optional[ReprFileLocation], Optional[str]]\n ] = []\n e: Optional[BaseException] = excinfo.value\n excinfo_: Optional[ExceptionInfo[BaseException]] = excinfo\n descr = None\n seen: Set[int] = set()\n while e is not None and id(e) not in seen:\n seen.add(id(e))\n if excinfo_:\n reprtraceback = self.repr_traceback(excinfo_)\n reprcrash: Optional[ReprFileLocation] = (\n excinfo_._getreprcrash() if self.style != \"value\" else None\n )\n else:\n # Fallback to native repr if the exception doesn't have a traceback:\n # ExceptionInfo objects require a full traceback to work.\n reprtraceback = ReprTracebackNative(\n traceback.format_exception(type(e), e, None)\n )\n reprcrash = None\n\n repr_chain += [(reprtraceback, reprcrash, descr)]\n if e.__cause__ is not None and self.chain:\n e = e.__cause__\n excinfo_ = (\n ExceptionInfo((type(e), e, e.__traceback__))\n if e.__traceback__\n else None\n )\n descr = \"The above exception was the direct cause of the following exception:\"\n elif (\n e.__context__ is not None and not e.__suppress_context__ and self.chain\n ):\n e = e.__context__\n excinfo_ = (\n ExceptionInfo((type(e), e, e.__traceback__))\n if e.__traceback__\n else None\n )\n descr = \"During handling of the above exception, another exception occurred:\"\n else:\n e = None\n repr_chain.reverse()\n return ExceptionChainRepr(repr_chain)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionChainRepr_ReprExceptionInfo.toterminal.super_toterminal_tw_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionChainRepr_ReprExceptionInfo.toterminal.super_toterminal_tw_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 983, "end_line": 1014, "span_ids": ["ReprExceptionInfo", "ExceptionChainRepr.__attrs_post_init__", "ReprExceptionInfo.toterminal", "ExceptionChainRepr", "ExceptionChainRepr.toterminal"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(eq=False)\nclass ExceptionChainRepr(ExceptionRepr):\n chain = attr.ib(\n type=Sequence[\n Tuple[\"ReprTraceback\", Optional[\"ReprFileLocation\"], Optional[str]]\n ]\n )\n\n def __attrs_post_init__(self) -> None:\n super().__attrs_post_init__()\n # reprcrash 
and reprtraceback of the outermost (the newest) exception\n # in the chain.\n self.reprtraceback = self.chain[-1][0]\n self.reprcrash = self.chain[-1][1]\n\n def toterminal(self, tw: TerminalWriter) -> None:\n for element in self.chain:\n element[0].toterminal(tw)\n if element[2] is not None:\n tw.line(\"\")\n tw.line(element[2], yellow=True)\n super().toterminal(tw)\n\n\n@attr.s(eq=False)\nclass ReprExceptionInfo(ExceptionRepr):\n reprtraceback = attr.ib(type=\"ReprTraceback\")\n reprcrash = attr.ib(type=\"ReprFileLocation\")\n\n def toterminal(self, tw: TerminalWriter) -> None:\n self.reprtraceback.toterminal(tw)\n super().toterminal(tw)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__Rewrite_assertion_AST__PYC_TAIL._PYTEST_TAG_PYC_EX": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__Rewrite_assertion_AST__PYC_TAIL._PYTEST_TAG_PYC_EX", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 52, "span_ids": ["impl", "impl:2", "docstring", "imports", "imports:37"], "tokens": 315}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Rewrite assertion AST to produce nice error messages.\"\"\"\nimport ast\nimport errno\nimport functools\nimport importlib.abc\nimport importlib.machinery\nimport importlib.util\nimport io\nimport itertools\nimport marshal\nimport os\nimport struct\nimport sys\nimport tokenize\nimport types\nfrom pathlib import Path\nfrom pathlib import PurePath\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import IO\nfrom typing import Iterable\nfrom typing import List\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nfrom _pytest._io.saferepr import saferepr\nfrom _pytest._version import version\nfrom _pytest.assertion import util\nfrom _pytest.assertion.util import ( # noqa: F401\n format_explanation as _format_explanation,\n)\nfrom _pytest.config import Config\nfrom _pytest.main import Session\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pathlib import fnmatch_ex\nfrom _pytest.store import StoreKey\n\nif TYPE_CHECKING:\n from _pytest.assertion import AssertionState\n\n\nassertstate_key = StoreKey[\"AssertionState\"]()\n\n\n# pytest caches rewritten pycs in pycache dirs\nPYTEST_TAG = f\"{sys.implementation.cache_tag}-pytest-{version}\"\nPYC_EXT = \".py\" + (__debug__ and \"c\" or \"o\")\nPYC_TAIL = \".\" + PYTEST_TAG + PYC_EXT", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_rewrite_asserts__saferepr.return.saferepr_obj_replace_n": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_rewrite_asserts__saferepr.return.saferepr_obj_replace_n", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 410, "end_line": 430, "span_ids": ["_saferepr", "rewrite_asserts"], "tokens": 186}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def rewrite_asserts(\n mod: ast.Module,\n source: bytes,\n module_path: Optional[str] = None,\n config: Optional[Config] = None,\n) -> None:\n \"\"\"Rewrite the assert statements in mod.\"\"\"\n AssertionRewriter(module_path, config, source).run(mod)\n\n\ndef _saferepr(obj: object) -> str:\n r\"\"\"Get a safe repr of an object for assertion error messages.\n\n The assertion formatting (util.format_explanation()) requires\n newlines to be escaped since they are a special character for it.\n Normally assertion.util.format_explanation() does this but for a\n custom repr it is possible to contain one of the special escape\n sequences, especially '\\n{' and '\\n}' are likely to be present in\n JSON reprs.\n \"\"\"\n return saferepr(obj).replace(\"\\n\", \"\\\\n\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Call_AssertionRewriter.visit_Call.return.res_outer_expl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Call_AssertionRewriter.visit_Call.return.res_outer_expl", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 998, "end_line": 1020, "span_ids": ["AssertionRewriter.visit_Call"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def visit_Call(self, call: ast.Call) -> Tuple[ast.Name, str]:\n new_func, func_expl = self.visit(call.func)\n arg_expls = []\n new_args = []\n new_kwargs = []\n for arg in call.args:\n res, expl = self.visit(arg)\n arg_expls.append(expl)\n new_args.append(res)\n for keyword in call.keywords:\n res, expl = self.visit(keyword.value)\n new_kwargs.append(ast.keyword(keyword.arg, res))\n if keyword.arg:\n arg_expls.append(keyword.arg + \"=\" + expl)\n else: # **args have `arg` keywords with an .arg of None\n arg_expls.append(\"**\" + expl)\n\n expl = \"{}({})\".format(func_expl, \", \".join(arg_expls))\n new_call = ast.Call(new_func, new_args, new_kwargs)\n res = self.assign(new_call)\n res_expl = self.explanation_param(self.display(res))\n outer_expl = f\"{res_expl}\\n{{{res_expl} = {expl}\\n}}\"\n return res, outer_expl", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Starred_AssertionRewriter.visit_Attribute.return.res_expl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Starred_AssertionRewriter.visit_Attribute.return.res_expl", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1022, "end_line": 1036, "span_ids": ["AssertionRewriter.visit_Starred", "AssertionRewriter.visit_Attribute"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def visit_Starred(self, starred: ast.Starred) -> Tuple[ast.Starred, str]:\n # A Starred node can appear in a function call.\n res, expl = self.visit(starred.value)\n new_starred = ast.Starred(res, starred.ctx)\n return new_starred, \"*\" + expl\n\n def visit_Attribute(self, attr: ast.Attribute) -> Tuple[ast.Name, str]:\n if not isinstance(attr.ctx, ast.Load):\n return self.generic_visit(attr)\n value, value_expl = self.visit(attr.value)\n res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))\n res_expl = self.explanation_param(self.display(res))\n pat = \"%s\\n{%s = %s.%s\\n}\"\n expl = pat % (res_expl, res_expl, value_expl, attr.attr)\n return res, expl", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__diff_text__diff_text.return.explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__diff_text__diff_text.return.explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 200, "end_line": 245, "span_ids": ["_diff_text"], "tokens": 386}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _diff_text(left: str, right: str, verbose: int = 0) -> List[str]:\n \"\"\"Return the explanation for the diff between text.\n\n Unless --verbose is used this will skip leading and trailing\n characters which are identical to keep the diff minimal.\n \"\"\"\n from difflib import ndiff\n\n explanation: List[str] = []\n\n if verbose < 1:\n i = 0 # just in case left or right has zero length\n for i in range(min(len(left), len(right))):\n if left[i] != right[i]:\n break\n if i > 42:\n i -= 10 # Provide some context\n explanation = [\n \"Skipping %s identical leading characters in diff, use -v to show\" % i\n ]\n left = left[i:]\n right = 
right[i:]\n if len(left) == len(right):\n for i in range(len(left)):\n if left[-i] != right[-i]:\n break\n if i > 42:\n i -= 10 # Provide some context\n explanation += [\n \"Skipping {} identical trailing \"\n \"characters in diff, use -v to show\".format(i)\n ]\n left = left[:-i]\n right = right[:-i]\n keepends = True\n if left.isspace() or right.isspace():\n left = repr(str(left))\n right = repr(str(right))\n explanation += [\"Strings contain only whitespace, escaping them using repr()\"]\n # \"right\" is the expected base against which we compare \"left\",\n # see https://github.com/pytest-dev/pytest/issues/3333\n explanation += [\n line.strip(\"\\n\")\n for line in ndiff(right.splitlines(keepends), left.splitlines(keepends))\n ]\n return explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture.readouterr_CaptureFixture.readouterr.return.CaptureResult_captured_ou": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture.readouterr_CaptureFixture.readouterr.return.CaptureResult_captured_ou", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 864, "end_line": 879, "span_ids": ["CaptureFixture.readouterr"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CaptureFixture(Generic[AnyStr]):\n\n def readouterr(self) -> CaptureResult[AnyStr]:\n \"\"\"Read and return the captured output so far, resetting the internal\n buffer.\n\n :returns:\n The captured content as a namedtuple with ``out`` and ``err``\n string attributes.\n \"\"\"\n captured_out, captured_err = self._captured_out, self._captured_err\n if self._capture is not None:\n out, err = self._capture.readouterr()\n captured_out += out\n captured_err += err\n self._captured_out = self.captureclass.EMPTY_BUFFER\n self._captured_err = self.captureclass.EMPTY_BUFFER\n return CaptureResult(captured_out, captured_err)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture._suspend_CaptureFixture.disabled.with_capmanager_global_an.yield": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureFixture._suspend_CaptureFixture.disabled.with_capmanager_global_an.yield", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 881, "end_line": 902, "span_ids": ["CaptureFixture._resume", "CaptureFixture._suspend", "CaptureFixture._is_started", "CaptureFixture.disabled"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CaptureFixture(Generic[AnyStr]):\n\n def _suspend(self) -> None:\n \"\"\"Suspend this fixture's own capturing temporarily.\"\"\"\n if self._capture is not None:\n self._capture.suspend_capturing()\n\n def _resume(self) -> None:\n \"\"\"Resume this fixture's own capturing temporarily.\"\"\"\n if self._capture is not None:\n self._capture.resume_capturing()\n\n def _is_started(self) -> bool:\n \"\"\"Whether actively capturing -- not disabled or closed.\"\"\"\n if self._capture is not None:\n return self._capture.is_started()\n return False\n\n @contextlib.contextmanager\n def disabled(self) -> Generator[None, None, None]:\n \"\"\"Temporarily disable capturing while inside the ``with`` block.\"\"\"\n capmanager = self.request.config.pluginmanager.getplugin(\"capturemanager\")\n with capmanager.global_and_fixture_disabled():\n yield", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._get_pdb_wrapper_class_pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper.do_continue.return.ret": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._get_pdb_wrapper_class_pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper.do_continue.return.ret", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 150, "end_line": 191, "span_ids": ["pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper", "pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper:2", "pytestPDB._get_pdb_wrapper_class"], "tokens": 340}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class pytestPDB:\n\n @classmethod\n def _get_pdb_wrapper_class(cls, pdb_cls, capman: Optional[\"CaptureManager\"]):\n import _pytest.config\n\n # Type ignored because mypy doesn't support \"dynamic\"\n # inheritance like this.\n class PytestPdbWrapper(pdb_cls): # type: ignore[valid-type,misc]\n _pytest_capman = capman\n _continued = False\n\n def do_debug(self, arg):\n cls._recursive_debug += 1\n ret = super().do_debug(arg)\n cls._recursive_debug -= 1\n return ret\n\n def do_continue(self, arg):\n ret = super().do_continue(arg)\n if cls._recursive_debug == 0:\n assert cls._config is not None\n tw = _pytest.config.create_terminal_writer(cls._config)\n tw.line()\n\n capman = self._pytest_capman\n capturing = pytestPDB._is_capturing(capman)\n if capturing:\n if capturing == \"global\":\n tw.sep(\">\", \"PDB continue (IO-capturing resumed)\")\n else:\n tw.sep(\n \">\",\n \"PDB continue (IO-capturing resumed for %s)\"\n % capturing,\n )\n assert capman is not None\n capman.resume()\n else:\n tw.sep(\">\", \"PDB continue\")\n assert cls._pluginmanager is not None\n cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self)\n self._continued = True\n return ret\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper.do_c_pytestPDB._get_pdb_wrapper_class.return.PytestPdbWrapper": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper.do_c_pytestPDB._get_pdb_wrapper_class.return.PytestPdbWrapper", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 193, "end_line": 235, "span_ids": ["pytestPDB._get_pdb_wrapper_class.PytestPdbWrapper:2"], "tokens": 357}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class pytestPDB:\n\n @classmethod\n def _get_pdb_wrapper_class(cls, pdb_cls, capman: Optional[\"CaptureManager\"]):\n class PytestPdbWrapper(pdb_cls):\n\n do_c = do_cont = do_continue\n\n def do_quit(self, arg):\n \"\"\"Raise Exit outcome when quit command is used in pdb.\n\n This is a bit of a hack - it would be better if BdbQuit\n could be handled, but this would require to wrap the\n whole pytest run, and adjust the report etc.\n \"\"\"\n ret = super().do_quit(arg)\n\n if cls._recursive_debug == 0:\n outcomes.exit(\"Quitting debugger\")\n\n return ret\n\n do_q = do_quit\n do_exit = do_quit\n\n def setup(self, f, tb):\n \"\"\"Suspend on setup().\n\n Needed after do_continue resumed, and entering another\n breakpoint again.\n \"\"\"\n ret = super().setup(f, tb)\n if not ret and self._continued:\n # pdb.setup() returns True if the command wants to exit\n # from the interaction: do not suspend capturing then.\n if self._pytest_capman:\n self._pytest_capman.suspend_global_capture(in_=True)\n return ret\n\n def get_stack(self, f, t):\n stack, i = super().get_stack(f, t)\n if f is None:\n # Find last non-hidden frame.\n i = max(0, len(stack) - 1)\n while i and stack[i][0].f_locals.get(\"__tracebackhide__\", False):\n i -= 1\n return stack, i\n\n return PytestPdbWrapper", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._init_pdb_pytestPDB.set_trace._pdb_set_trace_frame_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._init_pdb_pytestPDB.set_trace._pdb_set_trace_frame_", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 237, "end_line": 282, "span_ids": ["pytestPDB._init_pdb", "pytestPDB.set_trace"], "tokens": 380}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class pytestPDB:\n\n @classmethod\n def _init_pdb(cls, method, *args, **kwargs):\n \"\"\"Initialize PDB debugging, dropping any IO capturing.\"\"\"\n import _pytest.config\n\n if cls._pluginmanager is None:\n capman: Optional[CaptureManager] = None\n else:\n capman = cls._pluginmanager.getplugin(\"capturemanager\")\n if capman:\n capman.suspend(in_=True)\n\n if cls._config:\n tw = _pytest.config.create_terminal_writer(cls._config)\n tw.line()\n\n if cls._recursive_debug == 0:\n # Handle header similar to pdb.set_trace in py37+.\n header = kwargs.pop(\"header\", None)\n if header is not None:\n tw.sep(\">\", header)\n else:\n capturing = cls._is_capturing(capman)\n if capturing == \"global\":\n tw.sep(\">\", f\"PDB {method} (IO-capturing turned off)\")\n elif capturing:\n tw.sep(\n \">\",\n \"PDB %s (IO-capturing turned off for %s)\"\n % (method, capturing),\n )\n else:\n tw.sep(\">\", f\"PDB {method}\")\n\n _pdb = cls._import_pdb_cls(capman)(**kwargs)\n\n if cls._pluginmanager:\n cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb)\n return _pdb\n\n @classmethod\n def set_trace(cls, *args, **kwargs) -> None:\n \"\"\"Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing.\"\"\"\n frame = sys._getframe().f_back\n _pdb = cls._init_pdb(\"set_trace\", *args, **kwargs)\n _pdb.set_trace(frame)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef.execute_FixtureDef.__repr__.return._FixtureDef_argname_r_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureDef.execute_FixtureDef.__repr__.return._FixtureDef_argname_r_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1051, "end_line": 1088, "span_ids": ["FixtureDef.__repr__", "FixtureDef.execute", "FixtureDef.cache_key"], "tokens": 387}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass FixtureDef(Generic[_FixtureValue]):\n\n def execute(self, request: SubRequest) -> _FixtureValue:\n # Get required arguments and register our own finish()\n # with their finalization.\n for argname in self.argnames:\n fixturedef = request._get_active_fixturedef(argname)\n if argname != \"request\":\n # PseudoFixtureDef is only for \"request\".\n assert isinstance(fixturedef, FixtureDef)\n fixturedef.addfinalizer(functools.partial(self.finish, request=request))\n\n my_cache_key = self.cache_key(request)\n if self.cached_result is not None:\n # note: comparison with `==` can fail (or be expensive) for e.g.\n # numpy arrays (#6497).\n cache_key = self.cached_result[1]\n if my_cache_key is cache_key:\n if self.cached_result[2] is not None:\n _, val, tb = self.cached_result[2]\n raise val.with_traceback(tb)\n else:\n result = self.cached_result[0]\n return result\n # We have a previous but differently parametrized fixture instance\n # so we need to tear it down before creating a new one.\n 
self.finish(request)\n assert self.cached_result is None\n\n hook = self._fixturemanager.session.gethookproxy(request.node.fspath)\n result = hook.pytest_fixture_setup(fixturedef=self, request=request)\n return result\n\n def cache_key(self, request: SubRequest) -> object:\n return request.param_index if not hasattr(request, \"param\") else request.param\n\n def __repr__(self) -> str:\n return \"\".format(\n self.argname, self.scope, self.baseid\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager._get_direct_parametrize_args_FixtureManager._get_direct_parametrize_args.return.parametrize_argnames": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager._get_direct_parametrize_args_FixtureManager._get_direct_parametrize_args.return.parametrize_argnames", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1444, "end_line": 1461, "span_ids": ["FixtureManager._get_direct_parametrize_args"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n\n def _get_direct_parametrize_args(self, node: nodes.Node) -> List[str]:\n \"\"\"Return all direct parametrization arguments of a node, so we don't\n mistake them for fixtures.\n\n Check https://github.com/pytest-dev/pytest/issues/5036.\n\n These things are done later as well when dealing with parametrization\n so this could be improved.\n \"\"\"\n parametrize_argnames: List[str] = []\n for marker in node.iter_markers(name=\"parametrize\"):\n if not marker.kwargs.get(\"indirect\", False):\n p_argnames, _ = ParameterSet._parse_parametrize_args(\n *marker.args, **marker.kwargs\n )\n parametrize_argnames.extend(p_argnames)\n\n return parametrize_argnames", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_xml_attribute__check_record_param_type.if_not_isinstance_v_str_.raise_TypeError_msg_forma": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_xml_attribute__check_record_param_type.if_not_isinstance_v_str_.raise_TypeError_msg_forma", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 304, "end_line": 339, "span_ids": ["record_xml_attribute", "_check_record_param_type"], "tokens": 294}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef 
record_xml_attribute(request: FixtureRequest) -> Callable[[str, object], None]:\n \"\"\"Add extra xml attributes to the tag for the calling test.\n\n The fixture is callable with ``name, value``. The value is\n automatically XML-encoded.\n \"\"\"\n from _pytest.warning_types import PytestExperimentalApiWarning\n\n request.node.warn(\n PytestExperimentalApiWarning(\"record_xml_attribute is an experimental feature\")\n )\n\n _warn_incompatibility_with_xunit2(request, \"record_xml_attribute\")\n\n # Declare noop\n def add_attr_noop(name: str, value: object) -> None:\n pass\n\n attr_func = add_attr_noop\n\n xml = request.config._store.get(xml_key, None)\n if xml is not None:\n node_reporter = xml.node_reporter(request.node.nodeid)\n attr_func = node_reporter.add_attribute\n\n return attr_func\n\n\ndef _check_record_param_type(param: str, v: str) -> None:\n \"\"\"Used by record_testsuite_property to check that the given parameter name is of the proper\n type.\"\"\"\n __tracebackhide__ = True\n if not isinstance(v, str):\n msg = \"{param} parameter needs to be a string, but {g} given\" # type: ignore[unreachable]\n raise TypeError(msg.format(param=param, g=type(v).__name__))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_testsuite_property_record_testsuite_property.return.record_func": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_record_testsuite_property_record_testsuite_property.return.record_func", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 342, "end_line": 376, "span_ids": ["record_testsuite_property"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture(scope=\"session\")\ndef record_testsuite_property(request: FixtureRequest) -> Callable[[str, object], None]:\n \"\"\"Record a new ```` tag as child of the root ````.\n\n This is suitable to writing global information regarding the entire test\n suite, and is compatible with ``xunit2`` JUnit family.\n\n This is a ``session``-scoped fixture which is called with ``(name, value)``. Example:\n\n .. code-block:: python\n\n def test_foo(record_testsuite_property):\n record_testsuite_property(\"ARCH\", \"PPC\")\n record_testsuite_property(\"STORAGE_TYPE\", \"CEPH\")\n\n ``name`` must be a string, ``value`` will be converted to a string and properly xml-escaped.\n\n .. warning::\n\n Currently this fixture **does not work** with the\n `pytest-xdist `__ plugin. 
See issue\n `#7767 `__ for details.\n \"\"\"\n\n __tracebackhide__ = True\n\n def record_func(name: str, value: object) -> None:\n \"\"\"No-op function in case --junitxml was not passed in the command-line.\"\"\"\n __tracebackhide__ = True\n _check_record_param_type(\"name\", name)\n\n xml = request.config._store.get(xml_key, None)\n if xml is not None:\n record_func = xml.add_global_property # noqa\n return record_func", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_addoption_pytest_addoption.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_pytest_addoption_pytest_addoption.None_6", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 379, "end_line": 423, "span_ids": ["pytest_addoption"], "tokens": 335}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"terminal reporting\")\n group.addoption(\n \"--junitxml\",\n \"--junit-xml\",\n action=\"store\",\n dest=\"xmlpath\",\n metavar=\"path\",\n type=functools.partial(filename_arg, optname=\"--junitxml\"),\n default=None,\n help=\"create junit-xml style report file at given path.\",\n )\n group.addoption(\n \"--junitprefix\",\n \"--junit-prefix\",\n action=\"store\",\n metavar=\"str\",\n default=None,\n help=\"prepend prefix to classnames in junit-xml output\",\n )\n parser.addini(\n \"junit_suite_name\", \"Test suite name for JUnit report\", default=\"pytest\"\n )\n parser.addini(\n \"junit_logging\",\n \"Write captured log messages to JUnit report: \"\n \"one of no|log|system-out|system-err|out-err|all\",\n default=\"no\",\n )\n parser.addini(\n \"junit_log_passing_tests\",\n \"Capture log information for passing tests to JUnit report: \",\n type=\"bool\",\n default=True,\n )\n parser.addini(\n \"junit_duration_report\",\n \"Duration time to report: one of total|call\",\n default=\"total\",\n ) # choices=['total', 'call'])\n parser.addini(\n \"junit_family\",\n \"Emit XML for schema: one of legacy|xunit1|xunit2\",\n default=\"xunit2\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_ColoredLevelFormatter_ColoredLevelFormatter.format.return.super_format_record_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_ColoredLevelFormatter_ColoredLevelFormatter.format.return.super_format_record_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 49, "end_line": 92, "span_ids": ["ColoredLevelFormatter", "ColoredLevelFormatter.format"], "tokens": 401}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ColoredLevelFormatter(logging.Formatter):\n \"\"\"A logging formatter which colorizes the %(levelname)..s part of the\n log format passed to __init__.\"\"\"\n\n LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = {\n logging.CRITICAL: {\"red\"},\n logging.ERROR: {\"red\", \"bold\"},\n logging.WARNING: {\"yellow\"},\n logging.WARN: {\"yellow\"},\n logging.INFO: {\"green\"},\n logging.DEBUG: {\"purple\"},\n logging.NOTSET: set(),\n }\n LEVELNAME_FMT_REGEX = re.compile(r\"%\\(levelname\\)([+-.]?\\d*s)\")\n\n def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None:\n super().__init__(*args, **kwargs)\n self._original_fmt = self._style._fmt\n self._level_to_fmt_mapping: Dict[int, str] = {}\n\n assert self._fmt is not None\n levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt)\n if not levelname_fmt_match:\n return\n levelname_fmt = levelname_fmt_match.group()\n\n for level, color_opts in self.LOGLEVEL_COLOROPTS.items():\n formatted_levelname = levelname_fmt % {\n \"levelname\": logging.getLevelName(level)\n }\n\n # add ANSI escape sequences around the formatted levelname\n color_kwargs = {name: True for name in color_opts}\n colorized_formatted_levelname = terminalwriter.markup(\n formatted_levelname, **color_kwargs\n )\n self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub(\n colorized_formatted_levelname, self._fmt\n )\n\n def format(self, record: logging.LogRecord) -> str:\n fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt)\n self._style._fmt = fmt\n return super().format(record)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._create_formatter_LoggingPlugin._create_formatter.return.formatter": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._create_formatter_LoggingPlugin._create_formatter.return.formatter", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 583, "end_line": 599, "span_ids": ["LoggingPlugin._create_formatter"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoggingPlugin:\n\n def _create_formatter(self, log_format, log_date_format, auto_indent):\n # Color option doesn't exist if terminal plugin is disabled.\n color = getattr(self._config.option, \"color\", \"no\")\n if color != \"no\" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search(\n log_format\n ):\n formatter: logging.Formatter = ColoredLevelFormatter(\n create_terminal_writer(self._config), log_format, log_date_format\n )\n else:\n formatter = logging.Formatter(log_format, log_date_format)\n\n formatter._style = PercentStyleMultiline(\n formatter._style._fmt, auto_indent=auto_indent\n 
)\n\n return formatter", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet._parse_parametrize_args_ParameterSet._parse_parametrize_parameters.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet._parse_parametrize_args_ParameterSet._parse_parametrize_parameters.return._", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 137, "end_line": 158, "span_ids": ["ParameterSet._parse_parametrize_parameters", "ParameterSet._parse_parametrize_args"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ParameterSet(\n NamedTuple(\n \"ParameterSet\",\n [\n (\"values\", Sequence[Union[object, NotSetType]]),\n (\"marks\", Collection[Union[\"MarkDecorator\", \"Mark\"]]),\n (\"id\", Optional[str]),\n ],\n )\n):\n\n @staticmethod\n def _parse_parametrize_args(\n argnames: Union[str, List[str], Tuple[str, ...]],\n argvalues: Iterable[Union[\"ParameterSet\", Sequence[object], object]],\n *args,\n **kwargs,\n ) -> Tuple[Union[List[str], Tuple[str, ...]], bool]:\n if not isinstance(argnames, (tuple, list)):\n argnames = [x.strip() for x in argnames.split(\",\") if x.strip()]\n force_tuple = len(argnames) == 1\n else:\n force_tuple = False\n return argnames, force_tuple\n\n @staticmethod\n def _parse_parametrize_parameters(\n argvalues: Iterable[Union[\"ParameterSet\", Sequence[object], object]],\n force_tuple: bool,\n ) -> List[\"ParameterSet\"]:\n return [\n ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setitem_MonkeyPatch.delitem.if_name_not_in_dic_.else_.del_dic_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setitem_MonkeyPatch.delitem.if_name_not_in_dic_.else_.del_dic_name_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 264, "end_line": 280, "span_ids": ["MonkeyPatch.setitem", "MonkeyPatch.delitem"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n\n def setitem(self, dic: MutableMapping[K, V], name: K, value: V) -> None:\n \"\"\"Set dictionary entry ``name`` to value.\"\"\"\n self._setitem.append((dic, name, dic.get(name, notset)))\n dic[name] = 
value\n\n def delitem(self, dic: MutableMapping[K, V], name: K, raising: bool = True) -> None:\n \"\"\"Delete ``name`` from dict.\n\n Raises ``KeyError`` if it doesn't exist, unless ``raising`` is set to\n False.\n \"\"\"\n if name not in dic:\n if raising:\n raise KeyError(name)\n else:\n self._setitem.append((dic, name, dic.get(name, notset)))\n del dic[name]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_TestReport_TestReport.__repr__.return._r_when_r_outco": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_TestReport_TestReport.__repr__.return._r_when_r_outco", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 238, "end_line": 299, "span_ids": ["TestReport", "TestReport.__repr__"], "tokens": 523}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TestReport(BaseReport):\n \"\"\"Basic test report object (also used for setup and teardown calls if\n they fail).\"\"\"\n\n __test__ = False\n\n def __init__(\n self,\n nodeid: str,\n location: Tuple[str, Optional[int], str],\n keywords,\n outcome: \"Literal['passed', 'failed', 'skipped']\",\n longrepr: Union[\n None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr\n ],\n when: \"Literal['setup', 'call', 'teardown']\",\n sections: Iterable[Tuple[str, str]] = (),\n duration: float = 0,\n user_properties: Optional[Iterable[Tuple[str, object]]] = None,\n **extra,\n ) -> None:\n #: Normalized collection nodeid.\n self.nodeid = nodeid\n\n #: A (filesystempath, lineno, domaininfo) tuple indicating the\n #: actual location of a test item - it might be different from the\n #: collected one e.g. if a method is inherited from a different module.\n self.location: Tuple[str, Optional[int], str] = location\n\n #: A name -> value dictionary containing all keywords and\n #: markers associated with a test invocation.\n self.keywords = keywords\n\n #: Test outcome, always one of \"passed\", \"failed\", \"skipped\".\n self.outcome = outcome\n\n #: None or a failure representation.\n self.longrepr = longrepr\n\n #: One of 'setup', 'call', 'teardown' to indicate runtest phase.\n self.when = when\n\n #: User properties is a list of tuples (name, value) that holds user\n #: defined properties of the test.\n self.user_properties = list(user_properties or [])\n\n #: List of pairs ``(str, str)`` of extra information which needs to\n #: marshallable. 
Used by pytest to add captured text\n #: from ``stdout`` and ``stderr``, but may be used by other plugins\n #: to add arbitrary information to reports.\n self.sections = list(sections)\n\n #: Time it took to run just the test.\n self.duration = duration\n\n self.__dict__.update(extra)\n\n def __repr__(self) -> str:\n return \"<{} {!r} when={!r} outcome={!r}>\".format(\n self.__class__.__name__, self.nodeid, self.when, self.outcome\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_CollectReport_CollectReport.__repr__.return._CollectReport_r_lenr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_CollectReport_CollectReport.__repr__.return._CollectReport_r_lenr", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 356, "end_line": 401, "span_ids": ["CollectReport", "CollectReport.location", "CollectReport.__repr__"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass CollectReport(BaseReport):\n \"\"\"Collection report object.\"\"\"\n\n when = \"collect\"\n\n def __init__(\n self,\n nodeid: str,\n outcome: \"Literal['passed', 'failed', 'skipped']\",\n longrepr: Union[\n None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr\n ],\n result: Optional[List[Union[Item, Collector]]],\n sections: Iterable[Tuple[str, str]] = (),\n **extra,\n ) -> None:\n #: Normalized collection nodeid.\n self.nodeid = nodeid\n\n #: Test outcome, always one of \"passed\", \"failed\", \"skipped\".\n self.outcome = outcome\n\n #: None or a failure representation.\n self.longrepr = longrepr\n\n #: The collected items and collection nodes.\n self.result = result or []\n\n #: List of pairs ``(str, str)`` of extra information which needs to\n #: marshallable.\n # Used by pytest to add captured text : from ``stdout`` and ``stderr``,\n # but may be used by other plugins : to add arbitrary information to\n # reports.\n self.sections = list(sections)\n\n self.__dict__.update(extra)\n\n @property\n def location(self):\n return (self.fspath, None, self.fspath)\n\n def __repr__(self) -> str:\n return \"\".format(\n self.nodeid, len(self.result), self.outcome\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_line_with_reprcrash_message__get_line_with_reprcrash_message.return.line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_line_with_reprcrash_message__get_line_with_reprcrash_message.return.line", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1286, "end_line": 1307, "span_ids": ["_get_line_with_reprcrash_message"], "tokens": 
171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_line_with_reprcrash_message(\n config: Config, rep: BaseReport, termwidth: int\n) -> str:\n \"\"\"Get summary line for a report, trying to add reprcrash message.\"\"\"\n verbose_word = rep._get_verbose_word(config)\n pos = _get_pos(config, rep)\n\n line = f\"{verbose_word} {pos}\"\n line_width = wcswidth(line)\n\n try:\n # Type ignored intentionally -- possible AttributeError expected.\n msg = rep.longrepr.reprcrash.message # type: ignore[union-attr]\n except AttributeError:\n pass\n else:\n available_width = termwidth - line_width\n msg = _format_trimmed(\" - {}\", msg, available_width)\n if msg is not None:\n line += msg\n\n return line", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory.getbasetemp_TempPathFactory.getbasetemp.return.t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory.getbasetemp_TempPathFactory.getbasetemp.return.t", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 102, "end_line": 125, "span_ids": ["TempPathFactory.getbasetemp"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass TempPathFactory:\n\n def getbasetemp(self) -> Path:\n \"\"\"Return base temporary directory.\"\"\"\n if self._basetemp is not None:\n return self._basetemp\n\n if self._given_basetemp is not None:\n basetemp = self._given_basetemp\n ensure_reset_dir(basetemp)\n basetemp = basetemp.resolve()\n else:\n from_env = os.environ.get(\"PYTEST_DEBUG_TEMPROOT\")\n temproot = Path(from_env or tempfile.gettempdir()).resolve()\n user = get_user() or \"unknown\"\n # use a sub-directory in the temproot to speed-up\n # make_numbered_dir() call\n rootdir = temproot.joinpath(f\"pytest-of-{user}\")\n rootdir.mkdir(exist_ok=True)\n basetemp = make_numbered_dir_with_cleanup(\n prefix=\"pytest-\", root=rootdir, keep=3, lock_timeout=LOCK_TIMEOUT\n )\n assert basetemp is not None, basetemp\n self._basetemp = t = basetemp\n self._trace(\"new basetemp\", t)\n return t", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_os_TestGeneralUsage.test_root_conftest_syntax_error.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_os_TestGeneralUsage.test_root_conftest_syntax_error.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", 
"file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 34, "span_ids": ["TestGeneralUsage.test_root_conftest_syntax_error", "TestGeneralUsage.test_config_error", "imports", "TestGeneralUsage", "prepend_pythonpath"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport sys\nimport types\n\nimport attr\nimport py\n\nimport pytest\nfrom _pytest.compat import importlib_metadata\nfrom _pytest.config import ExitCode\nfrom _pytest.pathlib import symlink_or_skip\nfrom _pytest.pytester import Pytester\n\n\ndef prepend_pythonpath(*dirs) -> str:\n cur = os.getenv(\"PYTHONPATH\")\n if cur:\n dirs += (cur,)\n return os.pathsep.join(str(p) for p in dirs)\n\n\nclass TestGeneralUsage:\n def test_config_error(self, pytester: Pytester) -> None:\n pytester.copy_example(\"conftest_usageerror/conftest.py\")\n result = pytester.runpytest(pytester.path)\n assert result.ret == ExitCode.USAGE_ERROR\n result.stderr.fnmatch_lines([\"*ERROR: hello\"])\n result.stdout.fnmatch_lines([\"*pytest_unconfigure_called\"])\n\n def test_root_conftest_syntax_error(self, pytester: Pytester) -> None:\n pytester.makepyfile(conftest=\"raise SyntaxError\\n\")\n result = pytester.runpytest()\n result.stderr.fnmatch_lines([\"*raise SyntaxError*\"])\n assert result.ret != 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_reprexcinfo_getrepr_TestFormattedExcinfo.test_reprexcinfo_unicode.assert_x_\u044f_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_reprexcinfo_getrepr_TestFormattedExcinfo.test_reprexcinfo_unicode.assert_x_\u044f_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 892, "end_line": 921, "span_ids": ["TestFormattedExcinfo.test_reprexcinfo_unicode", "TestFormattedExcinfo.test_reprexcinfo_unicode.MyRepr", "TestFormattedExcinfo.test_reprexcinfo_getrepr", "TestFormattedExcinfo.test_reprexcinfo_unicode.MyRepr.toterminal"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_reprexcinfo_getrepr(self, importasmod) -> None:\n mod = importasmod(\n \"\"\"\n def f(x):\n raise ValueError(x)\n def entry():\n f(0)\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n\n styles: Tuple[_TracebackStyle, ...] 
= (\"short\", \"long\", \"no\")\n for style in styles:\n for showlocals in (True, False):\n repr = excinfo.getrepr(style=style, showlocals=showlocals)\n assert repr.reprtraceback.style == style\n\n assert isinstance(repr, ExceptionChainRepr)\n for r in repr.chain:\n assert r[0].style == style\n\n def test_reprexcinfo_unicode(self):\n from _pytest._code.code import TerminalRepr\n\n class MyRepr(TerminalRepr):\n def toterminal(self, tw: TerminalWriter) -> None:\n tw.line(\"\u044f\")\n\n x = str(MyRepr())\n assert x == \"\u044f\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_logging_test_change_level.assert_CRITICAL_in_capl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_logging_test_change_level.assert_CRITICAL_in_capl", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 28, "span_ids": ["test_fixture_help", "test_change_level", "impl", "imports"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\n\nimport pytest\nfrom _pytest.logging import caplog_records_key\nfrom _pytest.pytester import Pytester\n\nlogger = logging.getLogger(__name__)\nsublogger = logging.getLogger(__name__ + \".baz\")\n\n\ndef test_fixture_help(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--fixtures\")\n result.stdout.fnmatch_lines([\"*caplog*\"])\n\n\ndef test_change_level(caplog):\n caplog.set_level(logging.INFO)\n logger.debug(\"handler DEBUG level\")\n logger.info(\"handler INFO level\")\n\n caplog.set_level(logging.CRITICAL, logger=sublogger.name)\n sublogger.warning(\"logger WARNING level\")\n sublogger.critical(\"logger CRITICAL level\")\n\n assert \"DEBUG\" not in caplog.text\n assert \"INFO\" in caplog.text\n assert \"WARNING\" not in caplog.text\n assert \"CRITICAL\" in caplog.text", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_logging_test_coloredlogformatter.assert_output_dummyp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_logging_test_coloredlogformatter.assert_output_dummyp", "embedding": null, "metadata": {"file_path": "testing/logging/test_formatter.py", "file_name": "test_formatter.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 36, "span_ids": ["test_coloredlogformatter.ColorConfig", "test_coloredlogformatter.ColorConfig.option:2", "imports", "test_coloredlogformatter"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nfrom typing import Any\n\nfrom _pytest._io import TerminalWriter\nfrom _pytest.logging import ColoredLevelFormatter\n\n\ndef test_coloredlogformatter() -> None:\n logfmt = \"%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s\"\n\n record = logging.LogRecord(\n name=\"dummy\",\n level=logging.INFO,\n pathname=\"dummypath\",\n lineno=10,\n msg=\"Test Message\",\n args=(),\n exc_info=None,\n )\n\n class ColorConfig:\n class option:\n pass\n\n tw = TerminalWriter()\n tw.hasmarkup = True\n formatter = ColoredLevelFormatter(tw, logfmt)\n output = formatter.format(record)\n assert output == (\n \"dummypath 10 \\x1b[32mINFO \\x1b[0m Test Message\"\n )\n\n tw.hasmarkup = False\n formatter = ColoredLevelFormatter(tw, logfmt)\n output = formatter.format(record)\n assert output == (\"dummypath 10 INFO Test Message\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_test_colored_short_level_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_test_colored_short_level_", "embedding": null, "metadata": {"file_path": "testing/logging/test_formatter.py", "file_name": "test_formatter.py", "file_type": "text/x-python", "category": "test", "start_line": 128, "end_line": 151, "span_ids": ["test_colored_short_level.ColorConfig.option:2", "test_colored_short_level.ColorConfig", "test_colored_short_level"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_colored_short_level() -> None:\n logfmt = \"%(levelname).1s %(message)s\"\n\n record = logging.LogRecord(\n name=\"dummy\",\n level=logging.INFO,\n pathname=\"dummypath\",\n lineno=10,\n msg=\"Test Message\",\n args=(),\n exc_info=None,\n )\n\n class ColorConfig:\n class option:\n pass\n\n tw = TerminalWriter()\n tw.hasmarkup = True\n formatter = ColoredLevelFormatter(tw, logfmt)\n output = formatter.format(record)\n # the I (of INFO) is colored\n assert output == (\"\\x1b[32mI\\x1b[0m Test Message\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_io_test_nothing_logged.with_pytest_raises_pytest.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_io_test_nothing_logged.with_pytest_raises_pytest.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 30, "span_ids": ["test_nothing_logged", "imports"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import io\nimport os\nimport re\nfrom typing import cast\n\nimport pytest\nfrom _pytest.capture import CaptureManager\nfrom _pytest.config import ExitCode\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.pytester import Pytester\nfrom _pytest.terminal import TerminalReporter\n\n\ndef test_nothing_logged(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n\n def test_foo():\n sys.stdout.write('text going to stdout')\n sys.stderr.write('text going to stderr')\n assert False\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines([\"*- Captured stdout call -*\", \"text going to stdout\"])\n result.stdout.fnmatch_lines([\"*- Captured stderr call -*\", \"text going to stderr\"])\n with pytest.raises(pytest.fail.Exception):\n result.stdout.fnmatch_lines([\"*- Captured *log call -*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_collection_live_logging_test_collection_collect_only_live_logging.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_collection_live_logging_test_collection_collect_only_live_logging.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 870, "end_line": 918, "span_ids": ["test_collection_collect_only_live_logging", "test_collection_live_logging"], "tokens": 325}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collection_live_logging(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import logging\n\n logging.getLogger().info(\"Normal message\")\n \"\"\"\n )\n\n result = pytester.runpytest(\"--log-cli-level=INFO\")\n result.stdout.fnmatch_lines(\n [\"*--- live log collection ---*\", \"*Normal message*\", \"collected 0 items\"]\n )\n\n\n@pytest.mark.parametrize(\"verbose\", [\"\", \"-q\", \"-qq\"])\ndef test_collection_collect_only_live_logging(pytester: Pytester, verbose: str) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_simple():\n pass\n \"\"\"\n )\n\n result = pytester.runpytest(\"--collect-only\", \"--log-cli-level=INFO\", verbose)\n\n expected_lines = []\n\n if not verbose:\n expected_lines.extend(\n [\n \"*collected 1 item*\",\n \"**\",\n \"*1 test collected*\",\n ]\n )\n elif verbose == \"-q\":\n result.stdout.no_fnmatch_line(\"*collected 1 item**\")\n expected_lines.extend(\n [\n \"*test_collection_collect_only_live_logging.py::test_simple*\",\n \"1 test collected in [0-9].[0-9][0-9]s\",\n ]\n )\n elif verbose == \"-qq\":\n result.stdout.no_fnmatch_line(\"*collected 1 item**\")\n expected_lines.extend([\"*test_collection_collect_only_live_logging.py: 1*\"])\n\n result.stdout.fnmatch_lines(expected_lines)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_collection_logging_to_file_test_collection_logging_to_file.with_open_log_file_encod.assert_info_message_in_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_collection_logging_to_file_test_collection_logging_to_file.with_open_log_file_encod.assert_info_message_in_t", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 921, "end_line": 956, "span_ids": ["test_collection_logging_to_file"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collection_logging_to_file(pytester: Pytester) -> None:\n log_file = str(pytester.path.joinpath(\"pytest.log\"))\n\n pytester.makeini(\n \"\"\"\n [pytest]\n log_file={}\n log_file_level = INFO\n \"\"\".format(\n log_file\n )\n )\n\n pytester.makepyfile(\n \"\"\"\n import logging\n\n logging.getLogger().info(\"Normal message\")\n\n def test_simple():\n logging.getLogger().debug(\"debug message in test_simple\")\n logging.getLogger().info(\"info message in test_simple\")\n \"\"\"\n )\n\n result = pytester.runpytest()\n\n result.stdout.no_fnmatch_line(\"*--- live log collection ---*\")\n\n assert result.ret == 0\n assert os.path.isfile(log_file)\n with open(log_file, encoding=\"utf-8\") as rfh:\n contents = rfh.read()\n assert \"Normal message\" in contents\n assert \"debug message in test_simple\" not in contents\n assert \"info message in test_simple\" in contents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_set_path_test_log_set_path.None_1.assert_message_from_test": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_set_path_test_log_set_path.None_1.assert_message_from_test", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 1029, "end_line": 1074, "span_ids": ["test_log_set_path"], "tokens": 297}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_set_path(pytester: Pytester) -> None:\n report_dir_base = str(pytester.path)\n\n pytester.makeini(\n \"\"\"\n [pytest]\n log_file_level = DEBUG\n log_cli=true\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import os\n import pytest\n @pytest.hookimpl(hookwrapper=True, tryfirst=True)\n def pytest_runtest_setup(item):\n config = item.config\n logging_plugin = config.pluginmanager.get_plugin(\"logging-plugin\")\n report_file = os.path.join({}, item._request.node.name)\n 
logging_plugin.set_log_path(report_file)\n yield\n \"\"\".format(\n repr(report_dir_base)\n )\n )\n pytester.makepyfile(\n \"\"\"\n import logging\n logger = logging.getLogger(\"testcase-logger\")\n def test_first():\n logger.info(\"message from test 1\")\n assert True\n\n def test_second():\n logger.debug(\"message from test 2\")\n assert True\n \"\"\"\n )\n pytester.runpytest()\n with open(os.path.join(report_dir_base, \"test_first\")) as rfh:\n content = rfh.read()\n assert \"message from test 1\" in content\n\n with open(os.path.join(report_dir_base, \"test_second\")) as rfh:\n content = rfh.read()\n assert \"message from test 2\" in content", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_colored_captured_log_test_colored_captured_log.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_colored_captured_log_test_colored_captured_log.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 1077, "end_line": 1098, "span_ids": ["test_colored_captured_log"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_colored_captured_log(pytester: Pytester) -> None:\n \"\"\"Test that the level names of captured log messages of a failing test\n are colored.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n logger = logging.getLogger(__name__)\n\n def test_foo():\n logger.info('text going to logger from call')\n assert False\n \"\"\"\n )\n result = pytester.runpytest(\"--log-level=INFO\", \"--color=yes\")\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\n \"*-- Captured log call --*\",\n \"\\x1b[32mINFO \\x1b[0m*text going to logger from call\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_multiple_packages_TestScopeOrdering.test_multiple_packages.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_multiple_packages_TestScopeOrdering.test_multiple_packages.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4168, "end_line": 4236, "span_ids": ["TestScopeOrdering.test_multiple_packages"], "tokens": 479}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"class TestScopeOrdering:\n\n def test_multiple_packages(self, pytester: Pytester) -> None:\n \"\"\"Complex test involving multiple package fixtures. Make sure teardowns\n are executed in order.\n .\n \u2514\u2500\u2500 root\n \u251c\u2500\u2500 __init__.py\n \u251c\u2500\u2500 sub1\n \u2502 \u251c\u2500\u2500 __init__.py\n \u2502 \u251c\u2500\u2500 conftest.py\n \u2502 \u2514\u2500\u2500 test_1.py\n \u2514\u2500\u2500 sub2\n \u251c\u2500\u2500 __init__.py\n \u251c\u2500\u2500 conftest.py\n \u2514\u2500\u2500 test_2.py\n \"\"\"\n root = pytester.mkdir(\"root\")\n root.joinpath(\"__init__.py\").write_text(\"values = []\")\n sub1 = root.joinpath(\"sub1\")\n sub1.mkdir()\n sub1.joinpath(\"__init__.py\").touch()\n sub1.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n from .. import values\n @pytest.fixture(scope=\"package\")\n def fix():\n values.append(\"pre-sub1\")\n yield values\n assert values.pop() == \"pre-sub1\"\n \"\"\"\n )\n )\n sub1.joinpath(\"test_1.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n from .. import values\n def test_1(fix):\n assert values == [\"pre-sub1\"]\n \"\"\"\n )\n )\n sub2 = root.joinpath(\"sub2\")\n sub2.mkdir()\n sub2.joinpath(\"__init__.py\").touch()\n sub2.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n from .. import values\n @pytest.fixture(scope=\"package\")\n def fix():\n values.append(\"pre-sub2\")\n yield values\n assert values.pop() == \"pre-sub2\"\n \"\"\"\n )\n )\n sub2.joinpath(\"test_2.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n from .. import values\n def test_2(fix):\n assert values == [\"pre-sub2\"]\n \"\"\"\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_idfn_and_config_TestMetafunc.test_idmaker_with_idfn_and_config.for_config_expected_in_v.assert_result_expecte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_idfn_and_config_TestMetafunc.test_idmaker_with_idfn_and_config.for_config_expected_in_v.assert_result_expecte", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 489, "end_line": 522, "span_ids": ["TestMetafunc.test_idmaker_with_idfn_and_config.MockConfig.__init__", "TestMetafunc.test_idmaker_with_idfn_and_config", "TestMetafunc.test_idmaker_with_idfn_and_config.MockConfig"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_with_idfn_and_config(self) -> None:\n \"\"\"Unit test for expected behavior to create ids with idfn and\n disable_test_id_escaping_and_forfeit_all_rights_to_community_support\n option (#5294).\n \"\"\"\n\n class MockConfig:\n def __init__(self, config):\n self.config = config\n\n @property\n def hook(self):\n return self\n\n def pytest_make_parametrize_id(self, **kw):\n pass\n\n 
def getini(self, name):\n return self.config[name]\n\n option = \"disable_test_id_escaping_and_forfeit_all_rights_to_community_support\"\n\n values: List[Tuple[Any, str]] = [\n (MockConfig({option: True}), \"a\u00e7\u00e3o\"),\n (MockConfig({option: False}), \"a\\\\xe7\\\\xe3o\"),\n ]\n for config, expected in values:\n result = idmaker(\n (\"a\",),\n [pytest.param(\"string\")],\n idfn=lambda _: \"a\u00e7\u00e3o\",\n config=config,\n )\n assert result == [expected]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_ids_and_config_TestMetafunc.test_idmaker_with_ids_and_config.for_config_expected_in_v.assert_result_expecte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_ids_and_config_TestMetafunc.test_idmaker_with_ids_and_config.for_config_expected_in_v.assert_result_expecte", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 524, "end_line": 557, "span_ids": ["TestMetafunc.test_idmaker_with_ids_and_config.MockConfig", "TestMetafunc.test_idmaker_with_ids_and_config", "TestMetafunc.test_idmaker_with_ids_and_config.MockConfig.__init__"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_with_ids_and_config(self) -> None:\n \"\"\"Unit test for expected behavior to create ids with ids and\n disable_test_id_escaping_and_forfeit_all_rights_to_community_support\n option (#5294).\n \"\"\"\n\n class MockConfig:\n def __init__(self, config):\n self.config = config\n\n @property\n def hook(self):\n return self\n\n def pytest_make_parametrize_id(self, **kw):\n pass\n\n def getini(self, name):\n return self.config[name]\n\n option = \"disable_test_id_escaping_and_forfeit_all_rights_to_community_support\"\n\n values: List[Tuple[Any, str]] = [\n (MockConfig({option: True}), \"a\u00e7\u00e3o\"),\n (MockConfig({option: False}), \"a\\\\xe7\\\\xe3o\"),\n ]\n for config, expected in values:\n result = idmaker(\n (\"a\",),\n [pytest.param(\"string\")],\n ids=[\"a\u00e7\u00e3o\"],\n config=config,\n )\n assert result == [expected]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_subprocess__wrapcall.try_.except_subprocess_CalledP.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_argcomplete.py_subprocess__wrapcall.try_.except_subprocess_CalledP.return._", "embedding": null, "metadata": {"file_path": "testing/test_argcomplete.py", "file_name": "test_argcomplete.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 32, "span_ids": ["equal_with_bash", "imports", "_wrapcall"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import subprocess\nimport sys\nfrom pathlib import Path\n\nimport pytest\nfrom _pytest.monkeypatch import MonkeyPatch\n\n# Test for _argcomplete but not specific for any application.\n\n\ndef equal_with_bash(prefix, ffc, fc, out=None):\n res = ffc(prefix)\n res_bash = set(fc(prefix))\n retval = set(res) == res_bash\n if out:\n out.write(f\"equal_with_bash({prefix}) {retval} {res}\\n\")\n if not retval:\n out.write(\" python - bash: %s\\n\" % (set(res) - res_bash))\n out.write(\" bash - python: %s\\n\" % (res_bash - set(res)))\n return retval\n\n\n# Copied from argcomplete.completers as import from there.\n# Also pulls in argcomplete.__init__ which opens filedescriptor 9.\n# This gives an OSError at the end of testrun.\n\n\ndef _wrapcall(*args, **kargs):\n try:\n return subprocess.check_output(*args, **kargs).decode().splitlines()\n except subprocess.CalledProcessError:\n return []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_recursion_source_decode_test_diff_newline_at_end.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_recursion_source_decode_test_diff_newline_at_end.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1481, "end_line": 1536, "span_ids": ["test_AssertionError_message", "test_diff_newline_at_end", "test_recursion_source_decode"], "tokens": 289}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_recursion_source_decode(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_something():\n pass\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n python_files = *.py\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines(\n \"\"\"\n \n \"\"\"\n )\n\n\ndef test_AssertionError_message(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n x,y = 1,2\n assert 0, (x,y)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *def test_hello*\n *assert 0, (x,y)*\n *AssertionError: (1, 2)*\n \"\"\"\n )\n\n\ndef test_diff_newline_at_end(pytester: Pytester) -> None:\n pytester.makepyfile(\n r\"\"\"\n def test_diff():\n assert 'asdf' == 'asdf\\n'\n \"\"\"\n )\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n r\"\"\"\n *assert 'asdf' == 'asdf\\n'\n * - asdf\n * ? 
-\n * + asdf\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assert_tuple_warning_test_assert_tuple_warning.assert_msg_not_in_result_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assert_tuple_warning_test_assert_tuple_warning.assert_msg_not_in_result_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1539, "end_line": 1559, "span_ids": ["test_assert_tuple_warning"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_assert_tuple_warning(pytester: Pytester) -> None:\n msg = \"assertion is always true\"\n pytester.makepyfile(\n \"\"\"\n def test_tuple():\n assert(False, 'you shall not pass')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([f\"*test_assert_tuple_warning.py:2:*{msg}*\"])\n\n # tuples with size != 2 should not trigger the warning\n pytester.makepyfile(\n \"\"\"\n def test_tuple():\n assert ()\n \"\"\"\n )\n result = pytester.runpytest()\n assert msg not in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_ast_rewrite.return.tree": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_ast_rewrite.return.tree", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 37, "span_ids": ["rewrite", "imports"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import ast\nimport errno\nimport glob\nimport importlib\nimport marshal\nimport os\nimport py_compile\nimport stat\nimport sys\nimport textwrap\nimport zipfile\nfrom functools import partial\nfrom pathlib import Path\nfrom typing import Dict\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Set\n\nimport _pytest._code\nimport pytest\nfrom _pytest.assertion import util\nfrom _pytest.assertion.rewrite import _get_assertion_exprs\nfrom _pytest.assertion.rewrite import AssertionRewritingHook\nfrom _pytest.assertion.rewrite import get_cache_dir\nfrom _pytest.assertion.rewrite import PYC_TAIL\nfrom _pytest.assertion.rewrite import PYTEST_TAG\nfrom _pytest.assertion.rewrite import rewrite_asserts\nfrom _pytest.config import ExitCode\nfrom _pytest.pathlib import make_numbered_dir\nfrom 
_pytest.pytester import Pytester\n\n\ndef rewrite(src: str) -> ast.Module:\n tree = ast.parse(src)\n rewrite_asserts(tree, src.encode())\n return tree", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_contains_unicode_TestDoctests.test_ignore_import_errors_on_doctest.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_contains_unicode_TestDoctests.test_ignore_import_errors_on_doctest.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 676, "end_line": 707, "span_ids": ["TestDoctests.test_ignore_import_errors_on_doctest", "TestDoctests.test_contains_unicode"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_contains_unicode(self, pytester: Pytester):\n \"\"\"Fix internal error with docstrings containing non-ascii characters.\"\"\"\n pytester.makepyfile(\n '''\\\n def foo():\n \"\"\"\n >>> name = '\u0441' # not letter 'c' but instead Cyrillic 's'.\n 'anything'\n \"\"\"\n '''\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines([\"Got nothing\", \"* 1 failed in*\"])\n\n def test_ignore_import_errors_on_doctest(self, pytester: Pytester):\n p = pytester.makepyfile(\n \"\"\"\n import asdf\n\n def add_one(x):\n '''\n >>> add_one(1)\n 2\n '''\n return x + 1\n \"\"\"\n )\n\n reprec = pytester.inline_run(\n p, \"--doctest-modules\", \"--doctest-ignore-import-errors\"\n )\n reprec.assertoutcome(skipped=1, failed=1, passed=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_unicode_string_TestLiterals.test_bytes_literal.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_unicode_string_TestLiterals.test_bytes_literal.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 909, "end_line": 934, "span_ids": ["TestLiterals.test_bytes_literal", "TestLiterals.test_unicode_string"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLiterals:\n\n def test_unicode_string(self, pytester: Pytester):\n \"\"\"Test that doctests which output unicode fail in Python 2 when\n the ALLOW_UNICODE option is not used. 
The same test should pass\n in Python 3.\n \"\"\"\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> b'12'.decode('ascii')\n '12'\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n def test_bytes_literal(self, pytester: Pytester):\n \"\"\"Test that doctests which output bytes fail in Python 3 when\n the ALLOW_BYTES option is not used. (#1287).\n \"\"\"\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> b'foo'\n 'foo'\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_entry_points.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_entry_points.py__", "embedding": null, "metadata": {"file_path": "testing/test_entry_points.py", "file_name": "test_entry_points.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 8, "span_ids": ["test_pytest_entry_points_are_identical", "imports"], "tokens": 62}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from _pytest.compat import importlib_metadata\n\n\ndef test_pytest_entry_points_are_identical():\n dist = importlib_metadata.distribution(\"pytest\")\n entry_map = {ep.name: ep for ep in dist.entry_points}\n assert entry_map[\"pytest\"].value == entry_map[\"py.test\"].value", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_testsuite_property_test_record_testsuite_property.p2_node_assert_attr_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_testsuite_property_test_record_testsuite_property.p2_node_assert_attr_name_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1526, "end_line": 1546, "span_ids": ["test_record_testsuite_property"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@parametrize_families\ndef test_record_testsuite_property(\n pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_func1(record_testsuite_property):\n record_testsuite_property(\"stats\", \"all good\")\n\n def test_func2(record_testsuite_property):\n record_testsuite_property(\"stats\", 10)\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testsuite\")\n properties_node = node.find_first_by_tag(\"properties\")\n p1_node = properties_node.find_nth_by_tag(\"property\", 0)\n p2_node = 
properties_node.find_nth_by_tag(\"property\", 1)\n p1_node.assert_attr(name=\"stats\", value=\"all good\")\n p2_node.assert_attr(name=\"stats\", value=\"10\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_testsuite_property_junit_disabled_test_record_testsuite_property_type_checking.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_testsuite_property_junit_disabled_test_record_testsuite_property_type_checking.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1549, "end_line": 1575, "span_ids": ["test_record_testsuite_property_junit_disabled", "test_record_testsuite_property_type_checking"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_record_testsuite_property_junit_disabled(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_func1(record_testsuite_property):\n record_testsuite_property(\"stats\", \"all good\")\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n\n\n@pytest.mark.parametrize(\"junit\", [True, False])\ndef test_record_testsuite_property_type_checking(\n pytester: Pytester, junit: bool\n) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_func1(record_testsuite_property):\n record_testsuite_property(1, 2)\n \"\"\"\n )\n args = (\"--junitxml=tests.xml\",) if junit else ()\n result = pytester.runpytest(*args)\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\"*TypeError: name parameter needs to be a string, but int given\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_escaped_skipreason_issue3533_test_escaped_skipreason_issue3533.snode_assert_attr_message": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_escaped_skipreason_issue3533_test_escaped_skipreason_issue3533.snode_assert_attr_message", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1610, "end_line": 1625, "span_ids": ["test_escaped_skipreason_issue3533"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_escaped_skipreason_issue3533(\n pytester: Pytester, run_and_parse: RunAndParse\n) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip(reason='1 <> 2')\n def test_skip():\n pass\n 
\"\"\"\n )\n _, dom = run_and_parse()\n node = dom.find_first_by_tag(\"testcase\")\n snode = node.find_first_by_tag(\"skipped\")\n assert \"1 <> 2\" in snode.text\n snode.assert_attr(message=\"1 <> 2\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrize_iterator_test_parametrize_iterator.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrize_iterator_test_parametrize_iterator.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 450, "end_line": 469, "span_ids": ["test_parametrize_iterator"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parametrize_iterator(pytester: Pytester) -> None:\n \"\"\"`parametrize` should work with generators (#5354).\"\"\"\n py_file = pytester.makepyfile(\n \"\"\"\\\n import pytest\n\n def gen():\n yield 1\n yield 2\n yield 3\n\n @pytest.mark.parametrize('a', gen())\n def test(a):\n assert a >= 1\n \"\"\"\n )\n result = pytester.runpytest(py_file)\n assert result.ret == 0\n # should not skip any tests\n result.stdout.fnmatch_lines([\"*3 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_pytest_test_nose_setup.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_pytest_test_nose_setup.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 27, "span_ids": ["setup_module", "imports", "test_nose_setup"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom _pytest.pytester import Pytester\n\n\ndef setup_module(mod):\n mod.nose = pytest.importorskip(\"nose\")\n\n\ndef test_nose_setup(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n values = []\n from nose.tools import with_setup\n\n @with_setup(lambda: values.append(1), lambda: values.append(2))\n def test_hello():\n assert values == [1]\n\n def test_world():\n assert values == [1,2]\n\n test_hello.setup = lambda: values.append(1)\n test_hello.teardown = lambda: values.append(2)\n \"\"\"\n )\n result = pytester.runpytest(p, \"-p\", \"nose\")\n result.assert_outcomes(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_os_TestPytestPluginInteractions.test_addhooks_conftestplugin.assert_res_11_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_os_TestPytestPluginInteractions.test_addhooks_conftestplugin.assert_res_11_", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 50, "span_ids": ["pytestpm", "TestPytestPluginInteractions", "imports", "TestPytestPluginInteractions.test_addhooks_conftestplugin"], "tokens": 316}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport shutil\nimport sys\nimport types\nfrom typing import List\n\nimport pytest\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config import PytestPluginManager\nfrom _pytest.config.exceptions import UsageError\nfrom _pytest.main import Session\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pathlib import import_path\nfrom _pytest.pytester import Pytester\n\n\n@pytest.fixture\ndef pytestpm() -> PytestPluginManager:\n return PytestPluginManager()\n\n\nclass TestPytestPluginInteractions:\n def test_addhooks_conftestplugin(\n self, pytester: Pytester, _config_for_test: Config\n ) -> None:\n pytester.makepyfile(\n newhooks=\"\"\"\n def pytest_myhook(xyz):\n \"new hook\"\n \"\"\"\n )\n conf = pytester.makeconftest(\n \"\"\"\n import newhooks\n def pytest_addhooks(pluginmanager):\n pluginmanager.add_hookspecs(newhooks)\n def pytest_myhook(xyz):\n return xyz + 1\n \"\"\"\n )\n config = _config_for_test\n pm = config.pluginmanager\n pm.hook.pytest_addhooks.call_historic(\n kwargs=dict(pluginmanager=config.pluginmanager)\n )\n config.pluginmanager._importconftest(conf, importmode=\"prepend\")\n # print(config.pluginmanager.get_plugins())\n res = config.hook.pytest_myhook(xyz=10)\n assert res == [11]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_makepyfile_unicode_test_makepyfile_utf8.assert_mixed_encoding_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_makepyfile_unicode_test_makepyfile_utf8.assert_mixed_encoding_", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 206, "end_line": 217, "span_ids": ["test_makepyfile_utf8", "test_makepyfile_unicode"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_makepyfile_unicode(pytester: Pytester) -> None:\n pytester.makepyfile(chr(0xFFFD))\n\n\ndef 
test_makepyfile_utf8(pytester: Pytester) -> None:\n \"\"\"Ensure makepyfile accepts utf-8 bytes as input (#2738)\"\"\"\n utf8_contents = \"\"\"\n def setup_function(function):\n mixed_encoding = 'S\u00e3o Paulo'\n \"\"\".encode()\n p = pytester.makepyfile(utf8_contents)\n assert \"mixed_encoding = 'S\u00e3o Paulo'\".encode() in p.read_bytes()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_re_test_recwarn_functional.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_re_test_recwarn_functional.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 27, "span_ids": ["test_recwarn_stacklevel", "imports", "test_recwarn_functional"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import re\nimport warnings\nfrom typing import Optional\n\nimport pytest\nfrom _pytest.pytester import Pytester\nfrom _pytest.recwarn import WarningsRecorder\n\n\ndef test_recwarn_stacklevel(recwarn: WarningsRecorder) -> None:\n warnings.warn(\"hello\")\n warn = recwarn.pop()\n assert warn.filename == __file__\n\n\ndef test_recwarn_functional(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import warnings\n def test_method(recwarn):\n warnings.warn(\"hello\")\n warn = recwarn.pop()\n assert isinstance(warn.message, UserWarning)\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_pytest_SessionTests.test_basic_testitem_events._assert_colreports_1_re": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_pytest_SessionTests.test_basic_testitem_events._assert_colreports_1_re", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 38, "span_ids": ["SessionTests.test_basic_testitem_events", "imports", "SessionTests"], "tokens": 283}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\n\nclass SessionTests:\n def test_basic_testitem_events(self, pytester: Pytester) -> None:\n tfile = pytester.makepyfile(\n \"\"\"\n def test_one():\n pass\n def test_one_one():\n assert 0\n def test_other():\n raise ValueError(23)\n 
class TestClass(object):\n def test_two(self, someargs):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(tfile)\n passed, skipped, failed = reprec.listoutcomes()\n assert len(skipped) == 0\n assert len(passed) == 1\n assert len(failed) == 3\n\n def end(x):\n return x.nodeid.split(\"::\")[-1]\n\n assert end(failed[0]) == \"test_one_one\"\n assert end(failed[1]) == \"test_other\"\n itemstarted = reprec.getcalls(\"pytest_itemcollected\")\n assert len(itemstarted) == 4\n # XXX check for failing funcarg setup\n # colreports = reprec.getcalls(\"pytest_collectreport\")\n # assert len(colreports) == 4\n # assert colreports[1].report.failed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fail_extra_reporting_test_fail_extra_reporting.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fail_extra_reporting_test_fail_extra_reporting.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1084, "end_line": 1095, "span_ids": ["test_fail_extra_reporting"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fail_extra_reporting(pytester: Pytester, monkeypatch) -> None:\n monkeypatch.setenv(\"COLUMNS\", \"80\")\n pytester.makepyfile(\"def test_this(): assert 0, 'this_failed' * 100\")\n result = pytester.runpytest(\"-rN\")\n result.stdout.no_fnmatch_line(\"*short test summary*\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*test summary*\",\n \"FAILED test_fail_extra_reporting.py::test_this - AssertionError: this_failedt...\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_skip_reasons_folding_test_skip_reasons_folding.assert_reason_message": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_skip_reasons_folding_test_skip_reasons_folding.assert_reason_message", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2222, "end_line": 2253, "span_ids": ["test_skip_reasons_folding", "test_skip_reasons_folding.X:2", "test_skip_reasons_folding.X"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_skip_reasons_folding() -> None:\n path = \"xyz\"\n lineno = 3\n message = \"justso\"\n longrepr = (path, lineno, message)\n\n class X:\n pass\n\n ev1 = 
cast(CollectReport, X())\n ev1.when = \"execute\"\n ev1.skipped = True # type: ignore[misc]\n ev1.longrepr = longrepr\n\n ev2 = cast(CollectReport, X())\n ev2.when = \"execute\"\n ev2.longrepr = longrepr\n ev2.skipped = True # type: ignore[misc]\n\n # ev3 might be a collection report\n ev3 = cast(CollectReport, X())\n ev3.when = \"collect\"\n ev3.longrepr = longrepr\n ev3.skipped = True # type: ignore[misc]\n\n values = _folded_skips(Path.cwd(), [ev1, ev2, ev3])\n assert len(values) == 1\n num, fspath, lineno_, reason = values[0]\n assert num == 3\n assert fspath == path\n assert lineno_ == lineno\n assert reason == message", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_gc_test_simple_unittest.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_gc_test_simple_unittest.None_1", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 24, "span_ids": ["test_simple_unittest", "imports"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import gc\nimport sys\nfrom typing import List\n\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\n\ndef test_simple_unittest(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n def testpassing(self):\n self.assertEqual('foo', 'foo')\n def test_failing(self):\n self.assertEqual('foo', 'bar')\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n assert reprec.matchreport(\"testpassing\").passed\n assert reprec.matchreport(\"test_failing\").failed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_new_instances_test_function_item_obj_is_instance.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_new_instances_test_function_item_obj_is_instance.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 131, "end_line": 166, "span_ids": ["test_function_item_obj_is_instance", "test_new_instances"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_new_instances(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class 
MyTestCase(unittest.TestCase):\n def test_func1(self):\n self.x = 2\n def test_func2(self):\n assert not hasattr(self, 'x')\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(passed=2)\n\n\ndef test_function_item_obj_is_instance(pytester: Pytester) -> None:\n \"\"\"item.obj should be a bound method on unittest.TestCase function items (#5390).\"\"\"\n pytester.makeconftest(\n \"\"\"\n def pytest_runtest_makereport(item, call):\n if call.when == 'call':\n class_ = item.parent.obj\n assert isinstance(item.obj.__self__, class_)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import unittest\n\n class Test(unittest.TestCase):\n def test_foo(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest_inprocess()\n result.stdout.fnmatch_lines([\"* 1 passed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_teardown_test_teardown.assert_passed_skipped_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_teardown_test_teardown.assert_passed_skipped_", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 169, "end_line": 188, "span_ids": ["test_teardown"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_teardown(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n values = []\n def test_one(self):\n pass\n def tearDown(self):\n self.values.append(None)\n class Second(unittest.TestCase):\n def test_check(self):\n self.assertEqual(MyTestCase.values, [None])\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 0, failed\n assert passed == 2\n assert passed + skipped + failed == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_exceptions_with_skips_TestTrialUnittest.test_trial_exceptions_with_skips.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_exceptions_with_skips_TestTrialUnittest.test_trial_exceptions_with_skips.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 453, "end_line": 501, "span_ids": ["TestTrialUnittest.test_trial_exceptions_with_skips"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "class TestTrialUnittest:\n\n def test_trial_exceptions_with_skips(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from twisted.trial import unittest\n import pytest\n class TC(unittest.TestCase):\n def test_hello(self):\n pytest.skip(\"skip_in_method\")\n @pytest.mark.skipif(\"sys.version_info != 1\")\n def test_hello2(self):\n pass\n @pytest.mark.xfail(reason=\"iwanto\")\n def test_hello3(self):\n assert 0\n def test_hello4(self):\n pytest.xfail(\"i2wanto\")\n def test_trial_skip(self):\n pass\n test_trial_skip.skip = \"trialselfskip\"\n\n def test_trial_todo(self):\n assert 0\n test_trial_todo.todo = \"mytodo\"\n\n def test_trial_todo_success(self):\n pass\n test_trial_todo_success.todo = \"mytodo\"\n\n class TC2(unittest.TestCase):\n def setup_class(cls):\n pytest.skip(\"skip_in_setup_class\")\n def test_method(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rxs\", *self.ignore_unclosed_socket_warning)\n result.stdout.fnmatch_lines_random(\n [\n \"*XFAIL*test_trial_todo*\",\n \"*trialselfskip*\",\n \"*skip_in_setup_class*\",\n \"*iwanto*\",\n \"*i2wanto*\",\n \"*sys.version_info*\",\n \"*skip_in_method*\",\n \"*1 failed*4 skipped*3 xfailed*\",\n ]\n )\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_expected_failure_for_passing_test_is_fail_test_unittest_expected_failure_for_passing_test_is_fail.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_expected_failure_for_passing_test_is_fail_test_unittest_expected_failure_for_passing_test_is_fail.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 766, "end_line": 796, "span_ids": ["test_unittest_expected_failure_for_passing_test_is_fail"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"runner\", [\"pytest\", \"unittest\"])\ndef test_unittest_expected_failure_for_passing_test_is_fail(\n pytester: Pytester,\n runner: str,\n) -> None:\n script = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n @unittest.expectedFailure\n def test_passing_test_is_fail(self):\n assert True\n if __name__ == '__main__':\n unittest.main()\n \"\"\"\n )\n\n if runner == \"pytest\":\n result = pytester.runpytest(\"-rxX\")\n result.stdout.fnmatch_lines(\n [\n \"*MyTestCase*test_passing_test_is_fail*\",\n \"Unexpected success\",\n \"*1 failed*\",\n ]\n )\n else:\n result = pytester.runpython(script)\n result.stderr.fnmatch_lines([\"*1 test in*\", \"*(unexpected successes=1)*\"])\n\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_deprecation_warning_as_error_test_deprecation_warning_as_error.if_change_default_is_None.else_.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_deprecation_warning_as_error_test_deprecation_warning_as_error.if_change_default_is_None.else_.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 518, "end_line": 554, "span_ids": ["test_deprecation_warning_as_error"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"change_default\", [None, \"ini\", \"cmdline\"])\n@pytest.mark.skip(\n reason=\"This test should be enabled again before pytest 7.0 is released\"\n)\ndef test_deprecation_warning_as_error(pytester: Pytester, change_default) -> None:\n \"\"\"This ensures that PytestDeprecationWarnings raised by pytest are turned into errors.\n\n This test should be enabled as part of each major release, and skipped again afterwards\n to ensure our deprecations are turning into warnings as expected.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import warnings, pytest\n def test():\n warnings.warn(pytest.PytestDeprecationWarning(\"some warning\"))\n \"\"\"\n )\n if change_default == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings =\n ignore::pytest.PytestDeprecationWarning\n \"\"\"\n )\n\n args = (\n (\"-Wignore::pytest.PytestDeprecationWarning\",)\n if change_default == \"cmdline\"\n else ()\n )\n result = pytester.runpytest(*args)\n if change_default is None:\n result.stdout.fnmatch_lines([\"* 1 failed in *\"])\n else:\n assert change_default in (\"ini\", \"cmdline\")\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.match_ExceptionInfo.match.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.match_ExceptionInfo.match.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 654, "end_line": 666, "span_ids": ["ExceptionInfo.match"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False)\nclass ExceptionInfo(Generic[_E]):\n\n def match(self, regexp: Union[str, Pattern[str]]) -> \"Literal[True]\":\n \"\"\"Check whether the regular expression `regexp` matches the string\n representation of the exception using :func:`python:re.search`.\n\n If it matches `True` is returned, otherwise an 
`AssertionError` is raised.\n \"\"\"\n __tracebackhide__ = True\n msg = \"Regex pattern {!r} does not match {!r}.\"\n if regexp == str(self.value):\n msg += \" Did you mean to `re.escape()` the regex?\"\n assert re.search(regexp, str(self.value)), msg.format(regexp, str(self.value))\n # Return True to allow for \"assert excinfo.match()\".\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook_AssertionRewritingHook._find_spec.importlib_machinery_PathF": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook_AssertionRewritingHook._find_spec.importlib_machinery_PathF", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 55, "end_line": 79, "span_ids": ["AssertionRewritingHook:3", "AssertionRewritingHook", "AssertionRewritingHook.set_session"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n \"\"\"PEP302/PEP451 import hook which rewrites asserts.\"\"\"\n\n def __init__(self, config: Config) -> None:\n self.config = config\n try:\n self.fnpats = config.getini(\"python_files\")\n except ValueError:\n self.fnpats = [\"test_*.py\", \"*_test.py\"]\n self.session: Optional[Session] = None\n self._rewritten_names: Set[str] = set()\n self._must_rewrite: Set[str] = set()\n # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file,\n # which might result in infinite recursion (#3506)\n self._writing_pyc = False\n self._basenames_to_check_rewrite = {\"conftest\"}\n self._marked_for_rewrite_cache: Dict[str, bool] = {}\n self._session_paths_checked = False\n\n def set_session(self, session: Optional[Session]) -> None:\n self.session = session\n self._session_paths_checked = False\n\n # Indirection so we can mock calls to find_spec originated from the hook during testing\n _find_spec = importlib.machinery.PathFinder.find_spec", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.find_spec_AssertionRewritingHook.create_module._default_behaviour_is_fi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.find_spec_AssertionRewritingHook.create_module._default_behaviour_is_fi", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 81, "end_line": 127, "span_ids": ["AssertionRewritingHook.create_module", "AssertionRewritingHook.find_spec"], "tokens": 390}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n\n def find_spec(\n self,\n name: str,\n path: Optional[Sequence[Union[str, bytes]]] = None,\n target: Optional[types.ModuleType] = None,\n ) -> Optional[importlib.machinery.ModuleSpec]:\n if self._writing_pyc:\n return None\n state = self.config._store[assertstate_key]\n if self._early_rewrite_bailout(name, state):\n return None\n state.trace(\"find_module called for: %s\" % name)\n\n # Type ignored because mypy is confused about the `self` binding here.\n spec = self._find_spec(name, path) # type: ignore\n if (\n # the import machinery could not find a file to import\n spec is None\n # this is a namespace package (without `__init__.py`)\n # there's nothing to rewrite there\n # python3.6: `namespace`\n # python3.7+: `None`\n or spec.origin == \"namespace\"\n or spec.origin is None\n # we can only rewrite source files\n or not isinstance(spec.loader, importlib.machinery.SourceFileLoader)\n # if the file doesn't exist, we can't rewrite it\n or not os.path.exists(spec.origin)\n ):\n return None\n else:\n fn = spec.origin\n\n if not self._should_rewrite(name, fn, state):\n return None\n\n return importlib.util.spec_from_file_location(\n name,\n fn,\n loader=self,\n submodule_search_locations=spec.submodule_search_locations,\n )\n\n def create_module(\n self, spec: importlib.machinery.ModuleSpec\n ) -> Optional[types.ModuleType]:\n return None # default behaviour is fine", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.exec_module_AssertionRewritingHook.exec_module.exec_co_module___dict___": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.exec_module_AssertionRewritingHook.exec_module.exec_co_module___dict___", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 129, "end_line": 169, "span_ids": ["AssertionRewritingHook.exec_module"], "tokens": 449}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n\n def exec_module(self, module: types.ModuleType) -> None:\n assert module.__spec__ is not None\n assert module.__spec__.origin is not None\n fn = Path(module.__spec__.origin)\n state = self.config._store[assertstate_key]\n\n self._rewritten_names.add(module.__name__)\n\n # The requested module looks like a test file, so rewrite it. This is\n # the most magical part of the process: load the source, rewrite the\n # asserts, and load the rewritten source. We also cache the rewritten\n # module code in a special pyc. 
We must be aware of the possibility of\n # concurrent pytest processes rewriting and loading pycs. To avoid\n # tricky race conditions, we maintain the following invariant: The\n # cached pyc is always a complete, valid pyc. Operations on it must be\n # atomic. POSIX's atomic rename comes in handy.\n write = not sys.dont_write_bytecode\n cache_dir = get_cache_dir(fn)\n if write:\n ok = try_makedirs(cache_dir)\n if not ok:\n write = False\n state.trace(f\"read only directory: {cache_dir}\")\n\n cache_name = fn.name[:-3] + PYC_TAIL\n pyc = cache_dir / cache_name\n # Notice that even if we're in a read-only directory, I'm going\n # to check for a cached pyc. This may not be optimal...\n co = _read_pyc(fn, pyc, state.trace)\n if co is None:\n state.trace(f\"rewriting {fn!r}\")\n source_stat, co = _rewrite_test(fn, self.config)\n if write:\n self._writing_pyc = True\n try:\n _write_pyc(state, co, source_stat, pyc)\n finally:\n self._writing_pyc = False\n else:\n state.trace(f\"found cached rewritten pyc for {fn}\")\n exec(co, module.__dict__)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._warn_already_imported_AssertionRewritingHook.get_data.with_open_pathname_rb_.return.f_read_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._warn_already_imported_AssertionRewritingHook.get_data.with_open_pathname_rb_.return.f_read_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 262, "end_line": 275, "span_ids": ["AssertionRewritingHook._warn_already_imported", "AssertionRewritingHook.get_data"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n\n def _warn_already_imported(self, name: str) -> None:\n from _pytest.warning_types import PytestAssertRewriteWarning\n\n self.config.issue_config_time_warning(\n PytestAssertRewriteWarning(\n \"Module already imported so cannot be rewritten: %s\" % name\n ),\n stacklevel=5,\n )\n\n def get_data(self, pathname: Union[str, bytes]) -> bytes:\n \"\"\"Optional PEP302 get_data API.\"\"\"\n with open(pathname, \"rb\") as f:\n return f.read()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_issequence_isiterable.try_.except_TypeError_.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_issequence_isiterable.try_.except_TypeError_.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 130, "span_ids": ["isdict", 
"issequence", "isdatacls", "isiterable", "isnamedtuple", "istext", "isattrs", "isset"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def issequence(x: Any) -> bool:\n return isinstance(x, collections.abc.Sequence) and not isinstance(x, str)\n\n\ndef istext(x: Any) -> bool:\n return isinstance(x, str)\n\n\ndef isdict(x: Any) -> bool:\n return isinstance(x, dict)\n\n\ndef isset(x: Any) -> bool:\n return isinstance(x, (set, frozenset))\n\n\ndef isnamedtuple(obj: Any) -> bool:\n return isinstance(obj, tuple) and getattr(obj, \"_fields\", None) is not None\n\n\ndef isdatacls(obj: Any) -> bool:\n return getattr(obj, \"__dataclass_fields__\", None) is not None\n\n\ndef isattrs(obj: Any) -> bool:\n return getattr(obj, \"__attrs_attrs__\", None) is not None\n\n\ndef isiterable(obj: Any) -> bool:\n try:\n iter(obj)\n return not istext(obj)\n except TypeError:\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_main_main.try_.except_UsageError_as_e_.return.ExitCode_USAGE_ERROR": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_main_main.try_.except_UsageError_as_e_.return.ExitCode_USAGE_ERROR", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 132, "end_line": 177, "span_ids": ["main"], "tokens": 356}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def main(\n args: Optional[Union[List[str], \"os.PathLike[str]\"]] = None,\n plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None,\n) -> Union[int, ExitCode]:\n \"\"\"Perform an in-process test run.\n\n :param args: List of command line arguments.\n :param plugins: List of plugin objects to be auto-registered during initialization.\n\n :returns: An exit code.\n \"\"\"\n try:\n try:\n config = _prepareconfig(args, plugins)\n except ConftestImportFailure as e:\n exc_info = ExceptionInfo(e.excinfo)\n tw = TerminalWriter(sys.stderr)\n tw.line(f\"ImportError while loading conftest '{e.path}'.\", red=True)\n exc_info.traceback = exc_info.traceback.filter(\n filter_traceback_for_conftest_import_failure\n )\n exc_repr = (\n exc_info.getrepr(style=\"short\", chain=False)\n if exc_info.traceback\n else exc_info.exconly()\n )\n formatted_tb = str(exc_repr)\n for line in formatted_tb.splitlines():\n tw.line(line.rstrip(), red=True)\n return ExitCode.USAGE_ERROR\n else:\n try:\n ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(\n config=config\n )\n try:\n return ExitCode(ret)\n except ValueError:\n return ret\n finally:\n config._ensure_unconfigure()\n except UsageError as e:\n tw = TerminalWriter(sys.stderr)\n for msg in e.args:\n tw.line(f\"ERROR: {msg}\\n\", 
red=True)\n return ExitCode.USAGE_ERROR", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager_PytestPluginManager.__init__.self._configured.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager_PytestPluginManager.__init__.self._configured.False", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 333, "end_line": 384, "span_ids": ["PytestPluginManager"], "tokens": 469}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n \"\"\"A :py:class:`pluggy.PluginManager ` with\n additional pytest-specific functionality:\n\n * Loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and\n ``pytest_plugins`` global variables found in plugins being loaded.\n * ``conftest.py`` loading during start-up.\n \"\"\"\n\n def __init__(self) -> None:\n import _pytest.assertion\n\n super().__init__(\"pytest\")\n # The objects are module objects, only used generically.\n self._conftest_plugins: Set[types.ModuleType] = set()\n\n # State related to local conftest plugins.\n self._dirpath2confmods: Dict[Path, List[types.ModuleType]] = {}\n self._conftestpath2mod: Dict[Path, types.ModuleType] = {}\n self._confcutdir: Optional[Path] = None\n self._noconftest = False\n self._duplicatepaths: Set[Path] = set()\n\n # plugins that were explicitly skipped with pytest.skip\n # list of (module name, skip reason)\n # previously we would issue a warning when a plugin was skipped, but\n # since we refactored warnings as first citizens of Config, they are\n # just stored here to be used later.\n self.skipped_plugins: List[Tuple[str, str]] = []\n\n self.add_hookspecs(_pytest.hookspec)\n self.register(self)\n if os.environ.get(\"PYTEST_DEBUG\"):\n err: IO[str] = sys.stderr\n encoding: str = getattr(err, \"encoding\", \"utf8\")\n try:\n err = open(\n os.dup(err.fileno()),\n mode=err.mode,\n buffering=1,\n encoding=encoding,\n )\n except Exception:\n pass\n self.trace.root.setwriter(err.write)\n self.enable_tracing()\n\n # Config._consider_importhook will set a real object if required.\n self.rewrite_hook = _pytest.assertion.DummyRewriteHook()\n # Used to know when we are importing conftests after the pytest_configure stage.\n self._configured = False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.register_PytestPluginManager.register.return.ret": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.register_PytestPluginManager.register.return.ret", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": 
"__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 432, "end_line": 453, "span_ids": ["PytestPluginManager.register"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def register(\n self, plugin: _PluggyPlugin, name: Optional[str] = None\n ) -> Optional[str]:\n if name in _pytest.deprecated.DEPRECATED_EXTERNAL_PLUGINS:\n warnings.warn(\n PytestConfigWarning(\n \"{} plugin has been merged into the core, \"\n \"please remove it from your requirements.\".format(\n name.replace(\"_\", \"-\")\n )\n )\n )\n return None\n ret: Optional[str] = super().register(plugin, name)\n if ret:\n self.hook.pytest_plugin_registered.call_historic(\n kwargs=dict(plugin=plugin, manager=self)\n )\n\n if isinstance(plugin, types.ModuleType):\n self.consider_module(plugin)\n return ret", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.getplugin_PytestPluginManager.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.getplugin_PytestPluginManager.None_3", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 455, "end_line": 482, "span_ids": ["PytestPluginManager.pytest_configure", "PytestPluginManager.getplugin", "PytestPluginManager.hasplugin"], "tokens": 249}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def getplugin(self, name: str):\n # Support deprecated naming because plugins (xdist e.g.) 
use it.\n plugin: Optional[_PluggyPlugin] = self.get_plugin(name)\n return plugin\n\n def hasplugin(self, name: str) -> bool:\n \"\"\"Return whether a plugin with the given name is registered.\"\"\"\n return bool(self.get_plugin(name))\n\n def pytest_configure(self, config: \"Config\") -> None:\n \"\"\":meta private:\"\"\"\n # XXX now that the pluginmanager exposes hookimpl(tryfirst...)\n # we should remove tryfirst/trylast as markers.\n config.addinivalue_line(\n \"markers\",\n \"tryfirst: mark a hook implementation function such that the \"\n \"plugin machinery will try to call it first/as early as possible.\",\n )\n config.addinivalue_line(\n \"markers\",\n \"trylast: mark a hook implementation function such that the \"\n \"plugin machinery will try to call it last/as late as possible.\",\n )\n self._configured = True\n\n #\n # Internal API for local conftest plugin handling.\n #", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser.if_sys_version_info_2__MyOptionParser.if_sys_version_info_2_._parse_optional.return.None_arg_string_None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser.if_sys_version_info_2__MyOptionParser.if_sys_version_info_2_._parse_optional.return.None_arg_string_None", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 419, "end_line": 455, "span_ids": ["MyOptionParser:2"], "tokens": 382}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MyOptionParser(argparse.ArgumentParser):\n\n if sys.version_info[:2] < (3, 9): # pragma: no cover\n # Backport of https://github.com/python/cpython/pull/14316 so we can\n # disable long --argument abbreviations without breaking short flags.\n def _parse_optional(\n self, arg_string: str\n ) -> Optional[Tuple[Optional[argparse.Action], str, Optional[str]]]:\n if not arg_string:\n return None\n if not arg_string[0] in self.prefix_chars:\n return None\n if arg_string in self._option_string_actions:\n action = self._option_string_actions[arg_string]\n return action, arg_string, None\n if len(arg_string) == 1:\n return None\n if \"=\" in arg_string:\n option_string, explicit_arg = arg_string.split(\"=\", 1)\n if option_string in self._option_string_actions:\n action = self._option_string_actions[option_string]\n return action, option_string, explicit_arg\n if self.allow_abbrev or not arg_string.startswith(\"--\"):\n option_tuples = self._get_option_tuples(arg_string)\n if len(option_tuples) > 1:\n msg = gettext(\n \"ambiguous option: %(option)s could match %(matches)s\"\n )\n options = \", \".join(option for _, option, _ in option_tuples)\n self.error(msg % {\"option\": arg_string, \"matches\": options})\n elif len(option_tuples) == 1:\n (option_tuple,) = option_tuples\n return option_tuple\n if self._negative_number_matcher.match(arg_string):\n if not self._has_negative_number_optionals:\n return None\n if \" \" in 
arg_string:\n return None\n return None, arg_string, None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_report_all_failed_collections_initargs_TestGeneralUsage.test_report_all_failed_collections_initargs.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_report_all_failed_collections_initargs_TestGeneralUsage.test_report_all_failed_collections_initargs.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 393, "end_line": 407, "span_ids": ["TestGeneralUsage.test_report_all_failed_collections_initargs"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_report_all_failed_collections_initargs(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n from _pytest.config import ExitCode\n\n def pytest_sessionfinish(exitstatus):\n assert exitstatus == ExitCode.USAGE_ERROR\n print(\"pytest_sessionfinish_called\")\n \"\"\"\n )\n pytester.makepyfile(test_a=\"def\", test_b=\"def\")\n result = pytester.runpytest(\"test_a.py::a\", \"test_b.py::b\")\n result.stderr.fnmatch_lines([\"*ERROR*test_a.py::a*\", \"*ERROR*test_b.py::b*\"])\n result.stdout.fnmatch_lines([\"pytest_sessionfinish_called\"])\n assert result.ret == ExitCode.USAGE_ERROR", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_package_TestInvocationVariants.test_cmdline_python_package.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_package_TestInvocationVariants.test_cmdline_python_package.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 645, "end_line": 672, "span_ids": ["TestInvocationVariants.test_cmdline_python_package"], "tokens": 349}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n\n def test_cmdline_python_package(self, pytester: Pytester, monkeypatch) -> None:\n import warnings\n\n monkeypatch.delenv(\"PYTHONDONTWRITEBYTECODE\", False)\n path = pytester.mkpydir(\"tpkg\")\n path.joinpath(\"test_hello.py\").write_text(\"def test_hello(): pass\")\n path.joinpath(\"test_world.py\").write_text(\"def test_world(): 
pass\")\n result = pytester.runpytest(\"--pyargs\", \"tpkg\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n result = pytester.runpytest(\"--pyargs\", \"tpkg.test_hello\", syspathinsert=True)\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n empty_package = pytester.mkpydir(\"empty_package\")\n monkeypatch.setenv(\"PYTHONPATH\", str(empty_package), prepend=os.pathsep)\n # the path which is not a package raises a warning on pypy;\n # no idea why only pypy and not normal python warn about it here\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\", ImportWarning)\n result = pytester.runpytest(\"--pyargs\", \".\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n\n monkeypatch.setenv(\"PYTHONPATH\", str(pytester), prepend=os.pathsep)\n result = pytester.runpytest(\"--pyargs\", \"tpkg.test_missing\", syspathinsert=True)\n assert result.ret != 0\n result.stderr.fnmatch_lines([\"*not*found*test_missing*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass.test_setup_teardown_class_as_classmethod_TestClass.test_issue2234_property.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestClass.test_setup_teardown_class_as_classmethod_TestClass.test_issue2234_property.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 209, "end_line": 262, "span_ids": ["TestClass.test_issue1579_namedtuple", "TestClass.test_setup_teardown_class_as_classmethod", "TestClass.test_issue1035_obj_has_getattr", "TestClass.test_issue2234_property"], "tokens": 342}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestClass:\n\n def test_setup_teardown_class_as_classmethod(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_mod1=\"\"\"\n class TestClassMethod(object):\n @classmethod\n def setup_class(cls):\n pass\n def test_1(self):\n pass\n @classmethod\n def teardown_class(cls):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_issue1035_obj_has_getattr(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n class Chameleon(object):\n def __getattr__(self, name):\n return True\n chameleon = Chameleon()\n \"\"\"\n )\n colitems = modcol.collect()\n assert len(colitems) == 0\n\n def test_issue1579_namedtuple(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import collections\n\n TestCase = collections.namedtuple('TestCase', ['a'])\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"*cannot collect test class 'TestCase' \"\n \"because it has a __new__ constructor*\"\n )\n\n def test_issue2234_property(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class TestCase(object):\n @property\n def prop(self):\n raise NotImplementedError()\n \"\"\"\n )\n 
result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_customized_python_discovery_functions_test_unorderable_types.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_customized_python_discovery_functions_test_unorderable_types.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1200, "end_line": 1237, "span_ids": ["test_customized_python_discovery_functions", "test_unorderable_types"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_customized_python_discovery_functions(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n python_functions=_test\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def _test_underscore():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\", \"-s\")\n result.stdout.fnmatch_lines([\"*_test_underscore*\"])\n\n result = pytester.runpytest()\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n\ndef test_unorderable_types(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class TestJoinEmpty(object):\n pass\n\n def make_test():\n class Test(object):\n pass\n Test.__name__ = \"TestFoo\"\n return Test\n TestFoo = make_test()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*TypeError*\")\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_rewrites_plugin_as_a_package_TestAssertionRewrite.test_rewrites_plugin_as_a_package.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_rewrites_plugin_as_a_package_TestAssertionRewrite.test_rewrites_plugin_as_a_package.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 127, "end_line": 140, "span_ids": ["TestAssertionRewrite.test_rewrites_plugin_as_a_package"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_rewrites_plugin_as_a_package(self, pytester: Pytester) -> None:\n pkgdir = pytester.mkpydir(\"plugin\")\n 
pkgdir.joinpath(\"__init__.py\").write_text(\n \"import pytest\\n\"\n \"@pytest.fixture\\n\"\n \"def special_asserter():\\n\"\n \" def special_assert(x, y):\\n\"\n \" assert x == y\\n\"\n \" return special_assert\\n\"\n )\n pytester.makeconftest('pytest_plugins = [\"plugin\"]')\n pytester.makepyfile(\"def test(special_asserter): special_asserter(1, 2)\\n\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*assert 1 == 2*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_cached_pyc_includes_pytest_version_TestRewriteOnImport.test_cached_pyc_includes_pytest_version.assert_found_names_pyc_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_cached_pyc_includes_pytest_version_TestRewriteOnImport.test_cached_pyc_includes_pytest_version.assert_found_names_pyc_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 830, "end_line": 847, "span_ids": ["TestRewriteOnImport.test_cached_pyc_includes_pytest_version"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_cached_pyc_includes_pytest_version(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n \"\"\"Avoid stale caches (#1671)\"\"\"\n monkeypatch.delenv(\"PYTHONDONTWRITEBYTECODE\", raising=False)\n monkeypatch.delenv(\"PYTHONPYCACHEPREFIX\", raising=False)\n pytester.makepyfile(\n test_foo=\"\"\"\n def test_foo():\n assert True\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n assert result.ret == 0\n found_names = glob.glob(f\"__pycache__/*-pytest-{pytest.__version__}.pyc\")\n assert found_names, \"pyc with expected tag not found in names: {}\".format(\n glob.glob(\"__pycache__/*.pyc\")\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails_TestAssertionRewriteHookDetails.test_write_pyc.assert_not__write_pyc_sta": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails_TestAssertionRewriteHookDetails.test_write_pyc.assert_not__write_pyc_sta", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 976, "end_line": 1017, "span_ids": ["TestAssertionRewriteHookDetails", "TestAssertionRewriteHookDetails.test_write_pyc", "TestAssertionRewriteHookDetails.test_sys_meta_path_munged"], "tokens": 333}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewriteHookDetails:\n def test_sys_meta_path_munged(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_meta_path():\n import sys; sys.meta_path = []\"\"\"\n )\n assert pytester.runpytest().ret == 0\n\n def test_write_pyc(self, pytester: Pytester, tmp_path, monkeypatch) -> None:\n from _pytest.assertion.rewrite import _write_pyc\n from _pytest.assertion import AssertionState\n\n config = pytester.parseconfig()\n state = AssertionState(config, \"rewrite\")\n tmp_path.joinpath(\"source.py\").touch()\n source_path = str(tmp_path)\n pycpath = tmp_path.joinpath(\"pyc\")\n co = compile(\"1\", \"f.py\", \"single\")\n assert _write_pyc(state, co, os.stat(source_path), pycpath)\n\n if sys.platform == \"win32\":\n from contextlib import contextmanager\n\n @contextmanager\n def atomic_write_failed(fn, mode=\"r\", overwrite=False):\n e = OSError()\n e.errno = 10\n raise e\n yield # type:ignore[unreachable]\n\n monkeypatch.setattr(\n _pytest.assertion.rewrite, \"atomic_write\", atomic_write_failed\n )\n else:\n\n def raise_oserror(*args):\n raise OSError()\n\n monkeypatch.setattr(\"os.rename\", raise_oserror)\n\n assert not _write_pyc(state, co, os.stat(source_path), pycpath)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_pattern_contains_subdirectories_TestEarlyRewriteBailout.test_pattern_contains_subdirectories.assert_self_find_spec_cal": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_pattern_contains_subdirectories_TestEarlyRewriteBailout.test_pattern_contains_subdirectories.assert_self_find_spec_cal", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1365, "end_line": 1382, "span_ids": ["TestEarlyRewriteBailout.test_pattern_contains_subdirectories"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEarlyRewriteBailout:\n\n def test_pattern_contains_subdirectories(\n self, pytester: Pytester, hook: AssertionRewritingHook\n ) -> None:\n \"\"\"If one of the python_files patterns contain subdirectories (\"tests/**.py\") we can't bailout early\n because we need to match with the full path, which can only be found by calling PathFinder.find_spec\n \"\"\"\n pytester.makepyfile(\n **{\n \"tests/file.py\": \"\"\"\\\n def test_simple_failure():\n assert 1 + 1 == 3\n \"\"\"\n }\n )\n pytester.syspathinsert(\"tests\")\n hook.fnpats[:] = [\"tests/**.py\"]\n assert hook.find_spec(\"file\") is not None\n assert self.find_spec_calls == [\"file\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_no_failures_behavior_all_passed_TestLastFailed.test_lastfailed_no_failures_behavior_all_passed.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_no_failures_behavior_all_passed_TestLastFailed.test_lastfailed_no_failures_behavior_all_passed.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 740, "end_line": 780, "span_ids": ["TestLastFailed.test_lastfailed_no_failures_behavior_all_passed"], "tokens": 307}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_no_failures_behavior_all_passed(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_1(): pass\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n result = pytester.runpytest(\"--lf\")\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n result = pytester.runpytest(\"--lf\", \"--lfnf\", \"all\")\n result.stdout.fnmatch_lines([\"*2 passed*\"])\n\n # Ensure the list passed to pytest_deselected is a copy,\n # and not a reference which is cleared right after.\n pytester.makeconftest(\n \"\"\"\n deselected = []\n\n def pytest_deselected(items):\n global deselected\n deselected = items\n\n def pytest_sessionfinish():\n print(\"\\\\ndeselected={}\".format(len(deselected)))\n \"\"\"\n )\n\n result = pytester.runpytest(\"--lf\", \"--lfnf\", \"none\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items / 2 deselected\",\n \"run-last-failure: no previously failed tests, deselecting all items.\",\n \"deselected=2\",\n \"* 2 deselected in *\",\n ]\n )\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigCmdlineParsing_TestConfigCmdlineParsing.test_absolute_win32_path.assert_ret_ExitCode_OK": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigCmdlineParsing_TestConfigCmdlineParsing.test_absolute_win32_path.assert_ret_ExitCode_OK", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 456, "end_line": 520, "span_ids": ["TestConfigCmdlineParsing.test_explicitly_specified_config_file_is_loaded", "TestConfigCmdlineParsing", "TestConfigCmdlineParsing.test_absolute_win32_path", "TestConfigCmdlineParsing.test_parsing_again_fails"], "tokens": 427}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "class TestConfigCmdlineParsing:\n def test_parsing_again_fails(self, pytester: Pytester) -> None:\n config = pytester.parseconfig()\n pytest.raises(AssertionError, lambda: config.parse([]))\n\n def test_explicitly_specified_config_file_is_loaded(\n self, pytester: Pytester\n ) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"custom\", \"\")\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n custom = 0\n \"\"\"\n )\n pytester.makefile(\n \".ini\",\n custom=\"\"\"\n [pytest]\n custom = 1\n \"\"\",\n )\n config = pytester.parseconfig(\"-c\", \"custom.ini\")\n assert config.getini(\"custom\") == \"1\"\n\n pytester.makefile(\n \".cfg\",\n custom_tool_pytest_section=\"\"\"\n [tool:pytest]\n custom = 1\n \"\"\",\n )\n config = pytester.parseconfig(\"-c\", \"custom_tool_pytest_section.cfg\")\n assert config.getini(\"custom\") == \"1\"\n\n pytester.makefile(\n \".toml\",\n custom=\"\"\"\n [tool.pytest.ini_options]\n custom = 1\n value = [\n ] # this is here on purpose, as it makes this an invalid '.ini' file\n \"\"\",\n )\n config = pytester.parseconfig(\"-c\", \"custom.toml\")\n assert config.getini(\"custom\") == \"1\"\n\n def test_absolute_win32_path(self, pytester: Pytester) -> None:\n temp_ini_file = pytester.makefile(\n \".ini\",\n custom=\"\"\"\n [pytest]\n addopts = --version\n \"\"\",\n )\n from os.path import normpath\n\n temp_ini_file_norm = normpath(str(temp_ini_file))\n ret = pytest.main([\"-c\", temp_ini_file_norm])\n assert ret == ExitCode.OK", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_symlink_files_test_conftest_symlink_files.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_symlink_files_test_conftest_symlink_files.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 262, "end_line": 292, "span_ids": ["test_conftest_symlink_files"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_symlink_files(pytester: Pytester) -> None:\n \"\"\"Symlinked conftest.py are found when pytest is executed in a directory with symlinked\n files.\"\"\"\n real = pytester.mkdir(\"real\")\n source = {\n \"app/test_foo.py\": \"def test1(fixture): pass\",\n \"app/__init__.py\": \"\",\n \"app/conftest.py\": textwrap.dedent(\n \"\"\"\n import pytest\n\n print(\"conftest_loaded\")\n\n @pytest.fixture\n def fixture():\n print(\"fixture_used\")\n \"\"\"\n ),\n }\n pytester.makepyfile(**{\"real/%s\" % k: v for k, v in source.items()})\n\n # Create a build directory that contains symlinks to actual files\n # but doesn't symlink actual directories.\n build = pytester.mkdir(\"build\")\n build.joinpath(\"app\").mkdir()\n for f in source:\n symlink_or_skip(real.joinpath(f), build.joinpath(f))\n os.chdir(build)\n result = pytester.runpytest(\"-vs\", \"app/test_foo.py\")\n 
result.stdout.fnmatch_lines([\"*conftest_loaded*\", \"PASSED\"])\n assert result.ret == ExitCode.OK", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_inspect_TestDoctests.test_collect_testtextfile.assert_len_items_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_inspect_TestDoctests.test_collect_testtextfile.assert_len_items_0", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 38, "span_ids": ["TestDoctests", "imports", "TestDoctests.test_collect_testtextfile"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import inspect\nimport textwrap\nfrom pathlib import Path\nfrom typing import Callable\nfrom typing import Optional\n\nimport pytest\nfrom _pytest.doctest import _get_checker\nfrom _pytest.doctest import _is_mocked\nfrom _pytest.doctest import _is_setup_py\nfrom _pytest.doctest import _patch_unwrap_mock_aware\nfrom _pytest.doctest import DoctestItem\nfrom _pytest.doctest import DoctestModule\nfrom _pytest.doctest import DoctestTextfile\nfrom _pytest.pytester import Pytester\n\n\nclass TestDoctests:\n def test_collect_testtextfile(self, pytester: Pytester):\n w = pytester.maketxtfile(whatever=\"\")\n checkfile = pytester.maketxtfile(\n test_something=\"\"\"\n alskdjalsdk\n >>> i = 5\n >>> i-1\n 4\n \"\"\"\n )\n\n for x in (pytester.path, checkfile):\n # print \"checking that %s returns custom items\" % (x,)\n items, reprec = pytester.inline_genitems(x)\n assert len(items) == 1\n assert isinstance(items[0], DoctestItem)\n assert isinstance(items[0].parent, DoctestTextfile)\n # Empty file has no items.\n items, reprec = pytester.inline_genitems(w)\n assert len(items) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_doctest_mock_objects_dont_recurse_missbehaved_Broken.__getattr__.raise_KeyError_This_shou": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_doctest_mock_objects_dont_recurse_missbehaved_Broken.__getattr__.raise_KeyError_This_shou", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1458, "end_line": 1479, "span_ids": ["Broken.__getattr__", "test_doctest_mock_objects_dont_recurse_missbehaved", "Broken"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"mock_module\", [\"mock\", 
\"unittest.mock\"])\ndef test_doctest_mock_objects_dont_recurse_missbehaved(mock_module, pytester: Pytester):\n pytest.importorskip(mock_module)\n pytester.makepyfile(\n \"\"\"\n from {mock_module} import call\n class Example(object):\n '''\n >>> 1 + 1\n 2\n '''\n \"\"\".format(\n mock_module=mock_module\n )\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines([\"* 1 passed *\"])\n\n\nclass Broken:\n def __getattr__(self, _):\n raise KeyError(\"This should be an AttributeError\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_timeout_test_timeout.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_timeout_test_timeout.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_faulthandler.py", "file_name": "test_faulthandler.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 107, "span_ids": ["test_timeout"], "tokens": 249}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"enabled\",\n [\n pytest.param(\n True, marks=pytest.mark.skip(reason=\"sometimes crashes on CI (#7022)\")\n ),\n False,\n ],\n)\ndef test_timeout(pytester: Pytester, enabled: bool) -> None:\n \"\"\"Test option to dump tracebacks after a certain timeout.\n\n If faulthandler is disabled, no traceback will be dumped.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import os, time\n def test_timeout():\n time.sleep(1 if \"CI\" in os.environ else 0.1)\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n faulthandler_timeout = 0.01\n \"\"\"\n )\n args = [\"-p\", \"no:faulthandler\"] if not enabled else []\n\n result = pytester.runpytest_subprocess(*args)\n tb_output = \"most recent call first\"\n if enabled:\n result.stderr.fnmatch_lines([\"*%s*\" % tb_output])\n else:\n assert tb_output not in result.stderr.str()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_fail_at_collect_test_parameterset_for_fail_at_collect.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_fail_at_collect_test_parameterset_for_fail_at_collect.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 969, "end_line": 1008, "span_ids": ["test_parameterset_for_fail_at_collect"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parameterset_for_fail_at_collect(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n {}=fail_at_collect\n \"\"\".format(\n EMPTY_PARAMETERSET_OPTION\n )\n )\n\n config = pytester.parseconfig()\n from _pytest.mark import pytest_configure, get_empty_parameterset_mark\n\n pytest_configure(config)\n\n with pytest.raises(\n Collector.CollectError,\n match=r\"Empty parameter set in 'pytest_configure' at line \\d\\d+\",\n ):\n get_empty_parameterset_mark(config, [\"a\"], pytest_configure)\n\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize(\"empty\", [])\n def test():\n pass\n \"\"\"\n )\n result = pytester.runpytest(str(p1))\n result.stdout.fnmatch_lines(\n [\n \"collected 0 items / 1 error\",\n \"* ERROR collecting test_parameterset_for_fail_at_collect.py *\",\n \"Empty parameter set in 'test' at line 3\",\n \"*= 1 error in *\",\n ]\n )\n assert result.ret == ExitCode.INTERRUPTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_deselect_test_sessionfinish_with_start.assert_res_ret_ExitCod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_test_deselect_test_sessionfinish_with_start.assert_res_ret_ExitCod", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 292, "end_line": 335, "span_ids": ["test_deselect", "test_sessionfinish_with_start"], "tokens": 296}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_deselect(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_a=\"\"\"\n import pytest\n\n def test_a1(): pass\n\n @pytest.mark.parametrize('b', range(3))\n def test_a2(b): pass\n\n class TestClass:\n def test_c1(self): pass\n\n def test_c2(self): pass\n \"\"\"\n )\n result = pytester.runpytest(\n \"-v\",\n \"--deselect=test_a.py::test_a2[1]\",\n \"--deselect=test_a.py::test_a2[2]\",\n \"--deselect=test_a.py::TestClass::test_c1\",\n )\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*3 passed, 3 deselected*\"])\n for line in result.stdout.lines:\n assert not line.startswith((\"test_a.py::test_a2[1]\", \"test_a.py::test_a2[2]\"))\n\n\ndef test_sessionfinish_with_start(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import os\n values = []\n def pytest_sessionstart():\n values.append(os.getcwd())\n os.chdir(\"..\")\n\n def pytest_sessionfinish():\n assert values[0] == os.getcwd()\n\n \"\"\"\n )\n res = pytester.runpytest(\"--collect-only\")\n assert res.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_tbstyle_short_test_traceconfig.assert_result_ret_Exit": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_tbstyle_short_test_traceconfig.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1325, "end_line": 1352, "span_ids": ["test_traceconfig", "test_tbstyle_short"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_tbstyle_short(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def arg(request):\n return 42\n def test_opt(arg):\n x = 0\n assert x\n \"\"\"\n )\n result = pytester.runpytest(\"--tb=short\")\n s = result.stdout.str()\n assert \"arg = 42\" not in s\n assert \"x = 0\" not in s\n result.stdout.fnmatch_lines([\"*%s:8*\" % p.name, \" assert x\", \"E assert*\"])\n result = pytester.runpytest()\n s = result.stdout.str()\n assert \"x = 0\" in s\n assert \"assert x\" in s\n\n\ndef test_traceconfig(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--traceconfig\")\n result.stdout.fnmatch_lines([\"*active plugins*\"])\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_runTest_method_test_isclasscheck_issue53.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_runTest_method_test_isclasscheck_issue53.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 27, "end_line": 62, "span_ids": ["test_isclasscheck_issue53", "test_runTest_method"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_runTest_method(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCaseWithRunTest(unittest.TestCase):\n def runTest(self):\n self.assertEqual('foo', 'foo')\n class MyTestCaseWithoutRunTest(unittest.TestCase):\n def runTest(self):\n self.assertEqual('foo', 'foo')\n def test_something(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *MyTestCaseWithRunTest::runTest*\n *MyTestCaseWithoutRunTest::test_something*\n *2 passed*\n \"\"\"\n )\n\n\ndef test_isclasscheck_issue53(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class _E(object):\n def __getattr__(self, tag):\n pass\n E = _E()\n \"\"\"\n )\n result = pytester.runpytest(testpath)\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warning_types.py_inspect_test_warning_types.assert_warning_class___mo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warning_types.py_inspect_test_warning_types.assert_warning_class___mo", "embedding": null, "metadata": {"file_path": "testing/test_warning_types.py", "file_name": "test_warning_types.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 20, "span_ids": ["test_warning_types", "imports"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import inspect\n\nimport pytest\nfrom _pytest import warning_types\nfrom _pytest.pytester import Pytester\n\n\n@pytest.mark.parametrize(\n \"warning_class\",\n [\n w\n for n, w in vars(warning_types).items()\n if inspect.isclass(w) and issubclass(w, Warning)\n ],\n)\ndef test_warning_types(warning_class: UserWarning) -> None:\n \"\"\"Make sure all warnings declared in _pytest.warning_types are displayed as coming\n from 'pytest' instead of the internal module (#5452).\n \"\"\"\n assert warning_class.__module__ == \"pytest\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warning_types.py_test_pytest_warnings_repr_integration_test_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warning_types.py_test_pytest_warnings_repr_integration_test_", "embedding": null, "metadata": {"file_path": "testing/test_warning_types.py", "file_name": "test_warning_types.py", "file_type": "text/x-python", "category": "test", "start_line": 23, "end_line": 39, "span_ids": ["test_pytest_warnings_repr_integration_test"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"error::pytest.PytestWarning\")\ndef test_pytest_warnings_repr_integration_test(pytester: Pytester) -> None:\n \"\"\"Small integration test to ensure our small hack of setting the __module__ attribute\n of our warnings actually works (#5452).\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n import warnings\n\n def test():\n warnings.warn(pytest.PytestWarning(\"some warning\"))\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"E pytest.PytestWarning: some warning\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo_ExceptionInfo.from_exc_info.return.cls_exc_info__striptext_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo_ExceptionInfo.from_exc_info.return.cls_exc_info__striptext_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 443, "end_line": 479, "span_ids": ["ExceptionInfo", "ExceptionInfo.from_exc_info"], "tokens": 307}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False)\nclass ExceptionInfo(Generic[_E]):\n \"\"\"Wraps sys.exc_info() objects and offers help for navigating the traceback.\"\"\"\n\n _assert_start_repr = \"AssertionError('assert \"\n\n _excinfo = attr.ib(type=Optional[Tuple[Type[\"_E\"], \"_E\", TracebackType]])\n _striptext = attr.ib(type=str, default=\"\")\n _traceback = attr.ib(type=Optional[Traceback], default=None)\n\n @classmethod\n def from_exc_info(\n cls,\n exc_info: Tuple[Type[_E], _E, TracebackType],\n exprinfo: Optional[str] = None,\n ) -> \"ExceptionInfo[_E]\":\n \"\"\"Return an ExceptionInfo for an existing exc_info tuple.\n\n .. warning::\n\n Experimental API\n\n :param exprinfo:\n A text string helping to determine if we should strip\n ``AssertionError`` from the output. Defaults to the exception\n message/``__str__()``.\n \"\"\"\n _striptext = \"\"\n if exprinfo is None and isinstance(exc_info[1], AssertionError):\n exprinfo = getattr(exc_info[1], \"msg\", None)\n if exprinfo is None:\n exprinfo = saferepr(exc_info[1])\n if exprinfo and exprinfo.startswith(cls._assert_start_repr):\n _striptext = \"AssertionError: \"\n\n return cls(exc_info, _striptext)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.exconly_ExceptionInfo.exconly.return.text": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.exconly_ExceptionInfo.exconly.return.text", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 563, "end_line": 577, "span_ids": ["ExceptionInfo.exconly"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False)\nclass ExceptionInfo(Generic[_E]):\n\n def exconly(self, tryshort: bool = False) -> str:\n \"\"\"Return the exception as a string.\n\n When 'tryshort' resolves to True, and the exception is a\n _pytest._code._AssertionError, only the actual exception part of\n the exception representation is returned (so 'AssertionError: ' is\n removed from the beginning).\n \"\"\"\n lines = format_exception_only(self.type, self.value)\n text = \"\".join(lines)\n text = text.rstrip()\n if tryshort:\n if text.startswith(self._striptext):\n text = 
text[len(self._striptext) :]\n return text", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.errisinstance_ExceptionInfo._getreprcrash.return.ReprFileLocation_path_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.errisinstance_ExceptionInfo._getreprcrash.return.ReprFileLocation_path_li", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 579, "end_line": 592, "span_ids": ["ExceptionInfo.errisinstance", "ExceptionInfo._getreprcrash"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False)\nclass ExceptionInfo(Generic[_E]):\n\n def errisinstance(\n self, exc: Union[Type[BaseException], Tuple[Type[BaseException], ...]]\n ) -> bool:\n \"\"\"Return True if the exception is an instance of exc.\n\n Consider using ``isinstance(excinfo.value, exc)`` instead.\n \"\"\"\n return isinstance(self.value, exc)\n\n def _getreprcrash(self) -> \"ReprFileLocation\":\n exconly = self.exconly(tryshort=True)\n entry = self.traceback.getcrashentry()\n path, lineno = entry.frame.code.raw.co_filename, entry.lineno\n return ReprFileLocation(path, lineno + 1, exconly)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_pprint__ellipsize.return.s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_pprint__ellipsize.return.s", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/saferepr.py", "file_name": "saferepr.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 35, "span_ids": ["_ellipsize", "_try_repr_or_str", "_format_repr_exception", "imports"], "tokens": 263}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pprint\nimport reprlib\nfrom typing import Any\nfrom typing import Dict\nfrom typing import IO\nfrom typing import Optional\n\n\ndef _try_repr_or_str(obj: object) -> str:\n try:\n return repr(obj)\n except (KeyboardInterrupt, SystemExit):\n raise\n except BaseException:\n return '{}(\"{}\")'.format(type(obj).__name__, obj)\n\n\ndef _format_repr_exception(exc: BaseException, obj: object) -> str:\n try:\n exc_info = _try_repr_or_str(exc)\n except (KeyboardInterrupt, SystemExit):\n raise\n except BaseException as exc:\n exc_info = \"unpresentable exception ({})\".format(_try_repr_or_str(exc))\n return \"<[{} raised in repr()] {} object at 0x{:x}>\".format(\n exc_info, type(obj).__name__, 
id(obj)\n )\n\n\ndef _ellipsize(s: str, maxsize: int) -> str:\n if len(s) > maxsize:\n i = max(0, (maxsize - 3) // 2)\n j = max(0, maxsize - 3 - i)\n return s[:i] + \"...\" + s[len(s) - j :]\n return s", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_SafeRepr_SafeRepr.repr_instance.return._ellipsize_s_self_maxsiz": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_SafeRepr_SafeRepr.repr_instance.return._ellipsize_s_self_maxsiz", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/saferepr.py", "file_name": "saferepr.py", "file_type": "text/x-python", "category": "implementation", "start_line": 38, "end_line": 63, "span_ids": ["SafeRepr.repr_instance", "SafeRepr.repr", "SafeRepr"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SafeRepr(reprlib.Repr):\n \"\"\"repr.Repr that limits the resulting size of repr() and includes\n information on exceptions raised during the call.\"\"\"\n\n def __init__(self, maxsize: int) -> None:\n super().__init__()\n self.maxstring = maxsize\n self.maxsize = maxsize\n\n def repr(self, x: object) -> str:\n try:\n s = super().repr(x)\n except (KeyboardInterrupt, SystemExit):\n raise\n except BaseException as exc:\n s = _format_repr_exception(exc, x)\n return _ellipsize(s, self.maxsize)\n\n def repr_instance(self, x: object, level: int) -> str:\n try:\n s = repr(x)\n except (KeyboardInterrupt, SystemExit):\n raise\n except BaseException as exc:\n s = _format_repr_exception(exc, x)\n return _ellipsize(s, self.maxsize)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__get_assertion_exprs__get_assertion_exprs.return.ret": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__get_assertion_exprs__get_assertion_exprs.return.ret", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 545, "end_line": 596, "span_ids": ["_get_assertion_exprs"], "tokens": 458}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_assertion_exprs(src: bytes) -> Dict[int, str]:\n \"\"\"Return a mapping from {lineno: \"assertion test expression\"}.\"\"\"\n ret: Dict[int, str] = {}\n\n depth = 0\n lines: List[str] = []\n assert_lineno: Optional[int] = None\n seen_lines: Set[int] = set()\n\n def _write_and_reset() -> None:\n nonlocal depth, lines, assert_lineno, seen_lines\n assert assert_lineno is not None\n ret[assert_lineno] = 
\"\".join(lines).rstrip().rstrip(\"\\\\\")\n depth = 0\n lines = []\n assert_lineno = None\n seen_lines = set()\n\n tokens = tokenize.tokenize(io.BytesIO(src).readline)\n for tp, source, (lineno, offset), _, line in tokens:\n if tp == tokenize.NAME and source == \"assert\":\n assert_lineno = lineno\n elif assert_lineno is not None:\n # keep track of depth for the assert-message `,` lookup\n if tp == tokenize.OP and source in \"([{\":\n depth += 1\n elif tp == tokenize.OP and source in \")]}\":\n depth -= 1\n\n if not lines:\n lines.append(line[offset:])\n seen_lines.add(lineno)\n # a non-nested comma separates the expression from the message\n elif depth == 0 and tp == tokenize.OP and source == \",\":\n # one line assert with message\n if lineno in seen_lines and len(lines) == 1:\n offset_in_trimmed = offset + len(lines[-1]) - len(line)\n lines[-1] = lines[-1][:offset_in_trimmed]\n # multi-line assert with message\n elif lineno in seen_lines:\n lines[-1] = lines[-1][:offset]\n # multi line assert with escapd newline before message\n else:\n lines.append(line[:offset])\n _write_and_reset()\n elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}:\n _write_and_reset()\n elif lines and lineno not in seen_lines:\n lines.append(line)\n seen_lines.add(lineno)\n\n return ret", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter_AssertionRewriter._assert_expr_to_lineno.return._get_assertion_exprs_self": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter_AssertionRewriter._assert_expr_to_lineno.return._get_assertion_exprs_self", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 599, "end_line": 668, "span_ids": ["AssertionRewriter._assert_expr_to_lineno", "AssertionRewriter"], "tokens": 667}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n \"\"\"Assertion rewriting implementation.\n\n The main entrypoint is to call .run() with an ast.Module instance,\n this will then find all the assert statements and rewrite them to\n provide intermediate values and a detailed assertion error. See\n http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html\n for an overview of how this works.\n\n The entry point here is .run() which will iterate over all the\n statements in an ast.Module and for each ast.Assert statement it\n finds call .visit() with it. 
Then .visit_Assert() takes over and\n is responsible for creating new ast statements to replace the\n original assert statement: it rewrites the test of an assertion\n to provide intermediate values and replace it with an if statement\n which raises an assertion error with a detailed explanation in\n case the expression is false and calls pytest_assertion_pass hook\n if expression is true.\n\n For this .visit_Assert() uses the visitor pattern to visit all the\n AST nodes of the ast.Assert.test field, each visit call returning\n an AST node and the corresponding explanation string. During this\n state is kept in several instance attributes:\n\n :statements: All the AST statements which will replace the assert\n statement.\n\n :variables: This is populated by .variable() with each variable\n used by the statements so that they can all be set to None at\n the end of the statements.\n\n :variable_counter: Counter to create new unique variables needed\n by statements. Variables are created using .variable() and\n have the form of \"@py_assert0\".\n\n :expl_stmts: The AST statements which will be executed to get\n data from the assertion. This is the code which will construct\n the detailed assertion message that is used in the AssertionError\n or for the pytest_assertion_pass hook.\n\n :explanation_specifiers: A dict filled by .explanation_param()\n with %-formatting placeholders and their corresponding\n expressions to use in the building of an assertion message.\n This is used by .pop_format_context() to build a message.\n\n :stack: A stack of the explanation_specifiers dicts maintained by\n .push_format_context() and .pop_format_context() which allows\n to build another %-formatted string while already building one.\n\n This state is reset on every new assert statement visited and used\n by the other visitors.\n \"\"\"\n\n def __init__(\n self, module_path: Optional[str], config: Optional[Config], source: bytes\n ) -> None:\n super().__init__()\n self.module_path = module_path\n self.config = config\n if config is not None:\n self.enable_assertion_pass_hook = config.getini(\n \"enable_assertion_pass_hook\"\n )\n else:\n self.enable_assertion_pass_hook = False\n self.source = source\n\n @functools.lru_cache(maxsize=1)\n def _assert_expr_to_lineno(self) -> Dict[int, str]:\n return _get_assertion_exprs(self.source)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__Utilities_for_assertio__assertion_pass.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__Utilities_for_assertio__assertion_pass.None", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 27, "span_ids": ["impl", "docstring", "imports"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Utilities for assertion debugging.\"\"\"\nimport collections.abc\nimport pprint\nfrom typing import AbstractSet\nfrom typing import Any\nfrom 
typing import Callable\nfrom typing import Iterable\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Sequence\n\nimport _pytest._code\nfrom _pytest import outcomes\nfrom _pytest._io.saferepr import _pformat_dispatch\nfrom _pytest._io.saferepr import safeformat\nfrom _pytest._io.saferepr import saferepr\n\n# The _reprcompare attribute on the util module is used by the new assertion\n# interpretation code and assertion rewriter to detect this plugin was\n# loaded and in turn call the hooks defined here as part of the\n# DebugInterpreter.\n_reprcompare: Optional[Callable[[str, object, object], Optional[str]]] = None\n\n# Works similarly as _reprcompare attribute. Is populated with the hook call\n# when pytest_runtest_setup is called.\n_assertion_pass: Optional[Callable[[int, str, str], None]] = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_sequence__compare_eq_sequence.return.explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_sequence__compare_eq_sequence.return.explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 303, "end_line": 353, "span_ids": ["_compare_eq_sequence"], "tokens": 414}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _compare_eq_sequence(\n left: Sequence[Any], right: Sequence[Any], verbose: int = 0\n) -> List[str]:\n comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes)\n explanation: List[str] = []\n len_left = len(left)\n len_right = len(right)\n for i in range(min(len_left, len_right)):\n if left[i] != right[i]:\n if comparing_bytes:\n # when comparing bytes, we want to see their ascii representation\n # instead of their numeric values (#5260)\n # using a slice gives us the ascii representation:\n # >>> s = b'foo'\n # >>> s[0]\n # 102\n # >>> s[0:1]\n # b'f'\n left_value = left[i : i + 1]\n right_value = right[i : i + 1]\n else:\n left_value = left[i]\n right_value = right[i]\n\n explanation += [f\"At index {i} diff: {left_value!r} != {right_value!r}\"]\n break\n\n if comparing_bytes:\n # when comparing bytes, it doesn't help to show the \"sides contain one or more\n # items\" longer explanation, so skip it\n\n return explanation\n\n len_diff = len_left - len_right\n if len_diff:\n if len_diff > 0:\n dir_with_more = \"Left\"\n extra = saferepr(left[len_right])\n else:\n len_diff = 0 - len_diff\n dir_with_more = \"Right\"\n extra = saferepr(right[len_left])\n\n if len_diff == 1:\n explanation += [f\"{dir_with_more} contains one more item: {extra}\"]\n else:\n explanation += [\n \"%s contains %d more items, first extra item: %s\"\n % (dir_with_more, len_diff, extra)\n ]\n return explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_num_mock_patch_args_num_mock_patch_args.return.len_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_num_mock_patch_args_num_mock_patch_args.return.len_", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 93, "end_line": 109, "span_ids": ["num_mock_patch_args"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def num_mock_patch_args(function) -> int:\n \"\"\"Return number of arguments used up by mock arguments (if any).\"\"\"\n patchings = getattr(function, \"patchings\", None)\n if not patchings:\n return 0\n\n mock_sentinel = getattr(sys.modules.get(\"mock\"), \"DEFAULT\", object())\n ut_mock_sentinel = getattr(sys.modules.get(\"unittest.mock\"), \"DEFAULT\", object())\n\n return len(\n [\n p\n for p in patchings\n if not p.attribute_name\n and (p.new is mock_sentinel or p.new is ut_mock_sentinel)\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_get_config_get_plugin_manager.return.get_config_pluginmanage": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_get_config_get_plugin_manager.return.get_config_pluginmanage", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 265, "end_line": 298, "span_ids": ["get_plugin_manager", "get_config"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_config(\n args: Optional[List[str]] = None,\n plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None,\n) -> \"Config\":\n # subsequent calls to main will create a fresh instance\n pluginmanager = PytestPluginManager()\n config = Config(\n pluginmanager,\n invocation_params=Config.InvocationParams(\n args=args or (),\n plugins=plugins,\n dir=Path.cwd(),\n ),\n )\n\n if args is not None:\n # Handle any \"-p no:plugin\" args.\n pluginmanager.consider_preparse(args, exclude_only=True)\n\n for spec in default_plugins:\n pluginmanager.import_plugin(spec)\n\n return config\n\n\ndef get_plugin_manager() -> \"PytestPluginManager\":\n \"\"\"Obtain a new instance of the\n :py:class:`_pytest.config.PytestPluginManager`, with default plugins\n already loaded.\n\n This function can be used by integration with other tools, like hooking\n into pytest to run tests into an IDE.\n \"\"\"\n return get_config().pluginmanager", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_get_dirs_from_args_CFG_PYTEST_SECTION._pytest_section_in_fil": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_get_dirs_from_args_CFG_PYTEST_SECTION._pytest_section_in_fil", "embedding": null, "metadata": {"file_path": "src/_pytest/config/findpaths.py", "file_name": "findpaths.py", "file_type": "text/x-python", "category": "implementation", "start_line": 133, "end_line": 163, "span_ids": ["impl:2", "get_dirs_from_args"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_dirs_from_args(args: Iterable[str]) -> List[Path]:\n def is_option(x: str) -> bool:\n return x.startswith(\"-\")\n\n def get_file_part_from_node_id(x: str) -> str:\n return x.split(\"::\")[0]\n\n def get_dir_from_path(path: Path) -> Path:\n if path.is_dir():\n return path\n return path.parent\n\n def safe_exists(path: Path) -> bool:\n # This can throw on paths that contain characters unrepresentable at the OS level,\n # or with invalid syntax on Windows (https://bugs.python.org/issue35306)\n try:\n return path.exists()\n except OSError:\n return False\n\n # These look like paths but may not exist\n possible_paths = (\n absolutepath(get_file_part_from_node_id(arg))\n for arg in args\n if not is_option(arg)\n )\n\n return [get_dir_from_path(path) for path in possible_paths if safe_exists(path)]\n\n\nCFG_PYTEST_SECTION = \"[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead.\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__patch_unwrap_mock_aware__patch_unwrap_mock_aware.try_.finally_.inspect.unwrap.real_unwrap": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__patch_unwrap_mock_aware__patch_unwrap_mock_aware.try_.finally_.inspect.unwrap.real_unwrap", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 469, "end_line": 496, "span_ids": ["_patch_unwrap_mock_aware"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@contextmanager\ndef _patch_unwrap_mock_aware() -> Generator[None, None, None]:\n \"\"\"Context manager which replaces ``inspect.unwrap`` with a version\n that's aware of mock objects and doesn't recurse into them.\"\"\"\n real_unwrap = inspect.unwrap\n\n def _mock_aware_unwrap(\n func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = None\n ) -> Any:\n try:\n if stop is None or stop is _is_mocked:\n return real_unwrap(func, stop=_is_mocked)\n _stop = stop\n return real_unwrap(func, stop=lambda obj: 
_is_mocked(obj) or _stop(func))\n except Exception as e:\n warnings.warn(\n \"Got %r when unwrapping %r. This is usually caused \"\n \"by a violation of Python's object protocol; see e.g. \"\n \"https://github.com/pytest-dev/pytest/issues/5080\" % (e, func),\n PytestWarning,\n )\n raise\n\n inspect.unwrap = _mock_aware_unwrap\n try:\n yield\n finally:\n inspect.unwrap = real_unwrap", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node._repr_failure_py_Node.repr_failure.return.self__repr_failure_py_exc": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node._repr_failure_py_Node.repr_failure.return.self__repr_failure_py_exc", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 358, "end_line": 422, "span_ids": ["Node._repr_failure_py", "Node.repr_failure"], "tokens": 578}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n\n def _repr_failure_py(\n self,\n excinfo: ExceptionInfo[BaseException],\n style: \"Optional[_TracebackStyle]\" = None,\n ) -> TerminalRepr:\n from _pytest.fixtures import FixtureLookupError\n\n if isinstance(excinfo.value, ConftestImportFailure):\n excinfo = ExceptionInfo(excinfo.value.excinfo)\n if isinstance(excinfo.value, fail.Exception):\n if not excinfo.value.pytrace:\n style = \"value\"\n if isinstance(excinfo.value, FixtureLookupError):\n return excinfo.value.formatrepr()\n if self.config.getoption(\"fulltrace\", False):\n style = \"long\"\n else:\n tb = _pytest._code.Traceback([excinfo.traceback[-1]])\n self._prunetraceback(excinfo)\n if len(excinfo.traceback) == 0:\n excinfo.traceback = tb\n if style == \"auto\":\n style = \"long\"\n # XXX should excinfo.getrepr record all data and toterminal() process it?\n if style is None:\n if self.config.getoption(\"tbstyle\", \"auto\") == \"short\":\n style = \"short\"\n else:\n style = \"long\"\n\n if self.config.getoption(\"verbose\", 0) > 1:\n truncate_locals = False\n else:\n truncate_locals = True\n\n # excinfo.getrepr() formats paths relative to the CWD if `abspath` is False.\n # It is possible for a fixture/test to change the CWD while this code runs, which\n # would then result in the user seeing confusing paths in the failure message.\n # To fix this, if the CWD changed, always display the full absolute path.\n # It will be better to just always display paths relative to invocation_dir, but\n # this requires a lot of plumbing (#6428).\n try:\n abspath = Path(os.getcwd()) != self.config.invocation_params.dir\n except OSError:\n abspath = True\n\n return excinfo.getrepr(\n funcargs=True,\n abspath=abspath,\n showlocals=self.config.getoption(\"showlocals\", False),\n style=style,\n tbfilter=False, # pruned already, or in --fulltrace mode.\n truncate_locals=truncate_locals,\n )\n\n def repr_failure(\n self,\n excinfo: ExceptionInfo[BaseException],\n style: \"Optional[_TracebackStyle]\" = None,\n ) -> Union[str, TerminalRepr]:\n \"\"\"Return a 
representation of a collection or test failure.\n\n :param excinfo: Exception information for the failure.\n \"\"\"\n return self._repr_failure_py(excinfo, style)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_atexit_ensure_reset_dir.path_mkdir_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_atexit_ensure_reset_dir.path_mkdir_", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 69, "span_ids": ["impl", "get_lock_path", "ensure_reset_dir", "imports", "_ignore_error"], "tokens": 429}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import atexit\nimport contextlib\nimport fnmatch\nimport importlib.util\nimport itertools\nimport os\nimport shutil\nimport sys\nimport uuid\nimport warnings\nfrom enum import Enum\nfrom errno import EBADF\nfrom errno import ELOOP\nfrom errno import ENOENT\nfrom errno import ENOTDIR\nfrom functools import partial\nfrom os.path import expanduser\nfrom os.path import expandvars\nfrom os.path import isabs\nfrom os.path import sep\nfrom pathlib import Path\nfrom pathlib import PurePath\nfrom posixpath import sep as posix_sep\nfrom types import ModuleType\nfrom typing import Callable\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import Optional\nfrom typing import Set\nfrom typing import TypeVar\nfrom typing import Union\n\nfrom _pytest.compat import assert_never\nfrom _pytest.outcomes import skip\nfrom _pytest.warning_types import PytestWarning\n\nLOCK_TIMEOUT = 60 * 60 * 24 * 3\n\n\n_AnyPurePath = TypeVar(\"_AnyPurePath\", bound=PurePath)\n\n# The following function, variables and comments were\n# copied from cpython 3.9 Lib/pathlib.py file.\n\n# EBADF - guard against macOS `stat` throwing EBADF\n_IGNORED_ERRORS = (ENOENT, ENOTDIR, EBADF, ELOOP)\n\n_IGNORED_WINERRORS = (\n 21, # ERROR_NOT_READY - drive exists but is not accessible\n 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself\n)\n\n\ndef _ignore_error(exception):\n return (\n getattr(exception, \"errno\", None) in _IGNORED_ERRORS\n or getattr(exception, \"winerror\", None) in _IGNORED_WINERRORS\n )\n\n\ndef get_lock_path(path: _AnyPurePath) -> _AnyPurePath:\n return path.joinpath(\".lock\")\n\n\ndef ensure_reset_dir(path: Path) -> None:\n \"\"\"Ensure the given path is an empty directory.\"\"\"\n if path.exists():\n rm_rf(path)\n path.mkdir()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__call_with_optional_argument__get_first_non_fixture_func.for_name_in_names_.if_meth_is_not_None_and_f.return.meth": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__call_with_optional_argument__get_first_non_fixture_func.for_name_in_names_.if_meth_is_not_None_and_f.return.meth", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 741, "end_line": 759, "span_ids": ["_call_with_optional_argument", "_get_first_non_fixture_func"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _call_with_optional_argument(func, arg) -> None:\n \"\"\"Call the given function with the given argument if func accepts one argument, otherwise\n calls func without arguments.\"\"\"\n arg_count = func.__code__.co_argcount\n if inspect.ismethod(func):\n arg_count -= 1\n if arg_count:\n func(arg)\n else:\n func()\n\n\ndef _get_first_non_fixture_func(obj: object, names: Iterable[str]):\n \"\"\"Return the attribute from the given object to be used as a setup/teardown\n xunit-style function, but only if not marked as a fixture to avoid calling it twice.\"\"\"\n for name in names:\n meth = getattr(obj, name, None)\n if meth is not None and fixtures.getfixturemarker(meth) is None:\n return meth", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_approx._Delegate_the_comparison_approx.return.cls_expected_rel_abs_n": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_approx._Delegate_the_comparison_approx.return.cls_expected_rel_abs_n", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 508, "end_line": 542, "span_ids": ["approx"], "tokens": 362}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase:\n\n # Delegate the comparison to a class that knows how to deal with the type\n # of the expected value (e.g. int, float, list, dict, numpy.array, etc).\n #\n # The primary responsibility of these classes is to implement ``__eq__()``\n # and ``__repr__()``. The former is used to actually check if some\n # \"actual\" value is equivalent to the given expected value within the\n # allowed tolerance. The latter is used to show the user the expected\n # value and tolerance, in the case that a test failed.\n #\n # The actual logic for making approximate comparisons can be found in\n # ApproxScalar, which is used to compare individual numbers. All of the\n # other Approx classes eventually delegate to this class. 
The ApproxBase\n # class provides some convenient methods and overloads, but isn't really\n # essential.\n\n __tracebackhide__ = True\n\n if isinstance(expected, Decimal):\n cls: Type[ApproxBase] = ApproxDecimal\n elif isinstance(expected, Mapping):\n cls = ApproxMapping\n elif _is_numpy_array(expected):\n expected = _as_numpy_array(expected)\n cls = ApproxNumpy\n elif (\n isinstance(expected, Iterable)\n and isinstance(expected, Sized)\n # Type ignored because the error is wrong -- not unreachable.\n and not isinstance(expected, STRING_TYPES) # type: ignore[unreachable]\n ):\n cls = ApproxSequencelike\n else:\n cls = ApproxScalar\n\n return cls(expected, rel, abs, nan_ok)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_RaisesContext.__exit___": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_RaisesContext.__exit___", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 769, "end_line": 787, "span_ids": ["RaisesContext.__exit__"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass RaisesContext(Generic[_E]):\n\n def __exit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> bool:\n __tracebackhide__ = True\n if exc_type is None:\n fail(self.message)\n assert self.excinfo is not None\n if not issubclass(exc_type, self.expected_exception):\n return False\n # Cast to narrow the exception type now that it's verified.\n exc_info = cast(Tuple[Type[_E], _E, TracebackType], (exc_type, exc_val, exc_tb))\n self.excinfo.fill_unfilled(exc_info)\n if self.match_expr is not None:\n self.excinfo.match(self.match_expr)\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsRecorder_WarningsRecorder.__enter__.return.self": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsRecorder_WarningsRecorder.__enter__.return.self", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 159, "end_line": 212, "span_ids": ["WarningsRecorder.pop", "WarningsRecorder.__getitem__", "WarningsRecorder", "WarningsRecorder.__iter__", "WarningsRecorder.clear", "WarningsRecorder.list", "WarningsRecorder.__enter__", "WarningsRecorder.__len__"], "tokens": 454}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "class WarningsRecorder(warnings.catch_warnings):\n \"\"\"A context manager to record raised warnings.\n\n Adapted from `warnings.catch_warnings`.\n \"\"\"\n\n def __init__(self, *, _ispytest: bool = False) -> None:\n check_ispytest(_ispytest)\n # Type ignored due to the way typeshed handles warnings.catch_warnings.\n super().__init__(record=True) # type: ignore[call-arg]\n self._entered = False\n self._list: List[warnings.WarningMessage] = []\n\n @property\n def list(self) -> List[\"warnings.WarningMessage\"]:\n \"\"\"The list of recorded warnings.\"\"\"\n return self._list\n\n def __getitem__(self, i: int) -> \"warnings.WarningMessage\":\n \"\"\"Get a recorded warning by index.\"\"\"\n return self._list[i]\n\n def __iter__(self) -> Iterator[\"warnings.WarningMessage\"]:\n \"\"\"Iterate through the recorded warnings.\"\"\"\n return iter(self._list)\n\n def __len__(self) -> int:\n \"\"\"The number of recorded warnings.\"\"\"\n return len(self._list)\n\n def pop(self, cls: Type[Warning] = Warning) -> \"warnings.WarningMessage\":\n \"\"\"Pop the first recorded warning, raise exception if not exists.\"\"\"\n for i, w in enumerate(self._list):\n if issubclass(w.category, cls):\n return self._list.pop(i)\n __tracebackhide__ = True\n raise AssertionError(\"%r not found in warning list\" % cls)\n\n def clear(self) -> None:\n \"\"\"Clear the list of recorded warnings.\"\"\"\n self._list[:] = []\n\n # Type ignored because it doesn't exactly warnings.catch_warnings.__enter__\n # -- it returns a List but we only emulate one.\n def __enter__(self) -> \"WarningsRecorder\": # type: ignore\n if self._entered:\n __tracebackhide__ = True\n raise RuntimeError(\"Cannot enter %r twice\" % self)\n _list = super().__enter__()\n # record=True means it's None.\n assert _list is not None\n self._list = _list\n warnings.simplefilter(\"always\")\n return self", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempdirFactory_TempdirFactory.getbasetemp.return.py_path_local_self__tmppa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempdirFactory_TempdirFactory.getbasetemp.return.py_path_local_self__tmppa", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 128, "end_line": 148, "span_ids": ["TempdirFactory.mktemp", "TempdirFactory.getbasetemp", "TempdirFactory"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass TempdirFactory:\n \"\"\"Backward comptibility wrapper that implements :class:``py.path.local``\n for :class:``TempPathFactory``.\"\"\"\n\n _tmppath_factory = attr.ib(type=TempPathFactory)\n\n def __init__(\n self, tmppath_factory: TempPathFactory, *, _ispytest: bool = False\n ) -> None:\n check_ispytest(_ispytest)\n self._tmppath_factory = tmppath_factory\n\n def mktemp(self, basename: str, numbered: bool = True) -> py.path.local:\n \"\"\"Same as :meth:`TempPathFactory.mktemp`, but 
returns a ``py.path.local`` object.\"\"\"\n return py.path.local(self._tmppath_factory.mktemp(basename, numbered).resolve())\n\n def getbasetemp(self) -> py.path.local:\n \"\"\"Backward compat wrapper for ``_tmppath_factory.getbasetemp``.\"\"\"\n return py.path.local(self._tmppath_factory.getbasetemp().resolve())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_get_user_pytest_configure.mp_setattr_config__tmpd": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_get_user_pytest_configure.mp_setattr_config__tmpd", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 151, "end_line": 174, "span_ids": ["get_user", "pytest_configure"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_user() -> Optional[str]:\n \"\"\"Return the current user name, or None if getuser() does not work\n in the current environment (see #1010).\"\"\"\n import getpass\n\n try:\n return getpass.getuser()\n except (ImportError, KeyError):\n return None\n\n\ndef pytest_configure(config: Config) -> None:\n \"\"\"Create a TempdirFactory and attach it to the config object.\n\n This is to comply with existing plugins which expect the handler to be\n available at pytest_configure time, but ideally should be moved entirely\n to the tmpdir_factory session fixture.\n \"\"\"\n mp = MonkeyPatch()\n tmppath_handler = TempPathFactory.from_config(config, _ispytest=True)\n t = TempdirFactory(tmppath_handler, _ispytest=True)\n config._cleanup.append(mp.undo)\n mp.setattr(config, \"_tmp_path_factory\", tmppath_handler, raising=False)\n mp.setattr(config, \"_tmpdirhandler\", t, raising=False)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction.runtest_TestCaseFunction._prunetraceback.if_traceback_.excinfo.traceback.traceback": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction.runtest_TestCaseFunction._prunetraceback.if_traceback_.excinfo.traceback.traceback", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 293, "end_line": 331, "span_ids": ["TestCaseFunction.runtest", "TestCaseFunction._prunetraceback"], "tokens": 398}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaseFunction(Function):\n\n def runtest(self) -> None:\n from _pytest.debugging import 
maybe_wrap_pytest_function_for_tracing\n\n assert self._testcase is not None\n\n maybe_wrap_pytest_function_for_tracing(self)\n\n # Let the unittest framework handle async functions.\n if is_async_function(self.obj):\n # Type ignored because self acts as the TestResult, but is not actually one.\n self._testcase(result=self) # type: ignore[arg-type]\n else:\n # When --pdb is given, we want to postpone calling tearDown() otherwise\n # when entering the pdb prompt, tearDown() would have probably cleaned up\n # instance variables, which makes it difficult to debug.\n # Arguably we could always postpone tearDown(), but this changes the moment where the\n # TestCase instance interacts with the results object, so better to only do it\n # when absolutely needed.\n if self.config.getoption(\"usepdb\") and not _is_skipped(self.obj):\n self._explicit_tearDown = self._testcase.tearDown\n setattr(self._testcase, \"tearDown\", lambda *args: None)\n\n # We need to update the actual bound method with self.obj, because\n # wrap_pytest_function_for_tracing replaces self.obj by a wrapper.\n setattr(self._testcase, self.name, self.obj)\n try:\n self._testcase(result=self) # type: ignore[arg-type]\n finally:\n delattr(self._testcase, self.name)\n\n def _prunetraceback(\n self, excinfo: _pytest._code.ExceptionInfo[BaseException]\n ) -> None:\n Function._prunetraceback(self, excinfo)\n traceback = excinfo.traceback.filter(\n lambda x: not x.frame.f_globals.get(\"__unittest\")\n )\n if traceback:\n excinfo.traceback = traceback", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/test_fixture_named_request.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/fixtures/test_fixture_named_request.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/fixtures/test_fixture_named_request.py", "file_name": "test_fixture_named_request.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 11, "span_ids": ["test", "request", "imports"], "tokens": 17}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef request():\n pass\n\n\ndef test():\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_buggy_builtin_repr_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_buggy_builtin_repr_", "embedding": null, "metadata": {"file_path": "testing/io/test_saferepr.py", "file_name": "test_saferepr.py", "file_type": "text/x-python", "category": "test", "start_line": 122, "end_line": 174, "span_ids": ["test_pformat_dispatch", "test_big_repr", "test_repr_on_newstyle.Function.__repr__", "test_buggy_builtin_repr", "test_unicode", "test_broken_getattribute", "test_repr_on_newstyle.Function", "test_broken_getattribute.SomeClass.__getattribute__", 
"test_broken_getattribute.SomeClass", "test_repr_on_newstyle", "test_buggy_builtin_repr.int.__repr__", "test_buggy_builtin_repr.int"], "tokens": 355}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_buggy_builtin_repr():\n # Simulate a case where a repr for a builtin raises.\n # reprlib dispatches by type name, so use \"int\".\n\n class int:\n def __repr__(self):\n raise ValueError(\"Buggy repr!\")\n\n assert \"Buggy\" in saferepr(int())\n\n\ndef test_big_repr():\n from _pytest._io.saferepr import SafeRepr\n\n assert len(saferepr(range(1000))) <= len(\"[\" + SafeRepr(0).maxlist * \"1000\" + \"]\")\n\n\ndef test_repr_on_newstyle() -> None:\n class Function:\n def __repr__(self):\n return \"<%s>\" % (self.name) # type: ignore[attr-defined]\n\n assert saferepr(Function())\n\n\ndef test_unicode():\n val = \"\u00a3\u20ac\"\n reprval = \"'\u00a3\u20ac'\"\n assert saferepr(val) == reprval\n\n\ndef test_pformat_dispatch():\n assert _pformat_dispatch(\"a\") == \"'a'\"\n assert _pformat_dispatch(\"a\" * 10, width=5) == \"'aaaaaaaaaa'\"\n assert _pformat_dispatch(\"foo bar\", width=5) == \"('foo '\\n 'bar')\"\n\n\ndef test_broken_getattribute():\n \"\"\"saferepr() can create proper representations of classes with\n broken __getattribute__ (#7145)\n \"\"\"\n\n class SomeClass:\n def __getattribute__(self, attr):\n raise RuntimeError\n\n def __repr__(self):\n raise RuntimeError\n\n assert saferepr(SomeClass()).startswith(\n \"<[RuntimeError() raised in repr()] SomeClass object at 0x\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_TestRequestBasic.test_getfixturevalue.assert_request_in_item_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_getfixturevalue_TestRequestBasic.test_getfixturevalue.assert_request_in_item_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 809, "end_line": 844, "span_ids": ["TestRequestBasic.test_getfixturevalue"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_getfixturevalue(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def something(request):\n return 1\n\n values = [2]\n @pytest.fixture\n def other(request):\n return values.pop()\n\n def test_func(something): pass\n \"\"\"\n )\n assert isinstance(item, Function)\n req = item._request\n\n # Execute item's setup.\n item.session._setupstate.setup(item)\n\n with pytest.raises(pytest.FixtureLookupError):\n req.getfixturevalue(\"notexists\")\n val = req.getfixturevalue(\"something\")\n assert val == 1\n val = 
req.getfixturevalue(\"something\")\n assert val == 1\n val2 = req.getfixturevalue(\"other\")\n assert val2 == 2\n val2 = req.getfixturevalue(\"other\") # see about caching\n assert val2 == 2\n assert item.funcargs[\"something\"] == 1\n assert len(get_public_names(item.funcargs)) == 2\n assert \"request\" in item.funcargs", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_factory_uses_unknown_funcarg_as_dependency_error_TestFixtureUsages.test_factory_setup_as_classes_fails.assert_len_values_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_factory_uses_unknown_funcarg_as_dependency_error_TestFixtureUsages.test_factory_setup_as_classes_fails.assert_len_values_1", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1323, "end_line": 1366, "span_ids": ["TestFixtureUsages.test_factory_uses_unknown_funcarg_as_dependency_error", "TestFixtureUsages.test_factory_setup_as_classes_fails"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n def test_factory_uses_unknown_funcarg_as_dependency_error(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture()\n def fail(missing):\n return\n\n @pytest.fixture()\n def call_fail(fail):\n return\n\n def test_missing(call_fail):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *pytest.fixture()*\n *def call_fail(fail)*\n *pytest.fixture()*\n *def fail*\n *fixture*'missing'*not found*\n \"\"\"\n )\n\n def test_factory_setup_as_classes_fails(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n class arg1(object):\n def __init__(self, request):\n self.x = 1\n arg1 = pytest.fixture()(arg1)\n\n \"\"\"\n )\n reprec = pytester.inline_run()\n values = reprec.getfailedcollections()\n assert len(values) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_sentinel_check_against_numpy_like_TestMockDecoration.test_mock_sentinel_check_against_numpy_like.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestMockDecoration.test_mock_sentinel_check_against_numpy_like_TestMockDecoration.test_mock_sentinel_check_against_numpy_like.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 191, "end_line": 217, "span_ids": ["TestMockDecoration.test_mock_sentinel_check_against_numpy_like"], "tokens": 225}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMockDecoration:\n\n def test_mock_sentinel_check_against_numpy_like(self, pytester: Pytester) -> None:\n \"\"\"Ensure our function that detects mock arguments compares against sentinels using\n identity to circumvent objects which can't be compared with equality against others\n in a truth context, like with numpy arrays (#5606).\n \"\"\"\n pytester.makepyfile(\n dummy=\"\"\"\n class NumpyLike:\n def __init__(self, value):\n self.value = value\n def __eq__(self, other):\n raise ValueError(\"like numpy, cannot compare against others for truth\")\n FOO = NumpyLike(10)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n from unittest.mock import patch\n import dummy\n class Test(object):\n @patch(\"dummy.FOO\", new=dummy.NumpyLike(50))\n def test_hello(self):\n assert dummy.FOO.value == 50\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_noclass_TestRaises.test_no_raise_message.None_1.else_.assert_False_Expected_p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_noclass_TestRaises.test_no_raise_message.None_1.else_.assert_False_Expected_p", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 129, "end_line": 156, "span_ids": ["TestRaises.test_tuple", "TestRaises.test_invalid_arguments_to_raises", "TestRaises.test_no_raise_message", "TestRaises.test_noclass"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_noclass(self) -> None:\n with pytest.raises(TypeError):\n pytest.raises(\"wrong\", lambda: None) # type: ignore[call-overload]\n\n def test_invalid_arguments_to_raises(self) -> None:\n with pytest.raises(TypeError, match=\"unknown\"):\n with pytest.raises(TypeError, unknown=\"bogus\"): # type: ignore[call-overload]\n raise ValueError()\n\n def test_tuple(self):\n with pytest.raises((KeyError, ValueError)):\n raise KeyError(\"oops\")\n\n def test_no_raise_message(self) -> None:\n try:\n pytest.raises(ValueError, int, \"0\")\n except pytest.fail.Exception as e:\n assert e.msg == \"DID NOT RAISE {}\".format(repr(ValueError))\n else:\n assert False, \"Expected pytest.raises.Exception\"\n\n try:\n with pytest.raises(ValueError):\n pass\n except pytest.fail.Exception as e:\n assert e.msg == \"DID NOT RAISE {}\".format(repr(ValueError))\n else:\n assert False, \"Expected pytest.raises.Exception\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": 
"\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_cwd_changed_TestEarlyRewriteBailout.test_cwd_changed.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestEarlyRewriteBailout.test_cwd_changed_TestEarlyRewriteBailout.test_cwd_changed.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1384, "end_line": 1412, "span_ids": ["TestEarlyRewriteBailout.test_cwd_changed"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEarlyRewriteBailout:\n\n @pytest.mark.skipif(\n sys.platform.startswith(\"win32\"), reason=\"cannot remove cwd on Windows\"\n )\n def test_cwd_changed(self, pytester: Pytester, monkeypatch) -> None:\n # Setup conditions for py's fspath trying to import pathlib on py34\n # always (previously triggered via xdist only).\n # Ref: https://github.com/pytest-dev/py/pull/207\n monkeypatch.syspath_prepend(\"\")\n monkeypatch.delitem(sys.modules, \"pathlib\", raising=False)\n\n pytester.makepyfile(\n **{\n \"test_setup_nonexisting_cwd.py\": \"\"\"\\\n import os\n import shutil\n import tempfile\n\n d = tempfile.mkdtemp()\n os.chdir(d)\n shutil.rmtree(d)\n \"\"\",\n \"test_test.py\": \"\"\"\\\n def test():\n pass\n \"\"\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_call_TestAssertionPass.test_hook_call_with_parens.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_call_TestAssertionPass.test_hook_call_with_parens.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1433, "end_line": 1463, "span_ids": ["TestAssertionPass.test_hook_call", "TestAssertionPass.test_hook_call_with_parens"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionPass:\n\n def test_hook_call(self, pytester: Pytester, flag_on, hook_on) -> None:\n pytester.makepyfile(\n \"\"\"\\\n def test_simple():\n a=1\n b=2\n c=3\n d=0\n\n assert a+b == c+d\n\n # cover failing assertions with a message\n def test_fails():\n assert False, \"assert with message\"\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"*Assertion Passed: a+b == c+d (1 + 2) == (3 + 0) at line 7*\"\n )\n\n def 
test_hook_call_with_parens(self, pytester: Pytester, flag_on, hook_on) -> None:\n pytester.makepyfile(\n \"\"\"\\\n def f(): return 1\n def test():\n assert f()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\"*Assertion Passed: f() 1\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_not_called_without_hookimpl_TestAssertionPass.test_hook_not_called_without_hookimpl.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_not_called_without_hookimpl_TestAssertionPass.test_hook_not_called_without_hookimpl.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1465, "end_line": 1490, "span_ids": ["TestAssertionPass.test_hook_not_called_without_hookimpl"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionPass:\n\n def test_hook_not_called_without_hookimpl(\n self, pytester: Pytester, monkeypatch, flag_on\n ) -> None:\n \"\"\"Assertion pass should not be called (and hence formatting should\n not occur) if there is no hook declared for pytest_assertion_pass\"\"\"\n\n def raise_on_assertionpass(*_, **__):\n raise Exception(\"Assertion passed called when it shouldn't!\")\n\n monkeypatch.setattr(\n _pytest.assertion.rewrite, \"_call_assertion_pass\", raise_on_assertionpass\n )\n\n pytester.makepyfile(\n \"\"\"\\\n def test_simple():\n a=1\n b=2\n c=3\n d=0\n\n assert a+b == c+d\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_not_called_without_cmd_option_TestAssertionPass.test_hook_not_called_without_cmd_option.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass.test_hook_not_called_without_cmd_option_TestAssertionPass.test_hook_not_called_without_cmd_option.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1492, "end_line": 1524, "span_ids": ["TestAssertionPass.test_hook_not_called_without_cmd_option"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionPass:\n\n def 
test_hook_not_called_without_cmd_option(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n \"\"\"Assertion pass should not be called (and hence formatting should\n not occur) if there is no hook declared for pytest_assertion_pass\"\"\"\n\n def raise_on_assertionpass(*_, **__):\n raise Exception(\"Assertion passed called when it shouldn't!\")\n\n monkeypatch.setattr(\n _pytest.assertion.rewrite, \"_call_assertion_pass\", raise_on_assertionpass\n )\n\n pytester.makeconftest(\n \"\"\"\\\n def pytest_assertion_pass(item, lineno, orig, expl):\n raise Exception(\"Assertion Passed: {} {} at line {}\".format(orig, expl, lineno))\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\\\n def test_simple():\n a=1\n b=2\n c=3\n d=0\n\n assert a+b == c+d\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_ini_parse_error_TestParseIni.test_confcutdir.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_ini_parse_error_TestParseIni.test_confcutdir.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 162, "end_line": 179, "span_ids": ["TestParseIni.test_confcutdir", "TestParseIni.test_ini_parse_error"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n def test_ini_parse_error(self, pytester: Pytester) -> None:\n pytester.path.joinpath(\"pytest.ini\").write_text(\"addopts = -x\")\n result = pytester.runpytest()\n assert result.ret != 0\n result.stderr.fnmatch_lines([\"ERROR: *pytest.ini:1: no section header defined\"])\n\n @pytest.mark.xfail(reason=\"probably not needed\")\n def test_confcutdir(self, pytester: Pytester) -> None:\n sub = pytester.mkdir(\"sub\")\n os.chdir(sub)\n pytester.makeini(\n \"\"\"\n [pytest]\n addopts = --qwe\n \"\"\"\n )\n result = pytester.inline_run(\"--confcutdir=.\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_no_terminal_discovery_error_test_load_initial_conftest_last_ordering.assert_x_function___modu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_no_terminal_discovery_error_test_load_initial_conftest_last_ordering.assert_x_function___modu", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1234, "end_line": 1252, "span_ids": ["test_no_terminal_discovery_error", "test_load_initial_conftest_last_ordering.My.pytest_load_initial_conftests", "test_load_initial_conftest_last_ordering.My", "test_load_initial_conftest_last_ordering"], 
"tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_no_terminal_discovery_error(pytester: Pytester) -> None:\n pytester.makepyfile(\"raise TypeError('oops!')\")\n result = pytester.runpytest(\"-p\", \"no:terminal\", \"--collect-only\")\n assert result.ret == ExitCode.INTERRUPTED\n\n\ndef test_load_initial_conftest_last_ordering(_config_for_test):\n pm = _config_for_test.pluginmanager\n\n class My:\n def pytest_load_initial_conftests(self):\n pass\n\n m = My()\n pm.register(m)\n hc = pm.hook.pytest_load_initial_conftests\n values = hc._nonwrappers + hc._wrappers\n expected = [\"_pytest.config\", m.__module__, \"_pytest.capture\", \"_pytest.warnings\"]\n assert [x.function.__module__ for x in values] == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_get_plugin_specs_as_list_test_get_plugin_specs_as_list.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_get_plugin_specs_as_list_test_get_plugin_specs_as_list.None_5", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1123, "end_line": 1140, "span_ids": ["test_get_plugin_specs_as_list"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_get_plugin_specs_as_list() -> None:\n def exp_match(val: object) -> str:\n return (\n \"Plugins may be specified as a sequence or a ','-separated string of plugin names. 
Got: %s\"\n % re.escape(repr(val))\n )\n\n with pytest.raises(pytest.UsageError, match=exp_match({\"foo\"})):\n _get_plugin_specs_as_list({\"foo\"}) # type: ignore[arg-type]\n with pytest.raises(pytest.UsageError, match=exp_match({})):\n _get_plugin_specs_as_list(dict()) # type: ignore[arg-type]\n\n assert _get_plugin_specs_as_list(None) == []\n assert _get_plugin_specs_as_list(\"\") == []\n assert _get_plugin_specs_as_list(\"foo\") == [\"foo\"]\n assert _get_plugin_specs_as_list(\"foo,bar\") == [\"foo\", \"bar\"]\n assert _get_plugin_specs_as_list([\"foo\", \"bar\"]) == [\"foo\", \"bar\"]\n assert _get_plugin_specs_as_list((\"foo\", \"bar\")) == [\"foo\", \"bar\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_config_blocked_default_plugins_test_config_blocked_default_plugins.None_3.else_.assert_result_stdout_line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_config_blocked_default_plugins_test_config_blocked_default_plugins.None_3.else_.assert_result_stdout_line", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1773, "end_line": 1815, "span_ids": ["test_config_blocked_default_plugins"], "tokens": 342}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"plugin\",\n [\n x\n for x in _pytest.config.default_plugins\n if x not in _pytest.config.essential_plugins\n ],\n)\ndef test_config_blocked_default_plugins(pytester: Pytester, plugin: str) -> None:\n if plugin == \"debugging\":\n # Fixed in xdist master (after 1.27.0).\n # https://github.com/pytest-dev/pytest-xdist/pull/422\n try:\n import xdist # noqa: F401\n except ImportError:\n pass\n else:\n pytest.skip(\"does not work with xdist currently\")\n\n p = pytester.makepyfile(\"def test(): pass\")\n result = pytester.runpytest(str(p), \"-pno:%s\" % plugin)\n\n if plugin == \"python\":\n assert result.ret == ExitCode.USAGE_ERROR\n result.stderr.fnmatch_lines(\n [\n \"ERROR: not found: */test_config_blocked_default_plugins.py\",\n \"(no name '*/test_config_blocked_default_plugins.py' in any of [])\",\n ]\n )\n return\n\n assert result.ret == ExitCode.OK\n if plugin != \"terminal\":\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])\n\n p = pytester.makepyfile(\"def test(): assert 0\")\n result = pytester.runpytest(str(p), \"-pno:%s\" % plugin)\n assert result.ret == ExitCode.TESTS_FAILED\n if plugin != \"terminal\":\n result.stdout.fnmatch_lines([\"* 1 failed in *\"])\n else:\n assert result.stdout.lines == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported.res_stdout_fnmatch_lines_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1842, "end_line": 1862, "span_ids": ["TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported", "TestPytestPluginsVariable"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginsVariable:\n def test_pytest_plugins_in_non_top_level_conftest_unsupported(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n **{\n \"subdirectory/conftest.py\": \"\"\"\n pytest_plugins=['capture']\n \"\"\"\n }\n )\n pytester.makepyfile(\n \"\"\"\n def test_func():\n pass\n \"\"\"\n )\n res = pytester.runpytest()\n assert res.ret == 2\n msg = \"Defining 'pytest_plugins' in a non-top-level conftest is no longer supported\"\n res.stdout.fnmatch_lines([f\"*{msg}*\", f\"*subdirectory{os.sep}conftest.py*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs.if_use_pyargs_.else_.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs.if_use_pyargs_.else_.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1864, "end_line": 1890, "span_ids": ["TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginsVariable:\n\n @pytest.mark.parametrize(\"use_pyargs\", [True, False])\n def test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs(\n self, pytester: Pytester, use_pyargs: bool\n ) -> None:\n \"\"\"When using --pyargs, do not emit the warning about non-top-level conftest warnings (#4039, #4044)\"\"\"\n\n files = {\n \"src/pkg/__init__.py\": \"\",\n \"src/pkg/conftest.py\": \"\",\n \"src/pkg/test_root.py\": \"def test(): pass\",\n \"src/pkg/sub/__init__.py\": \"\",\n \"src/pkg/sub/conftest.py\": \"pytest_plugins=['capture']\",\n \"src/pkg/sub/test_bar.py\": \"def test(): pass\",\n }\n pytester.makepyfile(**files)\n 
pytester.syspathinsert(pytester.path.joinpath(\"src\"))\n\n args = (\"--pyargs\", \"pkg\") if use_pyargs else ()\n res = pytester.runpytest(*args)\n assert res.ret == (0 if use_pyargs else 2)\n msg = (\n msg\n ) = \"Defining 'pytest_plugins' in a non-top-level conftest is no longer supported\"\n if use_pyargs:\n assert msg not in res.stdout.str()\n else:\n res.stdout.fnmatch_lines([f\"*{msg}*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1892, "end_line": 1916, "span_ids": ["TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginsVariable:\n\n def test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest(\n self, pytester: Pytester\n ) -> None:\n subdirectory = pytester.path.joinpath(\"subdirectory\")\n subdirectory.mkdir()\n pytester.makeconftest(\n \"\"\"\n pytest_plugins=['capture']\n \"\"\"\n )\n pytester.path.joinpath(\"conftest.py\").rename(\n subdirectory.joinpath(\"conftest.py\")\n )\n\n pytester.makepyfile(\n \"\"\"\n def test_func():\n pass\n \"\"\"\n )\n\n res = pytester.runpytest_subprocess()\n assert res.ret == 2\n msg = \"Defining 'pytest_plugins' in a non-top-level conftest is no longer supported\"\n res.stdout.fnmatch_lines([f\"*{msg}*\", f\"*subdirectory{os.sep}conftest.py*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_precision_TestLiterals.test_number_precision.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_precision_TestLiterals.test_number_precision.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 966, "end_line": 1038, "span_ids": ["TestLiterals.test_number_precision"], "tokens": 527}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLiterals:\n\n @pytest.mark.parametrize(\"config_mode\", [\"ini\", \"comment\"])\n def test_number_precision(self, pytester, config_mode):\n \"\"\"Test the NUMBER option.\"\"\"\n if config_mode == \"ini\":\n pytester.makeini(\n \"\"\"\n [pytest]\n doctest_optionflags = NUMBER\n \"\"\"\n )\n comment = \"\"\n else:\n comment = \"#doctest: +NUMBER\"\n\n pytester.maketxtfile(\n test_doc=\"\"\"\n\n Scalars:\n\n >>> import math\n >>> math.pi {comment}\n 3.141592653589793\n >>> math.pi {comment}\n 3.1416\n >>> math.pi {comment}\n 3.14\n >>> -math.pi {comment}\n -3.14\n >>> math.pi {comment}\n 3.\n >>> 3. {comment}\n 3.0\n >>> 3. {comment}\n 3.\n >>> 3. {comment}\n 3.01\n >>> 3. {comment}\n 2.99\n >>> .299 {comment}\n .3\n >>> .301 {comment}\n .3\n >>> 951. {comment}\n 1e3\n >>> 1049. {comment}\n 1e3\n >>> -1049. {comment}\n -1e3\n >>> 1e3 {comment}\n 1e3\n >>> 1e3 {comment}\n 1000.\n\n Lists:\n\n >>> [3.1415, 0.097, 13.1, 7, 8.22222e5, 0.598e-2] {comment}\n [3.14, 0.1, 13., 7, 8.22e5, 6.0e-3]\n >>> [[0.333, 0.667], [0.999, 1.333]] {comment}\n [[0.33, 0.667], [0.999, 1.333]]\n >>> [[[0.101]]] {comment}\n [[[0.1]]]\n\n Doesn't barf on non-numbers:\n\n >>> 'abc' {comment}\n 'abc'\n >>> None {comment}\n \"\"\".format(\n comment=comment\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_non_matches_TestLiterals.test_number_non_matches.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_non_matches_TestLiterals.test_number_non_matches.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1040, "end_line": 1071, "span_ids": ["TestLiterals.test_number_non_matches"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLiterals:\n\n @pytest.mark.parametrize(\n \"expression,output\",\n [\n # ints shouldn't match floats:\n (\"3.0\", \"3\"),\n (\"3e0\", \"3\"),\n (\"1e3\", \"1000\"),\n (\"3\", \"3.0\"),\n # Rounding:\n (\"3.1\", \"3.0\"),\n (\"3.1\", \"3.2\"),\n (\"3.1\", \"4.0\"),\n (\"8.22e5\", \"810000.0\"),\n # Only the actual output is rounded up, not the expected output:\n (\"3.0\", \"2.98\"),\n (\"1e3\", \"999\"),\n # The current implementation doesn't understand that numbers inside\n # strings shouldn't be treated as numbers:\n pytest.param(\"'3.1416'\", \"'3.14'\", marks=pytest.mark.xfail),\n ],\n )\n def test_number_non_matches(self, pytester, expression, output):\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> {expression} #doctest: +NUMBER\n {output}\n \"\"\".format(\n expression=expression, output=output\n )\n )\n reprec = 
pytester.inline_run()\n reprec.assertoutcome(passed=0, failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_and_allow_unicode_TestLiterals.test_number_and_allow_unicode.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_and_allow_unicode_TestLiterals.test_number_and_allow_unicode.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1073, "end_line": 1083, "span_ids": ["TestLiterals.test_number_and_allow_unicode"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLiterals:\n\n def test_number_and_allow_unicode(self, pytester: Pytester):\n pytester.maketxtfile(\n test_doc=\"\"\"\n >>> from collections import namedtuple\n >>> T = namedtuple('T', 'a b c')\n >>> T(a=0.2330000001, b=u'str', c=b'bytes') # doctest: +ALLOW_UNICODE, +ALLOW_BYTES, +NUMBER\n T(a=0.233, b=u'str', c='bytes')\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_DomNode_DomNode.next_sibling.return.type_self_self___node_ne": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_DomNode_DomNode.next_sibling.return.type_self_self___node_ne", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 60, "end_line": 118, "span_ids": ["DomNode.toxml", "DomNode.find_nth_by_tag", "DomNode.children", "DomNode", "DomNode.find_first_by_tag", "DomNode.__getitem__", "DomNode.tag", "DomNode.text", "DomNode.get_unique_child", "DomNode.__repr__", "DomNode.next_sibling", "DomNode.assert_attr", "DomNode._by_tag", "DomNode.find_by_tag"], "tokens": 339}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DomNode:\n def __init__(self, dom):\n self.__node = dom\n\n def __repr__(self):\n return self.__node.toxml()\n\n def find_first_by_tag(self, tag):\n return self.find_nth_by_tag(tag, 0)\n\n def _by_tag(self, tag):\n return self.__node.getElementsByTagName(tag)\n\n @property\n def children(self):\n return [type(self)(x) for x in self.__node.childNodes]\n\n @property\n def get_unique_child(self):\n children = self.children\n assert len(children) == 1\n return children[0]\n\n def find_nth_by_tag(self, tag, n):\n items = self._by_tag(tag)\n try:\n 
nth = items[n]\n except IndexError:\n pass\n else:\n return type(self)(nth)\n\n def find_by_tag(self, tag):\n t = type(self)\n return [t(x) for x in self.__node.getElementsByTagName(tag)]\n\n def __getitem__(self, key):\n node = self.__node.getAttributeNode(key)\n if node is not None:\n return node.value\n\n def assert_attr(self, **kwargs):\n __tracebackhide__ = True\n return assert_attr(self.__node, **kwargs)\n\n def toxml(self):\n return self.__node.toxml()\n\n @property\n def text(self):\n return self.__node.childNodes[0].wholeText\n\n @property\n def tag(self):\n return self.__node.tagName\n\n @property\n def next_sibling(self):\n return type(self)(self.__node.nextSibling)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_root_testsuites_tag_test_runs_twice.assert_first_second": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_root_testsuites_tag_test_runs_twice.assert_first_second", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1364, "end_line": 1392, "span_ids": ["test_root_testsuites_tag", "test_runs_twice"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@parametrize_families\ndef test_root_testsuites_tag(\n pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_x():\n pass\n \"\"\"\n )\n _, dom = run_and_parse(family=xunit_family)\n root = dom.get_unique_child\n assert root.tag == \"testsuites\"\n suite_node = root.get_unique_child\n assert suite_node.tag == \"testsuite\"\n\n\ndef test_runs_twice(pytester: Pytester, run_and_parse: RunAndParse) -> None:\n f = pytester.makepyfile(\n \"\"\"\n def test_pass():\n pass\n \"\"\"\n )\n\n result, dom = run_and_parse(f, f)\n result.stdout.no_fnmatch_line(\"*INTERNALERROR*\")\n first, second = [x[\"classname\"] for x in dom.find_by_tag(\"testcase\")]\n assert first == second", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_runs_twice_xdist_test_runs_twice_xdist.assert_first_second": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_runs_twice_xdist_test_runs_twice_xdist.assert_first_second", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1395, "end_line": 1410, "span_ids": ["test_runs_twice_xdist"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "def test_runs_twice_xdist(\n pytester: Pytester, monkeypatch: MonkeyPatch, run_and_parse: RunAndParse\n) -> None:\n pytest.importorskip(\"xdist\")\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\")\n f = pytester.makepyfile(\n \"\"\"\n def test_pass():\n pass\n \"\"\"\n )\n\n result, dom = run_and_parse(f, \"--dist\", \"each\", \"--tx\", \"2*popen\")\n result.stdout.no_fnmatch_line(\"*INTERNALERROR*\")\n first, second = [x[\"classname\"] for x in dom.find_by_tag(\"testcase\")]\n assert first == second", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns_TestWarns.test_warning_tuple.pytest_raises_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns_TestWarns.test_warning_tuple.pytest_raises_", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 210, "end_line": 241, "span_ids": ["TestWarns.test_warning_tuple", "TestWarns.test_several_messages", "TestWarns.test_check_callable", "TestWarns.test_function", "TestWarns"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarns:\n def test_check_callable(self) -> None:\n source = \"warnings.warn('w1', RuntimeWarning)\"\n with pytest.raises(TypeError, match=r\".* must be callable\"):\n pytest.warns(RuntimeWarning, source) # type: ignore\n\n def test_several_messages(self) -> None:\n # different messages, b/c Python suppresses multiple identical warnings\n pytest.warns(RuntimeWarning, lambda: warnings.warn(\"w1\", RuntimeWarning))\n with pytest.raises(pytest.fail.Exception):\n pytest.warns(UserWarning, lambda: warnings.warn(\"w2\", RuntimeWarning))\n pytest.warns(RuntimeWarning, lambda: warnings.warn(\"w3\", RuntimeWarning))\n\n def test_function(self) -> None:\n pytest.warns(\n SyntaxWarning, lambda msg: warnings.warn(msg, SyntaxWarning), \"syntax\"\n )\n\n def test_warning_tuple(self) -> None:\n pytest.warns(\n (RuntimeWarning, SyntaxWarning), lambda: warnings.warn(\"w1\", RuntimeWarning)\n )\n pytest.warns(\n (RuntimeWarning, SyntaxWarning), lambda: warnings.warn(\"w2\", SyntaxWarning)\n )\n pytest.raises(\n pytest.fail.Exception,\n lambda: pytest.warns(\n (RuntimeWarning, SyntaxWarning),\n lambda: warnings.warn(\"w3\", UserWarning),\n ),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_stop_on_collection_errors_test_stop_on_collection_errors.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_stop_on_collection_errors_test_stop_on_collection_errors.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 188, "end_line": 198, 
"span_ids": ["test_stop_on_collection_errors"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"broken_first\", [True, False])\ndef test_stop_on_collection_errors(\n broken_pytester: Pytester, broken_first: bool\n) -> None:\n \"\"\"Stop during collection errors. Broken test first or broken test last\n actually surfaced a bug (#5444), so we test both situations.\"\"\"\n files = [\"working_testfile.py\", \"broken_testfile.py\"]\n if broken_first:\n files.reverse()\n result = broken_pytester.runpytest(\"-v\", \"--strict-markers\", \"--stepwise\", *files)\n result.stdout.fnmatch_lines(\"*error during collection*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_xfail_handling_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_xfail_handling_", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 201, "end_line": 251, "span_ids": ["test_xfail_handling"], "tokens": 371}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_xfail_handling(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n \"\"\"Ensure normal xfail is ignored, and strict xfail interrupts the session in sw mode\n\n (#5547)\n \"\"\"\n monkeypatch.setattr(\"sys.dont_write_bytecode\", True)\n\n contents = \"\"\"\n import pytest\n def test_a(): pass\n\n @pytest.mark.xfail(strict={strict})\n def test_b(): assert {assert_value}\n\n def test_c(): pass\n def test_d(): pass\n \"\"\"\n pytester.makepyfile(contents.format(assert_value=\"0\", strict=\"False\"))\n result = pytester.runpytest(\"--sw\", \"-v\")\n result.stdout.fnmatch_lines(\n [\n \"*::test_a PASSED *\",\n \"*::test_b XFAIL *\",\n \"*::test_c PASSED *\",\n \"*::test_d PASSED *\",\n \"* 3 passed, 1 xfailed in *\",\n ]\n )\n\n pytester.makepyfile(contents.format(assert_value=\"1\", strict=\"True\"))\n result = pytester.runpytest(\"--sw\", \"-v\")\n result.stdout.fnmatch_lines(\n [\n \"*::test_a PASSED *\",\n \"*::test_b FAILED *\",\n \"* Interrupted*\",\n \"* 1 failed, 1 passed in *\",\n ]\n )\n\n pytester.makepyfile(contents.format(assert_value=\"0\", strict=\"True\"))\n result = pytester.runpytest(\"--sw\", \"-v\")\n result.stdout.fnmatch_lines(\n [\n \"*::test_b XFAIL *\",\n \"*::test_c PASSED *\",\n \"*::test_d PASSED *\",\n \"* 2 passed, 1 deselected, 1 xfailed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_os_FakeConfig.option.return.self": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_os_FakeConfig.option.return.self", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 46, "span_ids": ["FakeConfig.get", "FakeConfig.trace", "test_tmpdir_fixture", "FakeConfig", "FakeConfig.option", "imports"], "tokens": 271}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport stat\nimport sys\nfrom pathlib import Path\nfrom typing import Callable\nfrom typing import cast\nfrom typing import List\n\nimport attr\n\nimport pytest\nfrom _pytest import pathlib\nfrom _pytest.config import Config\nfrom _pytest.pathlib import cleanup_numbered_dir\nfrom _pytest.pathlib import create_cleanup_lock\nfrom _pytest.pathlib import make_numbered_dir\nfrom _pytest.pathlib import maybe_delete_a_numbered_dir\nfrom _pytest.pathlib import on_rm_rf_error\nfrom _pytest.pathlib import register_cleanup_lock_removal\nfrom _pytest.pathlib import rm_rf\nfrom _pytest.pytester import Pytester\nfrom _pytest.tmpdir import get_user\nfrom _pytest.tmpdir import TempdirFactory\nfrom _pytest.tmpdir import TempPathFactory\n\n\ndef test_tmpdir_fixture(pytester: Pytester) -> None:\n p = pytester.copy_example(\"tmpdir/tmpdir_fixture.py\")\n results = pytester.runpytest(p)\n results.stdout.fnmatch_lines([\"*1 passed*\"])\n\n\n@attr.s\nclass FakeConfig:\n basetemp = attr.ib()\n\n @property\n def trace(self):\n return self\n\n def get(self, key):\n return lambda *k: None\n\n @property\n def option(self):\n return self", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestNumberedDir_TestNumberedDir.test_removal_accepts_lock.assert_folder_is_dir_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestNumberedDir_TestNumberedDir.test_removal_accepts_lock.assert_folder_is_dir_", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 249, "end_line": 330, "span_ids": ["TestNumberedDir._do_cleanup", "TestNumberedDir.test_lock_register_cleanup_removal", "TestNumberedDir.test_cleanup_ignores_symlink", "TestNumberedDir.test_removal_accepts_lock", "TestNumberedDir", "TestNumberedDir.test_cleanup_locked", "TestNumberedDir.test_cleanup_lock_create", "TestNumberedDir.test_cleanup_keep", "TestNumberedDir.test_make"], "tokens": 567}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNumberedDir:\n PREFIX = \"fun-\"\n\n def test_make(self, tmp_path):\n for i in range(10):\n d = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)\n assert d.name.startswith(self.PREFIX)\n assert 
d.name.endswith(str(i))\n\n symlink = tmp_path.joinpath(self.PREFIX + \"current\")\n if symlink.exists():\n # unix\n assert symlink.is_symlink()\n assert symlink.resolve() == d.resolve()\n\n def test_cleanup_lock_create(self, tmp_path):\n d = tmp_path.joinpath(\"test\")\n d.mkdir()\n lockfile = create_cleanup_lock(d)\n with pytest.raises(OSError, match=\"cannot create lockfile in .*\"):\n create_cleanup_lock(d)\n\n lockfile.unlink()\n\n def test_lock_register_cleanup_removal(self, tmp_path: Path) -> None:\n lock = create_cleanup_lock(tmp_path)\n\n registry: List[Callable[..., None]] = []\n register_cleanup_lock_removal(lock, register=registry.append)\n\n (cleanup_func,) = registry\n\n assert lock.is_file()\n\n cleanup_func(original_pid=\"intentionally_different\")\n\n assert lock.is_file()\n\n cleanup_func()\n\n assert not lock.exists()\n\n cleanup_func()\n\n assert not lock.exists()\n\n def _do_cleanup(self, tmp_path: Path) -> None:\n self.test_make(tmp_path)\n cleanup_numbered_dir(\n root=tmp_path,\n prefix=self.PREFIX,\n keep=2,\n consider_lock_dead_if_created_before=0,\n )\n\n def test_cleanup_keep(self, tmp_path):\n self._do_cleanup(tmp_path)\n a, b = (x for x in tmp_path.iterdir() if not x.is_symlink())\n print(a, b)\n\n def test_cleanup_locked(self, tmp_path):\n p = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)\n\n create_cleanup_lock(p)\n\n assert not pathlib.ensure_deletable(\n p, consider_lock_dead_if_created_before=p.stat().st_mtime - 1\n )\n assert pathlib.ensure_deletable(\n p, consider_lock_dead_if_created_before=p.stat().st_mtime + 1\n )\n\n def test_cleanup_ignores_symlink(self, tmp_path):\n the_symlink = tmp_path / (self.PREFIX + \"current\")\n attempt_symlink_to(the_symlink, tmp_path / (self.PREFIX + \"5\"))\n self._do_cleanup(tmp_path)\n\n def test_removal_accepts_lock(self, tmp_path):\n folder = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)\n create_cleanup_lock(folder)\n maybe_delete_a_numbered_dir(folder)\n assert folder.is_dir()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestRmRf_TestRmRf.test_rm_rf_with_read_only_directory.assert_not_adir_is_dir_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestRmRf_TestRmRf.test_rm_rf_with_read_only_directory.assert_not_adir_is_dir_", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 326, "end_line": 368, "span_ids": ["TestRmRf.test_rm_rf_with_read_only_file", "TestRmRf", "TestRmRf.test_rm_rf_with_read_only_directory", "TestRmRf.test_rm_rf", "TestRmRf.chmod_r"], "tokens": 267}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRmRf:\n def test_rm_rf(self, tmp_path):\n adir = tmp_path / \"adir\"\n adir.mkdir()\n rm_rf(adir)\n\n assert not adir.exists()\n\n adir.mkdir()\n afile = adir / \"afile\"\n afile.write_bytes(b\"aa\")\n\n rm_rf(adir)\n assert not adir.exists()\n\n def test_rm_rf_with_read_only_file(self, tmp_path):\n \"\"\"Ensure rm_rf can remove 
directories with read-only files in them (#5524)\"\"\"\n fn = tmp_path / \"dir/foo.txt\"\n fn.parent.mkdir()\n\n fn.touch()\n\n self.chmod_r(fn)\n\n rm_rf(fn.parent)\n\n assert not fn.parent.is_dir()\n\n def chmod_r(self, path):\n mode = os.stat(str(path)).st_mode\n os.chmod(str(path), mode & ~stat.S_IWRITE)\n\n def test_rm_rf_with_read_only_directory(self, tmp_path):\n \"\"\"Ensure rm_rf can remove read-only directories (#5524)\"\"\"\n adir = tmp_path / \"dir\"\n adir.mkdir()\n\n (adir / \"foo.txt\").touch()\n self.chmod_r(adir)\n\n rm_rf(adir)\n\n assert not adir.is_dir()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestRmRf.test_on_rm_rf_error_TestRmRf.test_on_rm_rf_error.assert_not_fn_is_file_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestRmRf.test_on_rm_rf_error_TestRmRf.test_on_rm_rf_error.assert_not_fn_is_file_", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 372, "end_line": 408, "span_ids": ["TestRmRf.test_on_rm_rf_error"], "tokens": 341}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRmRf:\n\n def test_on_rm_rf_error(self, tmp_path: Path) -> None:\n adir = tmp_path / \"dir\"\n adir.mkdir()\n\n fn = adir / \"foo.txt\"\n fn.touch()\n self.chmod_r(fn)\n\n # unknown exception\n with pytest.warns(pytest.PytestWarning):\n exc_info1 = (None, RuntimeError(), None)\n on_rm_rf_error(os.unlink, str(fn), exc_info1, start_path=tmp_path)\n assert fn.is_file()\n\n # we ignore FileNotFoundError\n exc_info2 = (None, FileNotFoundError(), None)\n assert not on_rm_rf_error(None, str(fn), exc_info2, start_path=tmp_path)\n\n # unknown function\n with pytest.warns(\n pytest.PytestWarning,\n match=r\"^\\(rm_rf\\) unknown function None when removing .*foo.txt:\\nNone: \",\n ):\n exc_info3 = (None, PermissionError(), None)\n on_rm_rf_error(None, str(fn), exc_info3, start_path=tmp_path)\n assert fn.is_file()\n\n # ignored function\n with pytest.warns(None) as warninfo:\n exc_info4 = (None, PermissionError(), None)\n on_rm_rf_error(os.open, str(fn), exc_info4, start_path=tmp_path)\n assert fn.is_file()\n assert not [x.message for x in warninfo]\n\n exc_info5 = (None, PermissionError(), None)\n on_rm_rf_error(os.unlink, str(fn), exc_info5, start_path=tmp_path)\n assert not fn.is_file()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_attempt_symlink_to_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_attempt_symlink_to_", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 416, "end_line": 448, "span_ids": ["attempt_symlink_to", "test_tmpdir_equals_tmp_path", "test_basetemp_with_read_only_files"], "tokens": 253}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def attempt_symlink_to(path, to_path):\n \"\"\"Try to make a symlink from \"path\" to \"to_path\", skipping in case this platform\n does not support it or we don't have sufficient privileges (common on Windows).\"\"\"\n try:\n Path(path).symlink_to(Path(to_path))\n except OSError:\n pytest.skip(\"could not create symbolic link\")\n\n\ndef test_tmpdir_equals_tmp_path(tmpdir, tmp_path):\n assert Path(tmpdir) == tmp_path\n\n\ndef test_basetemp_with_read_only_files(pytester: Pytester) -> None:\n \"\"\"Integration test for #5524\"\"\"\n pytester.makepyfile(\n \"\"\"\n import os\n import stat\n\n def test(tmp_path):\n fn = tmp_path / 'foo.txt'\n fn.write_text('hello')\n mode = os.stat(str(fn)).st_mode\n os.chmod(str(fn), mode & ~stat.S_IREAD)\n \"\"\"\n )\n result = pytester.runpytest(\"--basetemp=tmp\")\n assert result.ret == 0\n # running a second time and ensure we don't crash\n result = pytester.runpytest(\"--basetemp=tmp\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_error_message_with_parametrized_fixtures_test_setup_inheritance_skipping.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_error_message_with_parametrized_fixtures_test_setup_inheritance_skipping.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1080, "end_line": 1106, "span_ids": ["test_setup_inheritance_skipping", "test_error_message_with_parametrized_fixtures"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_error_message_with_parametrized_fixtures(pytester: Pytester) -> None:\n pytester.copy_example(\"unittest/test_parametrized_fixture_error_message.py\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*test_two does not support fixtures*\",\n \"*TestSomethingElse::test_two\",\n \"*Function type: TestCaseFunction\",\n ]\n )\n\n\n@pytest.mark.parametrize(\n \"test_name, expected_outcome\",\n [\n (\"test_setup_skip.py\", \"1 skipped\"),\n (\"test_setup_skip_class.py\", \"1 skipped\"),\n (\"test_setup_skip_module.py\", \"1 error\"),\n ],\n)\ndef test_setup_inheritance_skipping(\n pytester: Pytester, test_name, expected_outcome\n) -> None:\n \"\"\"Issue #4700\"\"\"\n pytester.copy_example(f\"unittest/{test_name}\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([f\"* {expected_outcome} in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": 
"1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.for_later_ExceptionInfo.__repr__.return._tblen_format": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.for_later_ExceptionInfo.__repr__.return._tblen_format", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 503, "end_line": 561, "span_ids": ["ExceptionInfo.for_later", "ExceptionInfo.fill_unfilled", "ExceptionInfo.__repr__", "ExceptionInfo.traceback", "ExceptionInfo.type", "ExceptionInfo.tb", "ExceptionInfo.typename", "ExceptionInfo.traceback_9", "ExceptionInfo.value"], "tokens": 482}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False)\nclass ExceptionInfo(Generic[_E]):\n\n @classmethod\n def for_later(cls) -> \"ExceptionInfo[_E]\":\n \"\"\"Return an unfilled ExceptionInfo.\"\"\"\n return cls(None)\n\n def fill_unfilled(self, exc_info: Tuple[Type[_E], _E, TracebackType]) -> None:\n \"\"\"Fill an unfilled ExceptionInfo created with ``for_later()``.\"\"\"\n assert self._excinfo is None, \"ExceptionInfo was already filled\"\n self._excinfo = exc_info\n\n @property\n def type(self) -> Type[_E]:\n \"\"\"The exception class.\"\"\"\n assert (\n self._excinfo is not None\n ), \".type can only be used after the context manager exits\"\n return self._excinfo[0]\n\n @property\n def value(self) -> _E:\n \"\"\"The exception value.\"\"\"\n assert (\n self._excinfo is not None\n ), \".value can only be used after the context manager exits\"\n return self._excinfo[1]\n\n @property\n def tb(self) -> TracebackType:\n \"\"\"The exception raw traceback.\"\"\"\n assert (\n self._excinfo is not None\n ), \".tb can only be used after the context manager exits\"\n return self._excinfo[2]\n\n @property\n def typename(self) -> str:\n \"\"\"The type name of the exception.\"\"\"\n assert (\n self._excinfo is not None\n ), \".typename can only be used after the context manager exits\"\n return self.type.__name__\n\n @property\n def traceback(self) -> Traceback:\n \"\"\"The traceback.\"\"\"\n if self._traceback is None:\n self._traceback = Traceback(self.tb, excinfo=ref(self))\n return self._traceback\n\n @traceback.setter\n def traceback(self, value: Traceback) -> None:\n self._traceback = value\n\n def __repr__(self) -> str:\n if self._excinfo is None:\n return \"\"\n return \"<{} {} tblen={}>\".format(\n self.__class__.__name__, saferepr(self._excinfo[1]), len(self.traceback)\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__call_assertion_pass_set_location.return.node": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__call_assertion_pass_set_location.return.node", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 490, "end_line": 542, 
"span_ids": ["set_location", "_check_if_assertion_pass_impl", "impl:13", "_call_assertion_pass"], "tokens": 417}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _call_assertion_pass(lineno: int, orig: str, expl: str) -> None:\n if util._assertion_pass is not None:\n util._assertion_pass(lineno, orig, expl)\n\n\ndef _check_if_assertion_pass_impl() -> bool:\n \"\"\"Check if any plugins implement the pytest_assertion_pass hook\n in order not to generate explanation unecessarily (might be expensive).\"\"\"\n return True if util._assertion_pass else False\n\n\nUNARY_MAP = {ast.Not: \"not %s\", ast.Invert: \"~%s\", ast.USub: \"-%s\", ast.UAdd: \"+%s\"}\n\nBINOP_MAP = {\n ast.BitOr: \"|\",\n ast.BitXor: \"^\",\n ast.BitAnd: \"&\",\n ast.LShift: \"<<\",\n ast.RShift: \">>\",\n ast.Add: \"+\",\n ast.Sub: \"-\",\n ast.Mult: \"*\",\n ast.Div: \"/\",\n ast.FloorDiv: \"//\",\n ast.Mod: \"%%\", # escaped for string formatting\n ast.Eq: \"==\",\n ast.NotEq: \"!=\",\n ast.Lt: \"<\",\n ast.LtE: \"<=\",\n ast.Gt: \">\",\n ast.GtE: \">=\",\n ast.Pow: \"**\",\n ast.Is: \"is\",\n ast.IsNot: \"is not\",\n ast.In: \"in\",\n ast.NotIn: \"not in\",\n ast.MatMult: \"@\",\n}\n\n\ndef set_location(node, lineno, col_offset):\n \"\"\"Set node location information recursively.\"\"\"\n\n def _fix(node, lineno, col_offset):\n if \"lineno\" in node._attributes:\n node.lineno = lineno\n if \"col_offset\" in node._attributes:\n node.col_offset = col_offset\n for child in ast.iter_child_nodes(node):\n _fix(child, lineno, col_offset)\n\n _fix(node, lineno, col_offset)\n return node", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Compare_AssertionRewriter.visit_Compare.return.res_self_explanation_par": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Compare_AssertionRewriter.visit_Compare.return.res_self_explanation_par", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1038, "end_line": 1074, "span_ids": ["AssertionRewriter.visit_Compare"], "tokens": 427}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def visit_Compare(self, comp: ast.Compare) -> Tuple[ast.expr, str]:\n self.push_format_context()\n left_res, left_expl = self.visit(comp.left)\n if isinstance(comp.left, (ast.Compare, ast.BoolOp)):\n left_expl = f\"({left_expl})\"\n res_variables = [self.variable() for i in range(len(comp.ops))]\n load_names = [ast.Name(v, ast.Load()) for v in res_variables]\n store_names = [ast.Name(v, ast.Store()) for v in res_variables]\n it = zip(range(len(comp.ops)), comp.ops, 
comp.comparators)\n expls = []\n syms = []\n results = [left_res]\n for i, op, next_operand in it:\n next_res, next_expl = self.visit(next_operand)\n if isinstance(next_operand, (ast.Compare, ast.BoolOp)):\n next_expl = f\"({next_expl})\"\n results.append(next_res)\n sym = BINOP_MAP[op.__class__]\n syms.append(ast.Str(sym))\n expl = f\"{left_expl} {sym} {next_expl}\"\n expls.append(ast.Str(expl))\n res_expr = ast.Compare(left_res, [op], [next_res])\n self.statements.append(ast.Assign([store_names[i]], res_expr))\n left_res, left_expl = next_res, next_expl\n # Use pytest.assertion.util._reprcompare if that's available.\n expl_call = self.helper(\n \"_call_reprcompare\",\n ast.Tuple(syms, ast.Load()),\n ast.Tuple(load_names, ast.Load()),\n ast.Tuple(expls, ast.Load()),\n ast.Tuple(results, ast.Load()),\n )\n if len(comp.ops) > 1:\n res: ast.expr = ast.BoolOp(ast.And(), load_names)\n else:\n res = load_names[0]\n return res, self.explanation_param(self.pop_format_context(expl_call))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_try_makedirs_try_makedirs.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_try_makedirs_try_makedirs.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1077, "end_line": 1096, "span_ids": ["try_makedirs"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def try_makedirs(cache_dir: Path) -> bool:\n \"\"\"Attempt to create the given directory and sub-directories exist.\n\n Returns True if successful or if it already exists.\n \"\"\"\n try:\n os.makedirs(os.fspath(cache_dir), exist_ok=True)\n except (FileNotFoundError, NotADirectoryError, FileExistsError):\n # One of the path components was not a directory:\n # - we're in a zip file\n # - it is a file\n return False\n except PermissionError:\n return False\n except OSError as e:\n # as of now, EROFS doesn't have an equivalent OSError-subclass\n if e.errno == errno.EROFS:\n return False\n raise\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_get_cache_dir_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_get_cache_dir_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1099, "end_line": 1111, "span_ids": ["get_cache_dir"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_cache_dir(file_path: Path) -> Path:\n \"\"\"Return the cache directory to write .pyc files for the given .py file path.\"\"\"\n if sys.version_info >= (3, 8) and sys.pycache_prefix:\n # given:\n # prefix = '/tmp/pycs'\n # path = '/home/user/proj/test_app.py'\n # we want:\n # '/tmp/pycs/home/user/proj'\n return Path(sys.pycache_prefix) / Path(*file_path.parts[1:-1])\n else:\n # classic pycache directory\n return file_path.parent / \"__pycache__\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_verbose__surrounding_parens_on_own_lines.if_closing_in_.lines_lines_closi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_verbose__surrounding_parens_on_own_lines.if_closing_in_.lines_lines_closi", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 248, "end_line": 269, "span_ids": ["_surrounding_parens_on_own_lines", "_compare_eq_verbose"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _compare_eq_verbose(left: Any, right: Any) -> List[str]:\n keepends = True\n left_lines = repr(left).splitlines(keepends)\n right_lines = repr(right).splitlines(keepends)\n\n explanation: List[str] = []\n explanation += [\"+\" + line for line in left_lines]\n explanation += [\"-\" + line for line in right_lines]\n\n return explanation\n\n\ndef _surrounding_parens_on_own_lines(lines: List[str]) -> None:\n \"\"\"Move opening/closing parenthesis/bracket to own lines.\"\"\"\n opening = lines[0][:1]\n if opening in [\"(\", \"[\", \"{\"]:\n lines[0] = \" \" + lines[0][1:]\n lines[:] = [opening] + lines\n closing = lines[-1][-1:]\n if closing in [\")\", \"]\", \"}\"]:\n lines[-1] = lines[-1][:-1] + \",\"\n lines[:] = lines + [closing]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_iterable__compare_eq_iterable.return.explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_iterable__compare_eq_iterable.return.explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 262, "end_line": 290, "span_ids": ["_compare_eq_iterable"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def 
_compare_eq_iterable(\n left: Iterable[Any], right: Iterable[Any], verbose: int = 0\n) -> List[str]:\n if not verbose:\n return [\"Use -v to get the full diff\"]\n # dynamic import to speedup pytest\n import difflib\n\n left_formatting = pprint.pformat(left).splitlines()\n right_formatting = pprint.pformat(right).splitlines()\n\n # Re-format for different output lengths.\n lines_left = len(left_formatting)\n lines_right = len(right_formatting)\n if lines_left != lines_right:\n left_formatting = _pformat_dispatch(left).splitlines()\n right_formatting = _pformat_dispatch(right).splitlines()\n\n if lines_left > 1 or lines_right > 1:\n _surrounding_parens_on_own_lines(left_formatting)\n _surrounding_parens_on_own_lines(right_formatting)\n\n explanation = [\"Full diff:\"]\n # \"right\" is the expected base against which we compare \"left\",\n # see https://github.com/pytest-dev/pytest/issues/3333\n explanation.extend(\n line.rstrip() for line in difflib.ndiff(right_formatting, left_formatting)\n )\n return explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_set__compare_eq_set.return.explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_set__compare_eq_set.return.explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 350, "end_line": 364, "span_ids": ["_compare_eq_set"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _compare_eq_set(\n left: AbstractSet[Any], right: AbstractSet[Any], verbose: int = 0\n) -> List[str]:\n explanation = []\n diff_left = left - right\n diff_right = right - left\n if diff_left:\n explanation.append(\"Extra items in the left set:\")\n for item in diff_left:\n explanation.append(saferepr(item))\n if diff_right:\n explanation.append(\"Extra items in the right set:\")\n for item in diff_right:\n explanation.append(saferepr(item))\n return explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_dict__compare_eq_dict.return.explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_dict__compare_eq_dict.return.explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 373, "end_line": 411, "span_ids": ["_compare_eq_dict"], "tokens": 382}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "def _compare_eq_dict(\n left: Mapping[Any, Any], right: Mapping[Any, Any], verbose: int = 0\n) -> List[str]:\n explanation: List[str] = []\n set_left = set(left)\n set_right = set(right)\n common = set_left.intersection(set_right)\n same = {k: left[k] for k in common if left[k] == right[k]}\n if same and verbose < 2:\n explanation += [\"Omitting %s identical items, use -vv to show\" % len(same)]\n elif same:\n explanation += [\"Common items:\"]\n explanation += pprint.pformat(same).splitlines()\n diff = {k for k in common if left[k] != right[k]}\n if diff:\n explanation += [\"Differing items:\"]\n for k in diff:\n explanation += [saferepr({k: left[k]}) + \" != \" + saferepr({k: right[k]})]\n extra_left = set_left - set_right\n len_extra_left = len(extra_left)\n if len_extra_left:\n explanation.append(\n \"Left contains %d more item%s:\"\n % (len_extra_left, \"\" if len_extra_left == 1 else \"s\")\n )\n explanation.extend(\n pprint.pformat({k: left[k] for k in extra_left}).splitlines()\n )\n extra_right = set_right - set_left\n len_extra_right = len(extra_right)\n if len_extra_right:\n explanation.append(\n \"Right contains %d more item%s:\"\n % (len_extra_right, \"\" if len_extra_right == 1 else \"s\")\n )\n explanation.extend(\n pprint.pformat({k: right[k] for k in extra_right}).splitlines()\n )\n return explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__readline_workaround__readline_workaround.if_sys_platform_startswit.try_.except_ImportError_.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__readline_workaround__readline_workaround.if_sys_platform_startswit.try_.except_ImportError_.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 71, "end_line": 92, "span_ids": ["_readline_workaround"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _readline_workaround() -> None:\n \"\"\"Ensure readline is imported so that it attaches to the correct stdio\n handles on Windows.\n\n Pdb uses readline support where available--when not running from the Python\n prompt, the readline module is not imported until running the pdb REPL. If\n running pytest with the --pdb option this means the readline module is not\n imported until after I/O capture has been started.\n\n This is a problem for pyreadline, which is often used to implement readline\n support on Windows, as it does not attach to the correct handles for stdout\n and/or stdin if they have been redirected by the FDCapture mechanism. 
This\n workaround ensures that readline is imported before I/O capture is setup so\n that it can attach to the actual stdin/out for the console.\n\n See https://github.com/pytest-dev/pytest/pull/1281.\n \"\"\"\n if sys.platform.startswith(\"win32\"):\n try:\n import readline # noqa: F401\n except ImportError:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._try_load_conftest_PytestPluginManager._rget_with_confmod.raise_KeyError_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._try_load_conftest_PytestPluginManager._rget_with_confmod.raise_KeyError_name_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 514, "end_line": 564, "span_ids": ["PytestPluginManager._rget_with_confmod", "PytestPluginManager._try_load_conftest", "PytestPluginManager._getconftestmodules"], "tokens": 414}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def _try_load_conftest(\n self, anchor: Path, importmode: Union[str, ImportMode]\n ) -> None:\n self._getconftestmodules(anchor, importmode)\n # let's also consider test* subdirs\n if anchor.is_dir():\n for x in anchor.glob(\"test*\"):\n if x.is_dir():\n self._getconftestmodules(x, importmode)\n\n @lru_cache(maxsize=128)\n def _getconftestmodules(\n self,\n path: Path,\n importmode: Union[str, ImportMode],\n ) -> List[types.ModuleType]:\n if self._noconftest:\n return []\n\n if path.is_file():\n directory = path.parent\n else:\n directory = path\n\n # XXX these days we may rather want to use config.rootpath\n # and allow users to opt into looking into the rootdir parent\n # directories instead of requiring to specify confcutdir.\n clist = []\n for parent in reversed((directory, *directory.parents)):\n if self._confcutdir and parent in self._confcutdir.parents:\n continue\n conftestpath = parent / \"conftest.py\"\n if conftestpath.is_file():\n mod = self._importconftest(conftestpath, importmode)\n clist.append(mod)\n self._dirpath2confmods[directory] = clist\n return clist\n\n def _rget_with_confmod(\n self,\n name: str,\n path: Path,\n importmode: Union[str, ImportMode],\n ) -> Tuple[types.ModuleType, Any]:\n modules = self._getconftestmodules(path, importmode)\n for mod in reversed(modules):\n try:\n return mod, getattr(mod, name)\n except AttributeError:\n continue\n raise KeyError(name)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.addoption_Parser.parse.return.self_optparser_parse_args": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.addoption_Parser.parse.return.self_optparser_parse_args", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 85, "end_line": 110, "span_ids": ["Parser.parse", "Parser.addoption"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Parser:\n\n def addoption(self, *opts: str, **attrs: Any) -> None:\n \"\"\"Register a command line option.\n\n :opts: Option names, can be short or long options.\n :attrs: Same attributes which the ``add_argument()`` function of the\n `argparse library `_\n accepts.\n\n After command line parsing, options are available on the pytest config\n object via ``config.option.NAME`` where ``NAME`` is usually set\n by passing a ``dest`` attribute, for example\n ``addoption(\"--long\", dest=\"NAME\", ...)``.\n \"\"\"\n self._anonymous.addoption(*opts, **attrs)\n\n def parse(\n self,\n args: Sequence[Union[str, \"os.PathLike[str]\"]],\n namespace: Optional[argparse.Namespace] = None,\n ) -> argparse.Namespace:\n from _pytest._argcomplete import try_argcomplete\n\n self.optparser = self._getparser()\n try_argcomplete(self.optparser)\n strargs = [os.fspath(x) for x in args]\n return self.optparser.parse_args(strargs, namespace=namespace)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser._getparser_Parser._getparser.return.optparser": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser._getparser_Parser._getparser.return.optparser", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 112, "end_line": 129, "span_ids": ["Parser._getparser"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Parser:\n\n def _getparser(self) -> \"MyOptionParser\":\n from _pytest._argcomplete import filescompleter\n\n optparser = MyOptionParser(self, self.extra_info, prog=self.prog)\n groups = self._groups + [self._anonymous]\n for group in groups:\n if group.options:\n desc = group.description or group.name\n arggroup = optparser.add_argument_group(desc)\n for option in group.options:\n n = option.names()\n a = option.attrs()\n arggroup.add_argument(*n, **a)\n file_or_dir_arg = optparser.add_argument(FILE_OR_DIR, nargs=\"*\")\n # bash like autocompletion for dirs (appending '/')\n # Type ignored because typeshed doesn't know about argcomplete.\n file_or_dir_arg.completer = filescompleter # type: ignore\n return optparser", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_DropShorterLongHelpFormatter_DropShorterLongHelpFormatter.__init__.super___init___args_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_DropShorterLongHelpFormatter_DropShorterLongHelpFormatter.__init__.super___init___args_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 449, "end_line": 461, "span_ids": ["DropShorterLongHelpFormatter"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DropShorterLongHelpFormatter(argparse.HelpFormatter):\n \"\"\"Shorten help for long options that differ only in extra hyphens.\n\n - Collapse **long** options that are the same except for extra hyphens.\n - Shortcut if there are only two options and one of them is a short one.\n - Cache result on the action object as this is called at least 2 times.\n \"\"\"\n\n def __init__(self, *args: Any, **kwargs: Any) -> None:\n # Use more accurate terminal width.\n if \"width\" not in kwargs:\n kwargs[\"width\"] = _pytest._io.get_terminal_width()\n super().__init__(*args, **kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_DropShorterLongHelpFormatter._format_action_invocation_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_DropShorterLongHelpFormatter._format_action_invocation_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 463, "end_line": 512, "span_ids": ["DropShorterLongHelpFormatter._split_lines", "DropShorterLongHelpFormatter._format_action_invocation"], "tokens": 484}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DropShorterLongHelpFormatter(argparse.HelpFormatter):\n\n def _format_action_invocation(self, action: argparse.Action) -> str:\n orgstr = argparse.HelpFormatter._format_action_invocation(self, action)\n if orgstr and orgstr[0] != \"-\": # only optional arguments\n return orgstr\n res: Optional[str] = getattr(action, \"_formatted_action_invocation\", None)\n if res:\n return res\n options = orgstr.split(\", \")\n if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):\n # a shortcut for '-h, --help' or '--abc', '-a'\n action._formatted_action_invocation = orgstr # type: ignore\n return orgstr\n return_list = []\n short_long: Dict[str, str] = {}\n for option 
in options:\n if len(option) == 2 or option[2] == \" \":\n continue\n if not option.startswith(\"--\"):\n raise ArgumentError(\n 'long optional argument without \"--\": [%s]' % (option), option\n )\n xxoption = option[2:]\n shortened = xxoption.replace(\"-\", \"\")\n if shortened not in short_long or len(short_long[shortened]) < len(\n xxoption\n ):\n short_long[shortened] = xxoption\n # now short_long has been filled out to the longest with dashes\n # **and** we keep the right option ordering from add_argument\n for option in options:\n if len(option) == 2 or option[2] == \" \":\n return_list.append(option)\n if option[2:] == short_long.get(option.replace(\"-\", \"\")):\n return_list.append(option.replace(\" \", \"=\", 1))\n formatted_action_invocation = \", \".join(return_list)\n action._formatted_action_invocation = formatted_action_invocation # type: ignore\n return formatted_action_invocation\n\n def _split_lines(self, text, width):\n \"\"\"Wrap lines after splitting on original newlines.\n\n This allows to have explicit line breaks in the help text.\n \"\"\"\n import textwrap\n\n lines = []\n for line in text.splitlines():\n lines.extend(textwrap.wrap(line.strip(), width))\n return lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_runner__get_runner.return.RUNNER_CLASS_type_ig": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_runner__get_runner.return.RUNNER_CLASS_type_ig", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 190, "end_line": 207, "span_ids": ["_get_runner"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_runner(\n checker: Optional[\"doctest.OutputChecker\"] = None,\n verbose: Optional[bool] = None,\n optionflags: int = 0,\n continue_on_failure: bool = True,\n) -> \"doctest.DocTestRunner\":\n # We need this in order to do a lazy import on doctest\n global RUNNER_CLASS\n if RUNNER_CLASS is None:\n RUNNER_CLASS = _init_runner_class()\n # Type ignored because the continue_on_failure argument is only defined on\n # PytestDoctestRunner, which is lazily defined so can't be used as a type.\n return RUNNER_CLASS( # type: ignore\n checker=checker,\n verbose=verbose,\n optionflags=optionflags,\n continue_on_failure=continue_on_failure,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class__init_checker_class.LiteralsOutputChecker._number_re.re_compile_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class__init_checker_class.LiteralsOutputChecker._number_re.re_compile_", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": 
"implementation", "start_line": 582, "end_line": 614, "span_ids": ["_init_checker_class.LiteralsOutputChecker:2", "_init_checker_class", "_init_checker_class.LiteralsOutputChecker"], "tokens": 302}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _init_checker_class() -> Type[\"doctest.OutputChecker\"]:\n import doctest\n import re\n\n class LiteralsOutputChecker(doctest.OutputChecker):\n # Based on doctest_nose_plugin.py from the nltk project\n # (https://github.com/nltk/nltk) and on the \"numtest\" doctest extension\n # by Sebastien Boisgerault (https://github.com/boisgera/numtest).\n\n _unicode_literal_re = re.compile(r\"(\\W|^)[uU]([rR]?[\\'\\\"])\", re.UNICODE)\n _bytes_literal_re = re.compile(r\"(\\W|^)[bB]([rR]?[\\'\\\"])\", re.UNICODE)\n _number_re = re.compile(\n r\"\"\"\n (?P\n (?P\n (?P [+-]?\\d*)\\.(?P\\d+)\n |\n (?P [+-]?\\d+)\\.\n )\n (?:\n [Ee]\n (?P [+-]?\\d+)\n )?\n |\n (?P [+-]?\\d+)\n (?:\n [Ee]\n (?P [+-]?\\d+)\n )\n )\n \"\"\",\n re.VERBOSE,\n )\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class.LiteralsOutputChecker.check_output__init_checker_class.LiteralsOutputChecker.check_output.return.doctest_OutputChecker_che": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class.LiteralsOutputChecker.check_output__init_checker_class.LiteralsOutputChecker.check_output.return.doctest_OutputChecker_che", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 616, "end_line": 641, "span_ids": ["_init_checker_class.LiteralsOutputChecker:2"], "tokens": 302}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _init_checker_class() -> Type[\"doctest.OutputChecker\"]:\n\n class LiteralsOutputChecker(doctest.OutputChecker):\n # Based on doctest_nose_plugin.py from the nltk project\n # (https://github.com/nltk/nltk) and on the \"numtest\" doctest extension\n\n def check_output(self, want: str, got: str, optionflags: int) -> bool:\n if doctest.OutputChecker.check_output(self, want, got, optionflags):\n return True\n\n allow_unicode = optionflags & _get_allow_unicode_flag()\n allow_bytes = optionflags & _get_allow_bytes_flag()\n allow_number = optionflags & _get_number_flag()\n\n if not allow_unicode and not allow_bytes and not allow_number:\n return False\n\n def remove_prefixes(regex: Pattern[str], txt: str) -> str:\n return re.sub(regex, r\"\\1\\2\", txt)\n\n if allow_unicode:\n want = remove_prefixes(self._unicode_literal_re, want)\n got = remove_prefixes(self._unicode_literal_re, got)\n\n if allow_bytes:\n want = remove_prefixes(self._bytes_literal_re, want)\n got = 
remove_prefixes(self._bytes_literal_re, got)\n\n if allow_number:\n got = self._remove_unwanted_precision(want, got)\n\n return doctest.OutputChecker.check_output(self, want, got, optionflags)\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class.LiteralsOutputChecker._remove_unwanted_precision__init_checker_class.return.LiteralsOutputChecker": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_checker_class.LiteralsOutputChecker._remove_unwanted_precision__init_checker_class.return.LiteralsOutputChecker", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 643, "end_line": 670, "span_ids": ["_init_checker_class.LiteralsOutputChecker:2"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _init_checker_class() -> Type[\"doctest.OutputChecker\"]:\n\n class LiteralsOutputChecker(doctest.OutputChecker):\n # Based on doctest_nose_plugin.py from the nltk project\n # (https://github.com/nltk/nltk) and on the \"numtest\" doctest extension\n\n def _remove_unwanted_precision(self, want: str, got: str) -> str:\n wants = list(self._number_re.finditer(want))\n gots = list(self._number_re.finditer(got))\n if len(wants) != len(gots):\n return got\n offset = 0\n for w, g in zip(wants, gots):\n fraction: Optional[str] = w.group(\"fraction\")\n exponent: Optional[str] = w.group(\"exponent1\")\n if exponent is None:\n exponent = w.group(\"exponent2\")\n if fraction is None:\n precision = 0\n else:\n precision = len(fraction)\n if exponent is not None:\n precision -= int(exponent)\n if float(w.group()) == approx(float(g.group()), abs=10 ** -precision):\n # They're close enough. 
Replace the text we actually\n # got with the text we want, so that it will match when we\n # check the string literally.\n got = (\n got[: g.start() + offset] + w.group() + got[g.end() + offset :]\n )\n offset += w.end() - w.start() - (g.end() - g.start())\n return got\n\n return LiteralsOutputChecker", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_report_choice_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_report_choice_", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 714, "end_line": 736, "span_ids": ["_get_report_choice", "doctest_namespace"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_report_choice(key: str) -> int:\n \"\"\"Return the actual `doctest` module flag value.\n\n We want to do it as late as possible to avoid importing `doctest` and all\n its dependencies when parsing options, as it adds overhead and breaks tests.\n \"\"\"\n import doctest\n\n return {\n DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,\n DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,\n DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,\n DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,\n DOCTEST_REPORT_CHOICE_NONE: 0,\n }[key]\n\n\n@pytest.fixture(scope=\"session\")\ndef doctest_namespace() -> Dict[str, Any]:\n \"\"\"Fixture that returns a :py:class:`dict` that will be injected into the\n namespace of doctests.\"\"\"\n return dict()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_yield_fixture_yield_fixture.return.fixture_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_yield_fixture_yield_fixture.return.fixture_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1350, "end_line": 1373, "span_ids": ["yield_fixture"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def yield_fixture(\n fixture_function=None,\n *args,\n scope=\"function\",\n params=None,\n autouse=False,\n ids=None,\n name=None,\n):\n \"\"\"(Return a) decorator to mark a yield-fixture factory function.\n\n .. 
deprecated:: 3.0\n Use :py:func:`pytest.fixture` directly instead.\n \"\"\"\n warnings.warn(YIELD_FIXTURE, stacklevel=2)\n return fixture(\n fixture_function,\n *args,\n scope=scope,\n params=params,\n autouse=autouse,\n ids=ids,\n name=name,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline.format_get_option_ini.for_name_in_names_.if_ret_.return.ret": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline.format_get_option_ini.for_name_in_names_.if_ret_.return.ret", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 169, "end_line": 199, "span_ids": ["PercentStyleMultiline.format", "get_option_ini"], "tokens": 267}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PercentStyleMultiline(logging.PercentStyle):\n\n def format(self, record: logging.LogRecord) -> str:\n if \"\\n\" in record.message:\n if hasattr(record, \"auto_indent\"):\n # Passed in from the \"extra={}\" kwarg on the call to logging.log().\n auto_indent = self._get_auto_indent(record.auto_indent) # type: ignore[attr-defined]\n else:\n auto_indent = self._auto_indent\n\n if auto_indent:\n lines = record.message.splitlines()\n formatted = self._fmt % self._update_message(record.__dict__, lines[0])\n\n if auto_indent < 0:\n indentation = _remove_ansi_escape_sequences(formatted).find(\n lines[0]\n )\n else:\n # Optimizes logging by allowing a fixed indentation.\n indentation = auto_indent\n lines[0] = formatted\n return (\"\\n\" + \" \" * indentation).join(lines)\n return self._fmt % record.__dict__\n\n\ndef get_option_ini(config: Config, *names: str):\n for name in names:\n ret = config.getoption(name) # 'default' arg won't work as expected\n if ret is None:\n ret = config.getini(name)\n if ret:\n return ret", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_param_param.return.ParameterSet_param_value": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_param_param.return.ParameterSet_param_value", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 47, "end_line": 71, "span_ids": ["param"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def param(\n *values: object,\n marks: Union[MarkDecorator, Collection[Union[MarkDecorator, Mark]]] = (),\n id: 
Optional[str] = None,\n) -> ParameterSet:\n \"\"\"Specify a parameter in `pytest.mark.parametrize`_ calls or\n :ref:`parametrized fixtures `.\n\n .. code-block:: python\n\n @pytest.mark.parametrize(\n \"test_input,expected\",\n [\n (\"3+5\", 8),\n pytest.param(\"6*9\", 42, marks=pytest.mark.xfail),\n ],\n )\n def test_eval(test_input, expected):\n assert eval(test_input) == expected\n\n :param values: Variable args of the values of the parameter set, in order.\n :keyword marks: A single mark or a list of marks to be applied to this parameter set.\n :keyword str id: The id to attribute to this parameter set.\n \"\"\"\n return ParameterSet.param(*values, marks=marks, id=id)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_deselect_by_keyword_deselect_by_keyword.if_deselected_.items_remaining": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_deselect_by_keyword_deselect_by_keyword.if_deselected_.items_remaining", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 187, "end_line": 222, "span_ids": ["deselect_by_keyword"], "tokens": 274}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def deselect_by_keyword(items: \"List[Item]\", config: Config) -> None:\n keywordexpr = config.option.keyword.lstrip()\n if not keywordexpr:\n return\n\n if keywordexpr.startswith(\"-\"):\n # To be removed in pytest 7.0.0.\n warnings.warn(MINUS_K_DASH, stacklevel=2)\n keywordexpr = \"not \" + keywordexpr[1:]\n selectuntil = False\n if keywordexpr[-1:] == \":\":\n # To be removed in pytest 7.0.0.\n warnings.warn(MINUS_K_COLON, stacklevel=2)\n selectuntil = True\n keywordexpr = keywordexpr[:-1]\n\n try:\n expression = Expression.compile(keywordexpr)\n except ParseError as e:\n raise UsageError(\n f\"Wrong expression passed to '-k': {keywordexpr}: {e}\"\n ) from None\n\n remaining = []\n deselected = []\n for colitem in items:\n if keywordexpr and not expression.evaluate(KeywordMatcher.from_item(colitem)):\n deselected.append(colitem)\n else:\n if selectuntil:\n keywordexpr = None\n remaining.append(colitem)\n\n if deselected:\n config.hook.pytest_deselected(items=deselected)\n items[:] = remaining", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Item.add_report_section_Item.add_report_section.if_content_.self__report_sections_app": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Item.add_report_section_Item.add_report_section.if_content_.self__report_sections_app", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 573, "end_line": 588, "span_ids": ["Item.add_report_section"], 
"tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Item(Node):\n\n def add_report_section(self, when: str, key: str, content: str) -> None:\n \"\"\"Add a new report section, similar to what's done internally to add\n stdout and stderr captured output::\n\n item.add_report_section(\"call\", \"stdout\", \"report section contents\")\n\n :param str when:\n One of the possible capture states, ``\"setup\"``, ``\"call\"``, ``\"teardown\"``.\n :param str key:\n Name of the section, can be customized at will. Pytest uses ``\"stdout\"`` and\n ``\"stderr\"`` internally.\n :param str content:\n The full contents as a string.\n \"\"\"\n if content:\n self._report_sections.append((when, key, content))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Item.reportinfo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Item.reportinfo_", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 590, "end_line": 600, "span_ids": ["Item.location", "Item.reportinfo"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Item(Node):\n\n def reportinfo(self) -> Tuple[Union[py.path.local, str], Optional[int], str]:\n return self.fspath, None, \"\"\n\n @cached_property\n def location(self) -> Tuple[str, Optional[int], str]:\n location = self.reportinfo()\n fspath = absolutepath(str(location[0]))\n relfspath = self.session._node_location_to_relpath(fspath)\n assert type(location[2]) is str\n return (relfspath, location[1], location[2])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_create_new_paste_create_new_paste.if_m_.else_.return._bad_response_invalid_fo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_create_new_paste_create_new_paste.if_m_.else_.return._bad_response_invalid_fo", "embedding": null, "metadata": {"file_path": "src/_pytest/pastebin.py", "file_name": "pastebin.py", "file_type": "text/x-python", "category": "implementation", "start_line": 69, "end_line": 91, "span_ids": ["create_new_paste"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_new_paste(contents: Union[str, bytes]) -> 
str:\n \"\"\"Create a new paste using the bpaste.net service.\n\n :contents: Paste contents string.\n :returns: URL to the pasted contents, or an error message.\n \"\"\"\n import re\n from urllib.request import urlopen\n from urllib.parse import urlencode\n\n params = {\"code\": contents, \"lexer\": \"text\", \"expiry\": \"1week\"}\n url = \"https://bpaste.net\"\n try:\n response: str = (\n urlopen(url, data=urlencode(params).encode(\"ascii\")).read().decode(\"utf-8\")\n )\n except OSError as exc_info: # urllib errors\n return \"bad response: %s\" % exc_info\n m = re.search(r'href=\"/raw/(\\w+)\"', response)\n if m:\n return \"{}/show/{}\".format(url, m.group(1))\n else:\n return \"bad response: invalid format ('\" + response + \"')\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_on_rm_rf_error_on_rm_rf_error.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_on_rm_rf_error_on_rm_rf_error.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 72, "end_line": 120, "span_ids": ["on_rm_rf_error"], "tokens": 372}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def on_rm_rf_error(func, path: str, exc, *, start_path: Path) -> bool:\n \"\"\"Handle known read-only errors during rmtree.\n\n The returned value is used only by our own tests.\n \"\"\"\n exctype, excvalue = exc[:2]\n\n # Another process removed the file in the middle of the \"rm_rf\" (xdist for example).\n # More context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018\n if isinstance(excvalue, FileNotFoundError):\n return False\n\n if not isinstance(excvalue, PermissionError):\n warnings.warn(\n PytestWarning(f\"(rm_rf) error removing {path}\\n{exctype}: {excvalue}\")\n )\n return False\n\n if func not in (os.rmdir, os.remove, os.unlink):\n if func not in (os.open,):\n warnings.warn(\n PytestWarning(\n \"(rm_rf) unknown function {} when removing {}:\\n{}: {}\".format(\n func, path, exctype, excvalue\n )\n )\n )\n return False\n\n # Chmod + retry.\n import stat\n\n def chmod_rw(p: str) -> None:\n mode = os.stat(p).st_mode\n os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR)\n\n # For files, we need to recursively go upwards in the directories to\n # ensure they all are also writable.\n p = Path(path)\n if p.is_file():\n for parent in p.parents:\n chmod_rw(str(parent))\n # Stop when we reach the original path passed to rm_rf.\n if parent == start_path:\n break\n chmod_rw(str(path))\n\n func(path)\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.countoutcomes_HookRecorder.clear.self_calls_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.countoutcomes_HookRecorder.clear.self_calls_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 422, "end_line": 438, "span_ids": ["HookRecorder.countoutcomes", "HookRecorder.assertoutcome", "HookRecorder.clear"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HookRecorder:\n\n def countoutcomes(self) -> List[int]:\n return [len(x) for x in self.listoutcomes()]\n\n def assertoutcome(self, passed: int = 0, skipped: int = 0, failed: int = 0) -> None:\n __tracebackhide__ = True\n from _pytest.pytester_assertions import assertoutcome\n\n outcomes = self.listoutcomes()\n assertoutcome(\n outcomes,\n passed=passed,\n skipped=skipped,\n failed=failed,\n )\n\n def clear(self) -> None:\n self.calls[:] = []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._no_match_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._no_match_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1929, "end_line": 1961, "span_ids": ["LineMatcher._fail", "LineMatcher.str", "LineMatcher._no_match_line"], "tokens": 303}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n\n def _no_match_line(\n self, pat: str, match_func: Callable[[str, str], bool], match_nickname: str\n ) -> None:\n \"\"\"Ensure captured lines does not have a the given pattern, using ``fnmatch.fnmatch``.\n\n :param str pat: The pattern to match lines.\n \"\"\"\n __tracebackhide__ = True\n nomatch_printed = False\n wnick = len(match_nickname) + 1\n for line in self.lines:\n if match_func(line, pat):\n msg = f\"{match_nickname}: {pat!r}\"\n self._log(msg)\n self._log(\"{:>{width}}\".format(\"with:\", width=wnick), repr(line))\n self._fail(msg)\n else:\n if not nomatch_printed:\n self._log(\"{:>{width}}\".format(\"nomatch:\", width=wnick), repr(pat))\n nomatch_printed = True\n self._log(\"{:>{width}}\".format(\"and:\", width=wnick), repr(line))\n self._log_output = []\n\n def _fail(self, msg: str) -> None:\n __tracebackhide__ = True\n log_text = self._log_text\n self._log_output = []\n fail(log_text)\n\n def str(self) -> str:\n \"\"\"Return the entire original text.\"\"\"\n return str(self)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_CallSpec2.setmulti2_CallSpec2.setmulti2.self_marks_extend_normali": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_CallSpec2.setmulti2_CallSpec2.setmulti2.self_marks_extend_normali", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 926, "end_line": 949, "span_ids": ["CallSpec2.setmulti2"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass CallSpec2:\n\n def setmulti2(\n self,\n valtypes: Mapping[str, \"Literal['params', 'funcargs']\"],\n argnames: Sequence[str],\n valset: Iterable[object],\n id: str,\n marks: Iterable[Union[Mark, MarkDecorator]],\n scopenum: int,\n param_index: int,\n ) -> None:\n for arg, val in zip(argnames, valset):\n if arg in self.params or arg in self.funcargs:\n raise ValueError(f\"duplicate {arg!r}\")\n valtype_for_arg = valtypes[arg]\n if valtype_for_arg == \"params\":\n self.params[arg] = val\n elif valtype_for_arg == \"funcargs\":\n self.funcargs[arg] = val\n else: # pragma: no cover\n assert False, f\"Unhandled valtype for arg: {valtype_for_arg}\"\n self.indices[arg] = param_index\n self._arg2scopenum[arg] = scopenum\n self._idlist.append(id)\n self.marks.extend(normalize_mark_list(marks))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsRecorder.__exit___WarningsRecorder.__exit__.self._entered.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_WarningsRecorder.__exit___WarningsRecorder.__exit__.self._entered.False", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 214, "end_line": 228, "span_ids": ["WarningsRecorder.__exit__"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class WarningsRecorder(warnings.catch_warnings):\n\n def __exit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n if not self._entered:\n __tracebackhide__ = True\n raise RuntimeError(\"Cannot exit %r without entering first\" % self)\n\n super().__exit__(exc_type, exc_val, exc_tb)\n\n # Built-in catch_warnings does not reset entered state so we do it\n # manually here for this context manager to become reusable.\n self._entered = False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport._get_verbose_word_BaseReport._from_json.return.cls_kwargs_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport._get_verbose_word_BaseReport._from_json.return.cls_kwargs_", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 194, "end_line": 221, "span_ids": ["BaseReport._from_json", "BaseReport._to_json", "BaseReport._get_verbose_word"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseReport:\n\n def _get_verbose_word(self, config: Config):\n _category, _short, verbose = config.hook.pytest_report_teststatus(\n report=self, config=config\n )\n return verbose\n\n def _to_json(self) -> Dict[str, Any]:\n \"\"\"Return the contents of this report as a dict of builtin entries,\n suitable for serialization.\n\n This was originally the serialize_report() function from xdist (ca03269).\n\n Experimental method.\n \"\"\"\n return _report_to_json(self)\n\n @classmethod\n def _from_json(cls: Type[_R], reportdict: Dict[str, object]) -> _R:\n \"\"\"Create either a TestReport or CollectReport, depending on the calling class.\n\n It is the callers responsibility to know which class to pass here.\n\n This was originally the serialize_report() function from xdist (ca03269).\n\n Experimental method.\n \"\"\"\n kwargs = _report_kwargs_from_json(reportdict)\n return cls(**kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json.d__report_to_json.return.d": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json.d__report_to_json.return.d", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 492, "end_line": 507, "span_ids": ["_report_to_json"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _report_to_json(report: BaseReport) -> Dict[str, Any]:\n # ... 
other code\n\n d = report.__dict__.copy()\n if hasattr(report.longrepr, \"toterminal\"):\n if hasattr(report.longrepr, \"reprtraceback\") and hasattr(\n report.longrepr, \"reprcrash\"\n ):\n d[\"longrepr\"] = serialize_exception_longrepr(report)\n else:\n d[\"longrepr\"] = str(report.longrepr)\n else:\n d[\"longrepr\"] = report.longrepr\n for name in d:\n if isinstance(d[name], (py.path.local, Path)):\n d[name] = str(d[name])\n elif name == \"result\":\n d[name] = None # for now\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_kwargs_from_json__report_kwargs_from_json.deserialize_repr_entry.return.reprentry": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_kwargs_from_json__report_kwargs_from_json.deserialize_repr_entry.return.reprentry", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 510, "end_line": 542, "span_ids": ["_report_kwargs_from_json"], "tokens": 298}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _report_kwargs_from_json(reportdict: Dict[str, Any]) -> Dict[str, Any]:\n \"\"\"Return **kwargs that can be used to construct a TestReport or\n CollectReport instance.\n\n This was originally the serialize_report() function from xdist (ca03269).\n \"\"\"\n\n def deserialize_repr_entry(entry_data):\n data = entry_data[\"data\"]\n entry_type = entry_data[\"type\"]\n if entry_type == \"ReprEntry\":\n reprfuncargs = None\n reprfileloc = None\n reprlocals = None\n if data[\"reprfuncargs\"]:\n reprfuncargs = ReprFuncArgs(**data[\"reprfuncargs\"])\n if data[\"reprfileloc\"]:\n reprfileloc = ReprFileLocation(**data[\"reprfileloc\"])\n if data[\"reprlocals\"]:\n reprlocals = ReprLocals(data[\"reprlocals\"][\"lines\"])\n\n reprentry: Union[ReprEntry, ReprEntryNative] = ReprEntry(\n lines=data[\"lines\"],\n reprfuncargs=reprfuncargs,\n reprlocals=reprlocals,\n reprfileloc=reprfileloc,\n style=data[\"style\"],\n )\n elif entry_type == \"ReprEntryNative\":\n reprentry = ReprEntryNative(data[\"lines\"])\n else:\n _report_unserialization_failure(entry_type, TestReport, reportdict)\n return reprentry\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_kwargs_from_json.deserialize_repr_traceback_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_kwargs_from_json.deserialize_repr_traceback_", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 544, "end_line": 589, "span_ids": ["_report_kwargs_from_json"], "tokens": 370}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _report_kwargs_from_json(reportdict: Dict[str, Any]) -> Dict[str, Any]:\n # ... other code\n\n def deserialize_repr_traceback(repr_traceback_dict):\n repr_traceback_dict[\"reprentries\"] = [\n deserialize_repr_entry(x) for x in repr_traceback_dict[\"reprentries\"]\n ]\n return ReprTraceback(**repr_traceback_dict)\n\n def deserialize_repr_crash(repr_crash_dict: Optional[Dict[str, Any]]):\n if repr_crash_dict is not None:\n return ReprFileLocation(**repr_crash_dict)\n else:\n return None\n\n if (\n reportdict[\"longrepr\"]\n and \"reprcrash\" in reportdict[\"longrepr\"]\n and \"reprtraceback\" in reportdict[\"longrepr\"]\n ):\n\n reprtraceback = deserialize_repr_traceback(\n reportdict[\"longrepr\"][\"reprtraceback\"]\n )\n reprcrash = deserialize_repr_crash(reportdict[\"longrepr\"][\"reprcrash\"])\n if reportdict[\"longrepr\"][\"chain\"]:\n chain = []\n for repr_traceback_data, repr_crash_data, description in reportdict[\n \"longrepr\"\n ][\"chain\"]:\n chain.append(\n (\n deserialize_repr_traceback(repr_traceback_data),\n deserialize_repr_crash(repr_crash_data),\n description,\n )\n )\n exception_info: Union[\n ExceptionChainRepr, ReprExceptionInfo\n ] = ExceptionChainRepr(chain)\n else:\n exception_info = ReprExceptionInfo(reprtraceback, reprcrash)\n\n for section in reportdict[\"longrepr\"][\"sections\"]:\n exception_info.addsection(*section)\n reportdict[\"longrepr\"] = exception_info\n\n return reportdict", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_MoreQuietAction_MoreQuietAction.__call__.namespace.quiet.getattr_namespace_quiet": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_MoreQuietAction_MoreQuietAction.__call__.namespace.quiet.getattr_namespace_quiet", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 75, "end_line": 109, "span_ids": ["MoreQuietAction.__call__", "MoreQuietAction"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MoreQuietAction(argparse.Action):\n \"\"\"A modified copy of the argparse count action which counts down and updates\n the legacy quiet attribute at the same time.\n\n Used to unify verbosity handling.\n \"\"\"\n\n def __init__(\n self,\n option_strings: Sequence[str],\n dest: str,\n default: object = None,\n required: bool = False,\n help: Optional[str] = None,\n ) -> None:\n super().__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n default=default,\n required=required,\n help=help,\n )\n\n def __call__(\n self,\n parser: argparse.ArgumentParser,\n namespace: argparse.Namespace,\n values: Union[str, Sequence[object], None],\n option_string: Optional[str] = None,\n ) -> None:\n new_count = getattr(namespace, self.dest, 0) - 1\n setattr(namespace, self.dest, new_count)\n # todo Deprecate config.quiet\n namespace.quiet = getattr(namespace, \"quiet\", 0) + 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_errors_TerminalReporter._outrep_summary.for_secname_content_in_r.self__tw_line_content_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_errors_TerminalReporter._outrep_summary.for_secname_content_in_r.self__tw_line_content_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1012, "end_line": 1038, "span_ids": ["TerminalReporter._outrep_summary", "TerminalReporter.summary_errors"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def summary_errors(self) -> None:\n if self.config.option.tbstyle != \"no\":\n reports: List[BaseReport] = self.getreports(\"error\")\n if not reports:\n return\n self.write_sep(\"=\", \"ERRORS\")\n for rep in self.stats[\"error\"]:\n msg = self._getfailureheadline(rep)\n if rep.when == \"collect\":\n msg = \"ERROR collecting \" + msg\n else:\n msg = f\"ERROR at {rep.when} of {msg}\"\n self.write_sep(\"_\", msg, red=True, bold=True)\n self._outrep_summary(rep)\n\n def _outrep_summary(self, rep: BaseReport) -> None:\n rep.toterminal(self._tw)\n showcapture = self.config.option.showcapture\n if showcapture == \"no\":\n return\n for secname, content in rep.sections:\n if showcapture != \"all\" and showcapture not in secname:\n continue\n self._tw.sep(\"-\", secname)\n if content[-1:] == \"\\n\":\n content = content[:-1]\n self._tw.line(content)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_stats_TerminalReporter.summary_stats.None_5.else_.self_write_line_msg_ma": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_stats_TerminalReporter.summary_stats.None_5.else_.self_write_line_msg_ma", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1040, "end_line": 1075, "span_ids": ["TerminalReporter.summary_stats"], "tokens": 309}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def summary_stats(self) -> None:\n if self.verbosity < -1:\n return\n\n session_duration = timing.time() - self._sessionstarttime\n (parts, main_color) = self.build_summary_stats_line()\n line_parts = []\n\n display_sep = self.verbosity >= 0\n if display_sep:\n fullwidth = self._tw.fullwidth\n for text, markup in parts:\n with_markup = self._tw.markup(text, **markup)\n if display_sep:\n fullwidth += len(with_markup) - len(text)\n line_parts.append(with_markup)\n msg = \", \".join(line_parts)\n\n main_markup = {main_color: True}\n duration = \" in {}\".format(format_session_duration(session_duration))\n duration_with_markup = self._tw.markup(duration, **main_markup)\n if display_sep:\n fullwidth += len(duration_with_markup) - len(duration)\n msg += duration_with_markup\n\n if display_sep:\n markup_for_end_sep = self._tw.markup(\"\", **main_markup)\n if markup_for_end_sep.endswith(\"\\x1b[0m\"):\n markup_for_end_sep = markup_for_end_sep[:-4]\n fullwidth += len(markup_for_end_sep)\n msg += markup_for_end_sep\n\n if display_sep:\n self.write_sep(\"=\", msg, fullwidth=fullwidth, **main_markup)\n else:\n self.write_line(msg, **main_markup)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_from_typing_import_Any_PytestDeprecationWarning.__module__._pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_from_typing_import_Any_PytestDeprecationWarning.__module__._pytest_", "embedding": null, "metadata": {"file_path": "src/_pytest/warning_types.py", "file_name": "warning_types.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 49, "span_ids": ["PytestWarning", "PytestAssertRewriteWarning", "PytestDeprecationWarning", "PytestConfigWarning", "PytestCacheWarning", "imports", "PytestCollectionWarning"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Any\nfrom typing import Generic\nfrom typing import Type\nfrom typing import TypeVar\n\nimport attr\n\nfrom _pytest.compat import final\n\n\nclass PytestWarning(UserWarning):\n \"\"\"Base class for all warnings emitted by pytest.\"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestAssertRewriteWarning(PytestWarning):\n \"\"\"Warning emitted by the 
pytest assert rewrite module.\"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestCacheWarning(PytestWarning):\n \"\"\"Warning emitted by the cache plugin in various situations.\"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestConfigWarning(PytestWarning):\n \"\"\"Warning emitted for configuration issues.\"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestCollectionWarning(PytestWarning):\n \"\"\"Warning emitted when pytest is not able to collect a file or symbol in a module.\"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestDeprecationWarning(PytestWarning, DeprecationWarning):\n \"\"\"Warning class for features that will be removed in a future version.\"\"\"\n\n __module__ = \"pytest\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_PytestUnhandledCoroutineWarning__W.TypeVar__W_bound_Pytes": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_PytestUnhandledCoroutineWarning__W.TypeVar__W_bound_Pytes", "embedding": null, "metadata": {"file_path": "src/_pytest/warning_types.py", "file_name": "warning_types.py", "file_type": "text/x-python", "category": "implementation", "start_line": 71, "end_line": 115, "span_ids": ["PytestUnhandledThreadExceptionWarning", "PytestUnknownMarkWarning", "impl", "PytestUnhandledCoroutineWarning", "impl:2", "PytestUnraisableExceptionWarning"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestUnhandledCoroutineWarning(PytestWarning):\n \"\"\"Warning emitted for an unhandled coroutine.\n\n A coroutine was encountered when collecting test functions, but was not\n handled by any async-aware plugin.\n Coroutine test functions are not natively supported.\n \"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestUnknownMarkWarning(PytestWarning):\n \"\"\"Warning emitted on use of unknown markers.\n\n See :ref:`mark` for details.\n \"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestUnraisableExceptionWarning(PytestWarning):\n \"\"\"An unraisable exception was reported.\n\n Unraisable exceptions are exceptions raised in :meth:`__del__ `\n implementations and similar situations when the exception cannot be raised\n as normal.\n \"\"\"\n\n __module__ = \"pytest\"\n\n\n@final\nclass PytestUnhandledThreadExceptionWarning(PytestWarning):\n \"\"\"An unhandled exception occurred in a :class:`~threading.Thread`.\n\n Such exceptions don't propagate normally.\n \"\"\"\n\n __module__ = \"pytest\"\n\n\n_W = TypeVar(\"_W\", bound=PytestWarning)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_UnformattedWarning_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warning_types.py_UnformattedWarning_", "embedding": null, "metadata": {"file_path": "src/_pytest/warning_types.py", "file_name": "warning_types.py", 
"file_type": "text/x-python", "category": "implementation", "start_line": 118, "end_line": 133, "span_ids": ["UnformattedWarning", "UnformattedWarning.format"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s\nclass UnformattedWarning(Generic[_W]):\n \"\"\"A warning meant to be formatted during runtime.\n\n This is used to hold warnings that need to format their message at runtime,\n as opposed to a direct message.\n \"\"\"\n\n category = attr.ib(type=Type[\"_W\"])\n template = attr.ib(type=str)\n\n def format(self, **kwargs: Any) -> _W:\n \"\"\"Return an instance of the warning category, formatted with given kwargs.\"\"\"\n return self.category(self.template.format(**kwargs))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_not_collectable_arguments_TestGeneralUsage.test_not_collectable_arguments.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_not_collectable_arguments_TestGeneralUsage.test_not_collectable_arguments.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 183, "end_line": 194, "span_ids": ["TestGeneralUsage.test_not_collectable_arguments"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_not_collectable_arguments(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"\")\n p2 = pytester.makefile(\".pyc\", \"123\")\n result = pytester.runpytest(p1, p2)\n assert result.ret == ExitCode.USAGE_ERROR\n result.stderr.fnmatch_lines(\n [\n f\"ERROR: not found: {p2}\",\n \"(no name {!r} in any of [[][]])\".format(str(p2)),\n \"\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_zipimport_hook_test_zipimport_hook.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_zipimport_hook_test_zipimport_hook.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 985, "end_line": 1005, "span_ids": ["test_zipimport_hook"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_zipimport_hook(pytester: Pytester) -> None:\n \"\"\"Test package loader is being used correctly (see #1837).\"\"\"\n zipapp = pytest.importorskip(\"zipapp\")\n pytester.path.joinpath(\"app\").mkdir()\n pytester.makepyfile(\n **{\n \"app/foo.py\": \"\"\"\n import pytest\n def main():\n pytest.main(['--pyargs', 'foo'])\n \"\"\"\n }\n )\n target = pytester.path.joinpath(\"foo.zip\")\n zipapp.create_archive(\n str(pytester.path.joinpath(\"app\")), str(target), main=\"foo:main\"\n )\n result = pytester.runpython(target)\n assert result.ret == 0\n result.stderr.fnmatch_lines([\"*not found*foo*\"])\n result.stdout.no_fnmatch_line(\"*INTERNALERROR>*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_warn_on_async_gen_function_test_warn_on_async_gen_function.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_warn_on_async_gen_function_test_warn_on_async_gen_function.assert_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1209, "end_line": 1234, "span_ids": ["test_warn_on_async_gen_function"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_warn_on_async_gen_function(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_async=\"\"\"\n async def test_1():\n yield\n async def test_2():\n yield\n def test_3():\n return test_2()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"test_async.py::test_1\",\n \"test_async.py::test_2\",\n \"test_async.py::test_3\",\n \"*async def functions are not natively supported*\",\n \"*3 skipped, 3 warnings in*\",\n ]\n )\n # ensure our warning message appears only once\n assert (\n result.stdout.str().count(\"async def functions are not natively supported\") == 1\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_for_later_test_excinfo_no_sourcecode.assert_s_File_str": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_for_later_test_excinfo_no_sourcecode.assert_s_File_str", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 343, "end_line": 360, "span_ids": ["test_excinfo_for_later", "test_excinfo_no_sourcecode", "test_excinfo_errisinstance"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_excinfo_for_later() -> None:\n e = ExceptionInfo[BaseException].for_later()\n assert \"for raises\" in repr(e)\n assert \"for raises\" in str(e)\n\n\ndef test_excinfo_errisinstance():\n excinfo = pytest.raises(ValueError, h)\n assert excinfo.errisinstance(ValueError)\n\n\ndef test_excinfo_no_sourcecode():\n try:\n exec(\"raise ValueError()\")\n except ValueError:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n s = str(excinfo.traceback[-1])\n assert s == \" File '':1 in \\n ???\\n\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_entrysource_Queue_example_test_match_succeeds.excinfo_match_r_zero_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_entrysource_Queue_example_test_match_succeeds.excinfo_match_r_zero_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 383, "end_line": 410, "span_ids": ["test_match_succeeds", "test_codepath_Queue_example", "test_entrysource_Queue_example"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_entrysource_Queue_example():\n try:\n queue.Queue().get(timeout=0.001)\n except queue.Empty:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n entry = excinfo.traceback[-1]\n source = entry.getsource()\n assert source is not None\n s = str(source).strip()\n assert s.startswith(\"def get\")\n\n\ndef test_codepath_Queue_example() -> None:\n try:\n queue.Queue().get(timeout=0.001)\n except queue.Empty:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n entry = excinfo.traceback[-1]\n path = entry.path\n assert isinstance(path, Path)\n assert path.name.lower() == \"queue.py\"\n assert path.exists()\n\n\ndef test_match_succeeds():\n with pytest.raises(ZeroDivisionError) as excinfo:\n 0 // 0\n excinfo.match(r\".*zero.*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_match_raises_error_test_match_raises_error.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_match_raises_error_test_match_raises_error.None_3", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 413, "end_line": 434, "span_ids": ["test_match_raises_error"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_match_raises_error(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_division_zero():\n with pytest.raises(ZeroDivisionError) as excinfo:\n 0 / 0\n excinfo.match(r'[123]+')\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n\n exc_msg = \"Regex pattern '[[]123[]]+' does not match 'division by zero'.\"\n result.stdout.fnmatch_lines([f\"E * AssertionError: {exc_msg}\"])\n result.stdout.no_fnmatch_line(\"*__tracebackhide__ = True*\")\n\n result = pytester.runpytest(\"--fulltrace\")\n assert result.ret != 0\n result.stdout.fnmatch_lines(\n [\"*__tracebackhide__ = True*\", f\"E * AssertionError: {exc_msg}\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines2_TestFormattedExcinfo.test_repr_tracebackentry_lines2.assert_tw_mock_lines_2_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines2_TestFormattedExcinfo.test_repr_tracebackentry_lines2.assert_tw_mock_lines_2_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 622, "end_line": 647, "span_ids": ["TestFormattedExcinfo.test_repr_tracebackentry_lines2"], "tokens": 345}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_tracebackentry_lines2(self, importasmod, tw_mock) -> None:\n mod = importasmod(\n \"\"\"\n def func1(m, x, y, z):\n raise ValueError(\"hello\\\\nworld\")\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.func1, \"m\" * 90, 5, 13, \"z\" * 120)\n excinfo.traceback = excinfo.traceback.filter()\n entry = excinfo.traceback[-1]\n p = FormattedExcinfo(funcargs=True)\n reprfuncargs = p.repr_args(entry)\n assert reprfuncargs is not None\n assert reprfuncargs.args[0] == (\"m\", repr(\"m\" * 90))\n assert reprfuncargs.args[1] == (\"x\", \"5\")\n assert reprfuncargs.args[2] == (\"y\", \"13\")\n assert reprfuncargs.args[3] == (\"z\", repr(\"z\" * 120))\n\n p = FormattedExcinfo(funcargs=True)\n repr_entry = p.repr_traceback_entry(entry)\n assert repr_entry.reprfuncargs is not None\n assert repr_entry.reprfuncargs.args == reprfuncargs.args\n repr_entry.toterminal(tw_mock)\n assert tw_mock.lines[0] == \"m = \" + repr(\"m\" * 90)\n assert tw_mock.lines[1] == \"x = 5, y = 13\"\n assert tw_mock.lines[2] == \"z = \" + repr(\"z\" * 120)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines_var_kw_args_TestFormattedExcinfo.test_repr_tracebackentry_lines_var_kw_args.assert_tw_mock_lines_0_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_lines_var_kw_args_TestFormattedExcinfo.test_repr_tracebackentry_lines_var_kw_args.assert_tw_mock_lines_0_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 649, "end_line": 671, "span_ids": ["TestFormattedExcinfo.test_repr_tracebackentry_lines_var_kw_args"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_tracebackentry_lines_var_kw_args(self, importasmod, tw_mock) -> None:\n mod = importasmod(\n \"\"\"\n def func1(x, *y, **z):\n raise ValueError(\"hello\\\\nworld\")\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.func1, \"a\", \"b\", c=\"d\")\n excinfo.traceback = excinfo.traceback.filter()\n entry = excinfo.traceback[-1]\n p = FormattedExcinfo(funcargs=True)\n reprfuncargs = p.repr_args(entry)\n assert reprfuncargs is not None\n assert reprfuncargs.args[0] == (\"x\", repr(\"a\"))\n assert reprfuncargs.args[1] == (\"y\", repr((\"b\",)))\n assert reprfuncargs.args[2] == (\"z\", repr({\"c\": \"d\"}))\n\n p = FormattedExcinfo(funcargs=True)\n repr_entry = p.repr_traceback_entry(entry)\n assert repr_entry.reprfuncargs\n assert repr_entry.reprfuncargs.args == reprfuncargs.args\n repr_entry.toterminal(tw_mock)\n assert tw_mock.lines[0] == \"x = 'a', y = ('b',), z = {'c': 'd'}\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_TestFormattedExcinfo.test_toterminal_long.assert_tw_mock_lines_12_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_TestFormattedExcinfo.test_toterminal_long.assert_tw_mock_lines_12_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 911, "end_line": 940, "span_ids": ["TestFormattedExcinfo.test_toterminal_long"], "tokens": 301}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_toterminal_long(self, importasmod, tw_mock):\n mod = importasmod(\n \"\"\"\n def g(x):\n raise ValueError(x)\n def f():\n g(3)\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.f)\n excinfo.traceback = excinfo.traceback.filter()\n repr = excinfo.getrepr()\n repr.toterminal(tw_mock)\n assert tw_mock.lines[0] == \"\"\n tw_mock.lines.pop(0)\n assert tw_mock.lines[0] == \" def f():\"\n assert tw_mock.lines[1] == \"> g(3)\"\n assert tw_mock.lines[2] == \"\"\n line = tw_mock.get_write_msg(3)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[4] == 
(\":5: \")\n assert tw_mock.lines[5] == (\"_ \", None)\n assert tw_mock.lines[6] == \"\"\n assert tw_mock.lines[7] == \" def g(x):\"\n assert tw_mock.lines[8] == \"> raise ValueError(x)\"\n assert tw_mock.lines[9] == \"E ValueError: 3\"\n assert tw_mock.lines[10] == \"\"\n line = tw_mock.get_write_msg(11)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[12] == \":3: ValueError\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_missing_source_TestFormattedExcinfo.test_toterminal_long_missing_source.assert_tw_mock_lines_10_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_missing_source_TestFormattedExcinfo.test_toterminal_long_missing_source.assert_tw_mock_lines_10_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 954, "end_line": 984, "span_ids": ["TestFormattedExcinfo.test_toterminal_long_missing_source"], "tokens": 290}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_toterminal_long_missing_source(\n self, importasmod, tmp_path: Path, tw_mock\n ) -> None:\n mod = importasmod(\n \"\"\"\n def g(x):\n raise ValueError(x)\n def f():\n g(3)\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.f)\n tmp_path.joinpath(\"mod.py\").unlink()\n excinfo.traceback = excinfo.traceback.filter()\n repr = excinfo.getrepr()\n repr.toterminal(tw_mock)\n assert tw_mock.lines[0] == \"\"\n tw_mock.lines.pop(0)\n assert tw_mock.lines[0] == \"> ???\"\n assert tw_mock.lines[1] == \"\"\n line = tw_mock.get_write_msg(2)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[3] == \":5: \"\n assert tw_mock.lines[4] == (\"_ \", None)\n assert tw_mock.lines[5] == \"\"\n assert tw_mock.lines[6] == \"> ???\"\n assert tw_mock.lines[7] == \"E ValueError: 3\"\n assert tw_mock.lines[8] == \"\"\n line = tw_mock.get_write_msg(9)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[10] == \":3: ValueError\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_incomplete_source_TestFormattedExcinfo.test_toterminal_long_incomplete_source.assert_tw_mock_lines_10_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_incomplete_source_TestFormattedExcinfo.test_toterminal_long_incomplete_source.assert_tw_mock_lines_10_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 986, "end_line": 1016, "span_ids": ["TestFormattedExcinfo.test_toterminal_long_incomplete_source"], "tokens": 294}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_toterminal_long_incomplete_source(\n self, importasmod, tmp_path: Path, tw_mock\n ) -> None:\n mod = importasmod(\n \"\"\"\n def g(x):\n raise ValueError(x)\n def f():\n g(3)\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.f)\n tmp_path.joinpath(\"mod.py\").write_text(\"asdf\")\n excinfo.traceback = excinfo.traceback.filter()\n repr = excinfo.getrepr()\n repr.toterminal(tw_mock)\n assert tw_mock.lines[0] == \"\"\n tw_mock.lines.pop(0)\n assert tw_mock.lines[0] == \"> ???\"\n assert tw_mock.lines[1] == \"\"\n line = tw_mock.get_write_msg(2)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[3] == \":5: \"\n assert tw_mock.lines[4] == (\"_ \", None)\n assert tw_mock.lines[5] == \"\"\n assert tw_mock.lines[6] == \"> ???\"\n assert tw_mock.lines[7] == \"E ValueError: 3\"\n assert tw_mock.lines[8] == \"\"\n line = tw_mock.get_write_msg(9)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[10] == \":3: ValueError\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_traceback_repr_style_TestFormattedExcinfo.test_traceback_repr_style.assert_tw_mock_lines_20_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_traceback_repr_style_TestFormattedExcinfo.test_traceback_repr_style.assert_tw_mock_lines_20_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1060, "end_line": 1103, "span_ids": ["TestFormattedExcinfo.test_traceback_repr_style"], "tokens": 423}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_traceback_repr_style(self, importasmod, tw_mock):\n mod = importasmod(\n \"\"\"\n def f():\n g()\n def g():\n h()\n def h():\n i()\n def i():\n raise ValueError()\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.f)\n excinfo.traceback = excinfo.traceback.filter()\n excinfo.traceback[1].set_repr_style(\"short\")\n excinfo.traceback[2].set_repr_style(\"short\")\n r = excinfo.getrepr(style=\"long\")\n r.toterminal(tw_mock)\n for line in tw_mock.lines:\n print(line)\n assert tw_mock.lines[0] == \"\"\n assert tw_mock.lines[1] == \" def f():\"\n assert tw_mock.lines[2] == \"> g()\"\n assert tw_mock.lines[3] == \"\"\n msg = tw_mock.get_write_msg(4)\n assert msg.endswith(\"mod.py\")\n assert tw_mock.lines[5] == \":3: \"\n assert tw_mock.lines[6] == (\"_ \", None)\n tw_mock.get_write_msg(7)\n assert tw_mock.lines[8].endswith(\"in g\")\n assert tw_mock.lines[9] == \" h()\"\n tw_mock.get_write_msg(10)\n assert tw_mock.lines[11].endswith(\"in h\")\n assert 
tw_mock.lines[12] == \" i()\"\n assert tw_mock.lines[13] == (\"_ \", None)\n assert tw_mock.lines[14] == \"\"\n assert tw_mock.lines[15] == \" def i():\"\n assert tw_mock.lines[16] == \"> raise ValueError()\"\n assert tw_mock.lines[17] == \"E ValueError\"\n assert tw_mock.lines[18] == \"\"\n msg = tw_mock.get_write_msg(19)\n msg.endswith(\"mod.py\")\n assert tw_mock.lines[20] == \":9: ValueError\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_TestFormattedExcinfo.test_exc_chain_repr.assert_tw_mock_lines_47_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_TestFormattedExcinfo.test_exc_chain_repr.assert_tw_mock_lines_47_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1105, "end_line": 1187, "span_ids": ["TestFormattedExcinfo.test_exc_chain_repr"], "tokens": 817}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_exc_chain_repr(self, importasmod, tw_mock):\n mod = importasmod(\n \"\"\"\n class Err(Exception):\n pass\n def f():\n try:\n g()\n except Exception as e:\n raise Err() from e\n finally:\n h()\n def g():\n raise ValueError()\n\n def h():\n raise AttributeError()\n \"\"\"\n )\n excinfo = pytest.raises(AttributeError, mod.f)\n r = excinfo.getrepr(style=\"long\")\n r.toterminal(tw_mock)\n for line in tw_mock.lines:\n print(line)\n assert tw_mock.lines[0] == \"\"\n assert tw_mock.lines[1] == \" def f():\"\n assert tw_mock.lines[2] == \" try:\"\n assert tw_mock.lines[3] == \"> g()\"\n assert tw_mock.lines[4] == \"\"\n line = tw_mock.get_write_msg(5)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[6] == \":6: \"\n assert tw_mock.lines[7] == (\"_ \", None)\n assert tw_mock.lines[8] == \"\"\n assert tw_mock.lines[9] == \" def g():\"\n assert tw_mock.lines[10] == \"> raise ValueError()\"\n assert tw_mock.lines[11] == \"E ValueError\"\n assert tw_mock.lines[12] == \"\"\n line = tw_mock.get_write_msg(13)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[14] == \":12: ValueError\"\n assert tw_mock.lines[15] == \"\"\n assert (\n tw_mock.lines[16]\n == \"The above exception was the direct cause of the following exception:\"\n )\n assert tw_mock.lines[17] == \"\"\n assert tw_mock.lines[18] == \" def f():\"\n assert tw_mock.lines[19] == \" try:\"\n assert tw_mock.lines[20] == \" g()\"\n assert tw_mock.lines[21] == \" except Exception as e:\"\n assert tw_mock.lines[22] == \"> raise Err() from e\"\n assert tw_mock.lines[23] == \"E test_exc_chain_repr0.mod.Err\"\n assert tw_mock.lines[24] == \"\"\n line = tw_mock.get_write_msg(25)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[26] == \":8: Err\"\n assert tw_mock.lines[27] == \"\"\n assert (\n tw_mock.lines[28]\n == \"During handling of the above exception, another exception occurred:\"\n )\n assert tw_mock.lines[29] == \"\"\n assert tw_mock.lines[30] == \" 
def f():\"\n assert tw_mock.lines[31] == \" try:\"\n assert tw_mock.lines[32] == \" g()\"\n assert tw_mock.lines[33] == \" except Exception as e:\"\n assert tw_mock.lines[34] == \" raise Err() from e\"\n assert tw_mock.lines[35] == \" finally:\"\n assert tw_mock.lines[36] == \"> h()\"\n assert tw_mock.lines[37] == \"\"\n line = tw_mock.get_write_msg(38)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[39] == \":10: \"\n assert tw_mock.lines[40] == (\"_ \", None)\n assert tw_mock.lines[41] == \"\"\n assert tw_mock.lines[42] == \" def h():\"\n assert tw_mock.lines[43] == \"> raise AttributeError()\"\n assert tw_mock.lines[44] == \"E AttributeError\"\n assert tw_mock.lines[45] == \"\"\n line = tw_mock.get_write_msg(46)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[47] == \":15: AttributeError\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_repr_chain_suppression_TestFormattedExcinfo.test_exc_repr_chain_suppression.assert_len_tw_mock_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_repr_chain_suppression_TestFormattedExcinfo.test_exc_repr_chain_suppression.assert_len_tw_mock_lines_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1189, "end_line": 1227, "span_ids": ["TestFormattedExcinfo.test_exc_repr_chain_suppression"], "tokens": 368}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n @pytest.mark.parametrize(\"mode\", [\"from_none\", \"explicit_suppress\"])\n def test_exc_repr_chain_suppression(self, importasmod, mode, tw_mock):\n \"\"\"Check that exc repr does not show chained exceptions in Python 3.\n - When the exception is raised with \"from None\"\n - Explicitly suppressed with \"chain=False\" to ExceptionInfo.getrepr().\n \"\"\"\n raise_suffix = \" from None\" if mode == \"from_none\" else \"\"\n mod = importasmod(\n \"\"\"\n def f():\n try:\n g()\n except Exception:\n raise AttributeError(){raise_suffix}\n def g():\n raise ValueError()\n \"\"\".format(\n raise_suffix=raise_suffix\n )\n )\n excinfo = pytest.raises(AttributeError, mod.f)\n r = excinfo.getrepr(style=\"long\", chain=mode != \"explicit_suppress\")\n r.toterminal(tw_mock)\n for line in tw_mock.lines:\n print(line)\n assert tw_mock.lines[0] == \"\"\n assert tw_mock.lines[1] == \" def f():\"\n assert tw_mock.lines[2] == \" try:\"\n assert tw_mock.lines[3] == \" g()\"\n assert tw_mock.lines[4] == \" except Exception:\"\n assert tw_mock.lines[5] == \"> raise AttributeError(){}\".format(\n raise_suffix\n )\n assert tw_mock.lines[6] == \"E AttributeError\"\n assert tw_mock.lines[7] == \"\"\n line = tw_mock.get_write_msg(8)\n assert line.endswith(\"mod.py\")\n assert tw_mock.lines[9] == \":6: AttributeError\"\n assert len(tw_mock.lines) == 10", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_repr_traceback_with_unicode_test_cwd_deleted.result_stderr_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_repr_traceback_with_unicode_test_cwd_deleted.result_stderr_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1367, "end_line": 1397, "span_ids": ["test_cwd_deleted", "test_repr_traceback_with_unicode"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"style\", [\"short\", \"long\"])\n@pytest.mark.parametrize(\"encoding\", [None, \"utf8\", \"utf16\"])\ndef test_repr_traceback_with_unicode(style, encoding):\n if encoding is None:\n msg: Union[str, bytes] = \"\u2639\"\n else:\n msg = \"\u2639\".encode(encoding)\n try:\n raise RuntimeError(msg)\n except RuntimeError:\n e_info = ExceptionInfo.from_current()\n formatter = FormattedExcinfo(style=style)\n repr_traceback = formatter.repr_traceback(e_info)\n assert repr_traceback is not None\n\n\ndef test_cwd_deleted(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import os\n\n def test(tmp_path):\n os.chdir(tmp_path)\n tmp_path.unlink()\n assert False\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 failed in *\"])\n result.stdout.no_fnmatch_line(\"*INTERNALERROR*\")\n result.stderr.no_fnmatch_line(\"*INTERNALERROR*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_tw_mock_tw_mock.return.TWMock_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_tw_mock_tw_mock.return.TWMock_", "embedding": null, "metadata": {"file_path": "testing/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 67, "end_line": 103, "span_ids": ["tw_mock.TWMock", "tw_mock", "tw_mock.TWMock:2"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef tw_mock():\n \"\"\"Returns a mock terminal writer\"\"\"\n\n class TWMock:\n WRITE = object()\n\n def __init__(self):\n self.lines = []\n self.is_writing = False\n\n def sep(self, sep, line=None):\n self.lines.append((sep, line))\n\n def write(self, msg, **kw):\n self.lines.append((TWMock.WRITE, msg))\n\n def _write_source(self, lines, indents=()):\n if not indents:\n indents = [\"\"] * len(lines)\n for indent, line in zip(indents, lines):\n self.line(indent + line)\n\n def line(self, line, **kw):\n self.lines.append(line)\n\n def markup(self, text, **kw):\n return text\n\n def get_write_msg(self, idx):\n flag, msg = 
self.lines[idx]\n assert flag == TWMock.WRITE\n return msg\n\n fullwidth = 80\n\n return TWMock()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_pytest_test_maxsize_error_on_instance.assert_s_0_and_s_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_pytest_test_maxsize_error_on_instance.assert_s_0_and_s_", "embedding": null, "metadata": {"file_path": "testing/io/test_saferepr.py", "file_name": "test_saferepr.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 25, "span_ids": ["test_maxsize_error_on_instance.A", "test_maxsize_error_on_instance", "test_maxsize", "test_simple_repr", "imports", "test_maxsize_error_on_instance.A.__repr__"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom _pytest._io.saferepr import _pformat_dispatch\nfrom _pytest._io.saferepr import saferepr\n\n\ndef test_simple_repr():\n assert saferepr(1) == \"1\"\n assert saferepr(None) == \"None\"\n\n\ndef test_maxsize():\n s = saferepr(\"x\" * 50, maxsize=25)\n assert len(s) == 25\n expected = repr(\"x\" * 10 + \"...\" + \"x\" * 10)\n assert s == expected\n\n\ndef test_maxsize_error_on_instance():\n class A:\n def __repr__(self):\n raise ValueError(\"...\")\n\n s = saferepr((\"*\" * 50, A()), maxsize=25)\n assert len(s) == 25\n assert s[0] == \"(\" and s[-1] == \")\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_exceptions_test_exceptions.assert_s2_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_exceptions_test_exceptions.assert_s2_", "embedding": null, "metadata": {"file_path": "testing/io/test_saferepr.py", "file_name": "test_saferepr.py", "file_type": "text/x-python", "category": "test", "start_line": 28, "end_line": 56, "span_ids": ["test_exceptions.BrokenReprException", "test_exceptions.BrokenReprException:2", "test_exceptions.BrokenRepr.__init__", "test_exceptions.BrokenRepr", "test_exceptions"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_exceptions() -> None:\n class BrokenRepr:\n def __init__(self, ex):\n self.ex = ex\n\n def __repr__(self):\n raise self.ex\n\n class BrokenReprException(Exception):\n __str__ = None # type: ignore[assignment]\n __repr__ = None # type: ignore[assignment]\n\n assert \"Exception\" in saferepr(BrokenRepr(Exception(\"broken\")))\n s = saferepr(BrokenReprException(\"really broken\"))\n assert \"TypeError\" in s\n assert \"TypeError\" in saferepr(BrokenRepr(\"string\"))\n\n none = None\n try:\n none() # type: ignore[misc]\n except 
BaseException as exc:\n exp_exc = repr(exc)\n obj = BrokenRepr(BrokenReprException(\"omg even worse\"))\n s2 = saferepr(obj)\n assert s2 == (\n \"<[unpresentable exception ({!s}) raised in repr()] BrokenRepr object at 0x{:x}>\".format(\n exp_exc, id(obj)\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_baseexception_test_baseexception.None_5.saferepr_BrokenObj_Raisin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_saferepr.py_test_baseexception_test_baseexception.None_5.saferepr_BrokenObj_Raisin", "embedding": null, "metadata": {"file_path": "testing/io/test_saferepr.py", "file_name": "test_saferepr.py", "file_type": "text/x-python", "category": "test", "start_line": 58, "end_line": 118, "span_ids": ["test_baseexception.BrokenObj.__init__", "test_baseexception", "test_baseexception.BrokenObj", "test_baseexception.RaisingOnStrRepr", "test_baseexception.RaisingOnStrRepr.__init__"], "tokens": 461}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_baseexception():\n \"\"\"Test saferepr() with BaseExceptions, which includes pytest outcomes.\"\"\"\n\n class RaisingOnStrRepr(BaseException):\n def __init__(self, exc_types):\n self.exc_types = exc_types\n\n def raise_exc(self, *args):\n try:\n self.exc_type = self.exc_types.pop(0)\n except IndexError:\n pass\n if hasattr(self.exc_type, \"__call__\"):\n raise self.exc_type(*args)\n raise self.exc_type\n\n def __str__(self):\n self.raise_exc(\"__str__\")\n\n def __repr__(self):\n self.raise_exc(\"__repr__\")\n\n class BrokenObj:\n def __init__(self, exc):\n self.exc = exc\n\n def __repr__(self):\n raise self.exc\n\n __str__ = __repr__\n\n baseexc_str = BaseException(\"__str__\")\n obj = BrokenObj(RaisingOnStrRepr([BaseException]))\n assert saferepr(obj) == (\n \"<[unpresentable exception ({!r}) \"\n \"raised in repr()] BrokenObj object at 0x{:x}>\".format(baseexc_str, id(obj))\n )\n obj = BrokenObj(RaisingOnStrRepr([RaisingOnStrRepr([BaseException])]))\n assert saferepr(obj) == (\n \"<[{!r} raised in repr()] BrokenObj object at 0x{:x}>\".format(\n baseexc_str, id(obj)\n )\n )\n\n with pytest.raises(KeyboardInterrupt):\n saferepr(BrokenObj(KeyboardInterrupt()))\n\n with pytest.raises(SystemExit):\n saferepr(BrokenObj(SystemExit()))\n\n with pytest.raises(KeyboardInterrupt):\n saferepr(BrokenObj(RaisingOnStrRepr([KeyboardInterrupt])))\n\n with pytest.raises(SystemExit):\n saferepr(BrokenObj(RaisingOnStrRepr([SystemExit])))\n\n with pytest.raises(KeyboardInterrupt):\n print(saferepr(BrokenObj(RaisingOnStrRepr([BaseException, KeyboardInterrupt]))))\n\n with pytest.raises(SystemExit):\n saferepr(BrokenObj(RaisingOnStrRepr([BaseException, SystemExit])))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_change_level_undo_test_change_level_undo.result_stdout_no_fnmatch_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_change_level_undo_test_change_level_undo.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 31, "end_line": 54, "span_ids": ["test_change_level_undo"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_change_level_undo(pytester: Pytester) -> None:\n \"\"\"Ensure that 'set_level' is undone after the end of the test.\n\n Tests the logging output themselves (affacted both by logger and handler levels).\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n def test1(caplog):\n caplog.set_level(logging.INFO)\n # using + operator here so fnmatch_lines doesn't match the code in the traceback\n logging.info('log from ' + 'test1')\n assert 0\n\n def test2(caplog):\n # using + operator here so fnmatch_lines doesn't match the code in the traceback\n logging.info('log from ' + 'test2')\n assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*log from test1*\", \"*2 failed in *\"])\n result.stdout.no_fnmatch_line(\"*log from test2*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_test_multiline_message_test_multiline_message.None_10": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_formatter.py_test_multiline_message_test_multiline_message.None_10", "embedding": null, "metadata": {"file_path": "testing/logging/test_formatter.py", "file_name": "test_formatter.py", "file_type": "text/x-python", "category": "test", "start_line": 39, "end_line": 125, "span_ids": ["test_multiline_message"], "tokens": 640}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_multiline_message() -> None:\n from _pytest.logging import PercentStyleMultiline\n\n logfmt = \"%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s\"\n\n record: Any = logging.LogRecord(\n name=\"dummy\",\n level=logging.INFO,\n pathname=\"dummypath\",\n lineno=10,\n msg=\"Test Message line1\\nline2\",\n args=(),\n exc_info=None,\n )\n # this is called by logging.Formatter.format\n record.message = record.getMessage()\n\n ai_on_style = PercentStyleMultiline(logfmt, True)\n output = ai_on_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\n\"\n \" line2\"\n )\n\n ai_off_style = PercentStyleMultiline(logfmt, False)\n output = ai_off_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\nline2\"\n )\n\n ai_none_style = PercentStyleMultiline(logfmt, None)\n output = ai_none_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\nline2\"\n )\n\n 
record.auto_indent = False\n output = ai_on_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\nline2\"\n )\n\n record.auto_indent = True\n output = ai_off_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\n\"\n \" line2\"\n )\n\n record.auto_indent = \"False\"\n output = ai_on_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\nline2\"\n )\n\n record.auto_indent = \"True\"\n output = ai_off_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\n\"\n \" line2\"\n )\n\n # bad string values default to False\n record.auto_indent = \"junk\"\n output = ai_off_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\nline2\"\n )\n\n # anything other than string or int will default to False\n record.auto_indent = dict()\n output = ai_off_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\nline2\"\n )\n\n record.auto_indent = \"5\"\n output = ai_off_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\n line2\"\n )\n\n record.auto_indent = 5\n output = ai_off_style.format(record)\n assert output == (\n \"dummypath 10 INFO Test Message line1\\n line2\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_level_log_level_interaction_test_log_cli_level_log_level_interaction.result_stdout_no_re_match": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_level_log_level_interaction_test_log_cli_level_log_level_interaction.result_stdout_no_re_match", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 91, "end_line": 118, "span_ids": ["test_log_cli_level_log_level_interaction"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_cli_level_log_level_interaction(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import logging\n logger = logging.getLogger()\n\n def test_foo():\n logger.debug('debug text ' + 'going to logger')\n logger.info('info text ' + 'going to logger')\n logger.warning('warning text ' + 'going to logger')\n logger.error('error text ' + 'going to logger')\n assert 0\n \"\"\"\n )\n\n result = pytester.runpytest(\"--log-cli-level=INFO\", \"--log-level=ERROR\")\n assert result.ret == 1\n\n result.stdout.fnmatch_lines(\n [\n \"*-- live log call --*\",\n \"*INFO*info text going to logger\",\n \"*WARNING*warning text going to logger\",\n \"*ERROR*error text going to logger\",\n \"=* 1 failed in *=\",\n ]\n )\n result.stdout.no_re_match_line(\"DEBUG\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_level_test_log_cli_level.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_log_cli_level_test_log_cli_level.None_1", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 499, "end_line": 546, "span_ids": ["test_log_cli_level"], "tokens": 382}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_cli_level(pytester: Pytester) -> None:\n # Default log file level\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_cli(request):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_cli_handler.level == logging.INFO\n logging.getLogger('catchlog').debug(\"This log message won't be shown\")\n logging.getLogger('catchlog').info(\"This log message will be shown\")\n print('PASSED')\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_cli=true\n \"\"\"\n )\n\n result = pytester.runpytest(\"-s\", \"--log-cli-level=INFO\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines(\n [\n \"*test_log_cli_level.py*This log message will be shown\",\n \"PASSED\", # 'PASSED' on its own line because the log message prints a new line\n ]\n )\n result.stdout.no_fnmatch_line(\"*This log message won't be shown*\")\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0\n\n result = pytester.runpytest(\"-s\", \"--log-level=INFO\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines(\n [\n \"*test_log_cli_level.py* This log message will be shown\",\n \"PASSED\", # 'PASSED' on its own line because the log message prints a new line\n ]\n )\n result.stdout.no_fnmatch_line(\"*This log message won't be shown*\")\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_operator_mocked_doctest_runner.return.MyDocTestRunner_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_operator_mocked_doctest_runner.return.MyDocTestRunner_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 43, "span_ids": ["impl", "mocked_doctest_runner.MyDocTestRunner", "mocked_doctest_runner.MockedPdb.__init__", "mocked_doctest_runner.MockedPdb", "mocked_doctest_runner.MyDocTestRunner.report_failure", "imports", "mocked_doctest_runner"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "import operator\nimport sys\nfrom decimal import Decimal\nfrom fractions import Fraction\nfrom operator import eq\nfrom operator import ne\nfrom typing import Optional\n\nimport pytest\nfrom _pytest.pytester import Pytester\nfrom pytest import approx\n\ninf, nan = float(\"inf\"), float(\"nan\")\n\n\n@pytest.fixture\ndef mocked_doctest_runner(monkeypatch):\n import doctest\n\n class MockedPdb:\n def __init__(self, out):\n pass\n\n def set_trace(self):\n raise NotImplementedError(\"not used\")\n\n def reset(self):\n pass\n\n def set_continue(self):\n pass\n\n monkeypatch.setattr(\"doctest._OutputRedirectingPdb\", MockedPdb)\n\n class MyDocTestRunner(doctest.DocTestRunner):\n def report_failure(self, out, test, example, got):\n raise AssertionError(\n \"'{}' evaluates to '{}', not '{}'\".format(\n example.source.strip(), got.strip(), example.want.strip()\n )\n )\n\n return MyDocTestRunner()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_repr_complex_numbers_TestApprox.test_repr_complex_numbers.assert_repr_approx_3_3_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_repr_complex_numbers_TestApprox.test_repr_complex_numbers.assert_repr_approx_3_3_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 58, "end_line": 71, "span_ids": ["TestApprox.test_repr_complex_numbers"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_repr_complex_numbers(self):\n assert repr(approx(inf + 1j)) == \"(inf+1j)\"\n assert repr(approx(1.0j, rel=inf)) == \"1j \u00b1 inf\"\n\n # can't compute a sensible tolerance\n assert repr(approx(nan + 1j)) == \"(nan+1j) \u00b1 ???\"\n\n assert repr(approx(1.0j)) == \"1j \u00b1 1.0e-06 \u2220 \u00b1180\u00b0\"\n\n # relative tolerance is scaled to |3+4j| = 5\n assert repr(approx(3 + 4 * 1j)) == \"(3+4j) \u00b1 5.0e-06 \u2220 \u00b1180\u00b0\"\n\n # absolute tolerance is not scaled\n assert repr(approx(3.3 + 4.4 * 1j, abs=0.02)) == \"(3.3+4.4j) \u00b1 2.0e-02 \u2220 \u00b1180\u00b0\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_getfuncargnames_methods_test_getfuncargnames_staticmethod_partial.assert_values_arg1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_getfuncargnames_methods_test_getfuncargnames_staticmethod_partial.assert_values_arg1_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 41, "end_line": 101, "span_ids": ["test_getfuncargnames_partial", "test_getfuncargnames_partial.T", 
"test_getfuncargnames_staticmethod_inherited.A.static", "test_getfuncargnames_methods", "test_getfuncargnames_staticmethod_inherited", "test_getfuncargnames_methods.A.f", "test_getfuncargnames_methods.A", "test_getfuncargnames_staticmethod_inherited.B:2", "test_getfuncargnames_staticmethod_partial", "test_getfuncargnames_staticmethod_partial.T", "test_getfuncargnames_staticmethod_inherited.A", "test_getfuncargnames_partial.T:2", "test_getfuncargnames_staticmethod_inherited.B", "test_getfuncargnames_staticmethod", "test_getfuncargnames_staticmethod.A.static", "test_getfuncargnames_staticmethod.A", "test_getfuncargnames_staticmethod_partial.T:2"], "tokens": 393}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_getfuncargnames_methods():\n \"\"\"Test getfuncargnames for normal methods\"\"\"\n\n class A:\n def f(self, arg1, arg2=\"hello\"):\n raise NotImplementedError()\n\n assert getfuncargnames(A().f) == (\"arg1\",)\n\n\ndef test_getfuncargnames_staticmethod():\n \"\"\"Test getfuncargnames for staticmethods\"\"\"\n\n class A:\n @staticmethod\n def static(arg1, arg2, x=1):\n raise NotImplementedError()\n\n assert getfuncargnames(A.static, cls=A) == (\"arg1\", \"arg2\")\n\n\ndef test_getfuncargnames_staticmethod_inherited() -> None:\n \"\"\"Test getfuncargnames for inherited staticmethods (#8061)\"\"\"\n\n class A:\n @staticmethod\n def static(arg1, arg2, x=1):\n raise NotImplementedError()\n\n class B(A):\n pass\n\n assert getfuncargnames(B.static, cls=B) == (\"arg1\", \"arg2\")\n\n\ndef test_getfuncargnames_partial():\n \"\"\"Check getfuncargnames for methods defined with functools.partial (#5701)\"\"\"\n import functools\n\n def check(arg1, arg2, i):\n raise NotImplementedError()\n\n class T:\n test_ok = functools.partial(check, i=2)\n\n values = getfuncargnames(T().test_ok, name=\"test_ok\")\n assert values == (\"arg1\", \"arg2\")\n\n\ndef test_getfuncargnames_staticmethod_partial():\n \"\"\"Check getfuncargnames for staticmethods defined with functools.partial (#5701)\"\"\"\n import functools\n\n def check(arg1, arg2, i):\n raise NotImplementedError()\n\n class T:\n test_ok = staticmethod(functools.partial(check, i=2))\n\n values = getfuncargnames(T().test_ok, name=\"test_ok\")\n assert values == (\"arg1\", \"arg2\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_lookup_error_TestFillFixtures.test_funcarg_lookup_error.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_lookup_error_TestFillFixtures.test_funcarg_lookup_error.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 585, "end_line": 621, "span_ids": ["TestFillFixtures.test_funcarg_lookup_error"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_funcarg_lookup_error(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def a_fixture(): pass\n\n @pytest.fixture\n def b_fixture(): pass\n\n @pytest.fixture\n def c_fixture(): pass\n\n @pytest.fixture\n def d_fixture(): pass\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_lookup_error(unknown):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*ERROR at setup of test_lookup_error*\",\n \" def test_lookup_error(unknown):*\",\n \"E fixture 'unknown' not found\",\n \"> available fixtures:*a_fixture,*b_fixture,*c_fixture,*d_fixture*monkeypatch,*\",\n # sorted\n \"> use 'py*test --fixtures *' for help on them.\",\n \"*1 error*\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*INTERNAL*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_scope_TestFixtureMarker.test_dynamic_scope.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_scope_TestFixtureMarker.test_dynamic_scope.None_3", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2431, "end_line": 2471, "span_ids": ["TestFixtureMarker.test_dynamic_scope"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_dynamic_scope(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n\n def pytest_addoption(parser):\n parser.addoption(\"--extend-scope\", action=\"store_true\", default=False)\n\n\n def dynamic_scope(fixture_name, config):\n if config.getoption(\"--extend-scope\"):\n return \"session\"\n return \"function\"\n\n\n @pytest.fixture(scope=dynamic_scope)\n def dynamic_fixture(calls=[]):\n calls.append(\"call\")\n return len(calls)\n\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n def test_first(dynamic_fixture):\n assert dynamic_fixture == 1\n\n\n def test_second(dynamic_fixture):\n assert dynamic_fixture == 2\n\n \"\"\"\n )\n\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)\n\n reprec = pytester.inline_run(\"--extend-scope\")\n reprec.assertoutcome(passed=1, failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrized_fixture_teardown_order_TestFixtureMarker.test_parametrized_fixture_teardown_order.result_stdout_no_fnmatch_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_parametrized_fixture_teardown_order_TestFixtureMarker.test_parametrized_fixture_teardown_order.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2824, "end_line": 2863, "span_ids": ["TestFixtureMarker.test_parametrized_fixture_teardown_order"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_parametrized_fixture_teardown_order(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(params=[1,2], scope=\"class\")\n def param1(request):\n return request.param\n\n values = []\n\n class TestClass(object):\n @classmethod\n @pytest.fixture(scope=\"class\", autouse=True)\n def setup1(self, request, param1):\n values.append(1)\n request.addfinalizer(self.teardown1)\n @classmethod\n def teardown1(self):\n assert values.pop() == 1\n @pytest.fixture(scope=\"class\", autouse=True)\n def setup2(self, request, param1):\n values.append(2)\n request.addfinalizer(self.teardown2)\n @classmethod\n def teardown2(self):\n assert values.pop() == 2\n def test(self):\n pass\n\n def test_finish():\n assert not values\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *3 passed*\n \"\"\"\n )\n result.stdout.no_fnmatch_line(\"*error*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_testmodule_TestShowFixtures.test_show_fixtures_testmodule.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_testmodule_TestShowFixtures.test_show_fixtures_testmodule.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3353, "end_line": 3374, "span_ids": ["TestShowFixtures.test_show_fixtures_testmodule"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n def test_show_fixtures_testmodule(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def _arg0():\n \"\"\" hidden \"\"\"\n @pytest.fixture\n def arg1():\n \"\"\" hello world \"\"\"\n '''\n )\n result = pytester.runpytest(\"--fixtures\", p)\n result.stdout.fnmatch_lines(\n \"\"\"\n *tmpdir\n *fixtures defined from*\n *arg1*\n *hello world*\n \"\"\"\n )\n result.stdout.no_fnmatch_line(\"*arg0*\")", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_with_same_name_TestShowFixtures.test_fixture_disallow_twice.with_pytest_raises_ValueE.foo.raise_NotImplementedError": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestShowFixtures.test_show_fixtures_with_same_name_TestShowFixtures.test_fixture_disallow_twice.with_pytest_raises_ValueE.foo.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3564, "end_line": 3611, "span_ids": ["TestShowFixtures.test_fixture_disallow_twice", "TestShowFixtures.test_show_fixtures_with_same_name"], "tokens": 273}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestShowFixtures:\n\n def test_show_fixtures_with_same_name(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture\n def arg1():\n \"\"\"Hello World in conftest.py\"\"\"\n return \"Hello World\"\n '''\n )\n pytester.makepyfile(\n \"\"\"\n def test_foo(arg1):\n assert arg1 == \"Hello World\"\n \"\"\"\n )\n pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def arg1():\n \"\"\"Hi from test module\"\"\"\n return \"Hi\"\n def test_bar(arg1):\n assert arg1 == \"Hi\"\n '''\n )\n result = pytester.runpytest(\"--fixtures\")\n result.stdout.fnmatch_lines(\n \"\"\"\n * fixtures defined from conftest *\n arg1\n Hello World in conftest.py\n\n * fixtures defined from test_show_fixtures_with_same_name *\n arg1\n Hi from test module\n \"\"\"\n )\n\n def test_fixture_disallow_twice(self):\n \"\"\"Test that applying @pytest.fixture twice generates an error (#2334).\"\"\"\n with pytest.raises(ValueError):\n\n @pytest.fixture\n @pytest.fixture\n def foo():\n raise NotImplementedError()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_class_fixture_self_instance_test_call_fixture_function_error.with_pytest_raises_pytest.assert_fix_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_class_fixture_self_instance_test_call_fixture_function_error.with_pytest_raises_pytest.assert_fix_1", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4238, "end_line": 4279, "span_ids": ["TestScopeOrdering.test_class_fixture_self_instance", "test_call_fixture_function_error"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class TestScopeOrdering:\n\n def test_class_fixture_self_instance(self, pytester: Pytester) -> None:\n \"\"\"Check that plugin classes which implement fixtures receive the plugin instance\n as self (see #2270).\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_configure(config):\n config.pluginmanager.register(MyPlugin())\n\n class MyPlugin():\n def __init__(self):\n self.arg = 1\n\n @pytest.fixture(scope='function')\n def myfix(self):\n assert isinstance(self, MyPlugin)\n return self.arg\n \"\"\"\n )\n\n pytester.makepyfile(\n \"\"\"\n class TestClass(object):\n def test_1(self, myfix):\n assert myfix == 1\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n\ndef test_call_fixture_function_error():\n \"\"\"Check if an error is raised if a fixture function is called directly (#4545)\"\"\"\n\n @pytest.fixture\n def fix():\n raise NotImplementedError()\n\n with pytest.raises(pytest.fail.Exception):\n assert fix() == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_cyclic_reference_TestRaises.test_raises_cyclic_reference.assert_refcount_len_gc": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_cyclic_reference_TestRaises.test_raises_cyclic_reference.assert_refcount_len_gc", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 159, "end_line": 182, "span_ids": ["TestRaises.test_raises_cyclic_reference.T", "TestRaises.test_raises_cyclic_reference", "TestRaises.test_raises_cyclic_reference.T.__call__"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n @pytest.mark.parametrize(\"method\", [\"function\", \"function_match\", \"with\"])\n def test_raises_cyclic_reference(self, method):\n \"\"\"Ensure pytest.raises does not leave a reference cycle (#1965).\"\"\"\n import gc\n\n class T:\n def __call__(self):\n raise ValueError\n\n t = T()\n refcount = len(gc.get_referrers(t))\n\n if method == \"function\":\n pytest.raises(ValueError, t)\n elif method == \"function_match\":\n pytest.raises(ValueError, t).match(\"^$\")\n else:\n with pytest.raises(ValueError):\n t()\n\n # ensure both forms of pytest.raises don't leave exceptions in sys.exc_info()\n assert sys.exc_info() == (None, None, None)\n\n assert refcount == len(gc.get_referrers(t))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_different_lengths_TestAssert_reprcompare.test_list_wrap_for_multiple_lines.None_1": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_different_lengths_TestAssert_reprcompare.test_list_wrap_for_multiple_lines.None_1", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 451, "end_line": 487, "span_ids": ["TestAssert_reprcompare.test_list_different_lengths", "TestAssert_reprcompare.test_list_wrap_for_multiple_lines"], "tokens": 360}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_list_different_lengths(self) -> None:\n expl = callequal([0, 1], [0, 1, 2])\n assert expl is not None\n assert len(expl) > 1\n expl = callequal([0, 1, 2], [0, 1])\n assert expl is not None\n assert len(expl) > 1\n\n def test_list_wrap_for_multiple_lines(self) -> None:\n long_d = \"d\" * 80\n l1 = [\"a\", \"b\", \"c\"]\n l2 = [\"a\", \"b\", \"c\", long_d]\n diff = callequal(l1, l2, verbose=True)\n assert diff == [\n \"['a', 'b', 'c'] == ['a', 'b', 'c...dddddddddddd']\",\n \"Right contains one more item: '\" + long_d + \"'\",\n \"Full diff:\",\n \" [\",\n \" 'a',\",\n \" 'b',\",\n \" 'c',\",\n \"- '\" + long_d + \"',\",\n \" ]\",\n ]\n\n diff = callequal(l2, l1, verbose=True)\n assert diff == [\n \"['a', 'b', 'c...dddddddddddd'] == ['a', 'b', 'c']\",\n \"Left contains one more item: '\" + long_d + \"'\",\n \"Full diff:\",\n \" [\",\n \" 'a',\",\n \" 'b',\",\n \" 'c',\",\n \"+ '\" + long_d + \"',\",\n \" ]\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_wrap_for_width_rewrap_same_length_TestAssert_reprcompare.test_list_wrap_for_width_rewrap_same_length.assert_diff_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_wrap_for_width_rewrap_same_length_TestAssert_reprcompare.test_list_wrap_for_width_rewrap_same_length.assert_diff_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 489, "end_line": 506, "span_ids": ["TestAssert_reprcompare.test_list_wrap_for_width_rewrap_same_length"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_list_wrap_for_width_rewrap_same_length(self) -> None:\n long_a = \"a\" * 30\n long_b = \"b\" * 30\n long_c = \"c\" * 30\n l1 = [long_a, long_b, long_c]\n l2 = [long_b, long_c, long_a]\n diff = callequal(l1, l2, verbose=True)\n assert diff == [\n \"['aaaaaaaaaaa...cccccccccccc'] == ['bbbbbbbbbbb...aaaaaaaaaaaa']\",\n \"At index 0 diff: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' != 
'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'\",\n \"Full diff:\",\n \" [\",\n \"+ 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',\",\n \" 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',\",\n \" 'cccccccccccccccccccccccccccccc',\",\n \"- 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',\",\n \" ]\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_wrap_TestAssert_reprcompare.test_dict_wrap.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_wrap_TestAssert_reprcompare.test_dict_wrap.None_1", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 531, "end_line": 564, "span_ids": ["TestAssert_reprcompare.test_dict_wrap"], "tokens": 453}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_dict_wrap(self) -> None:\n d1 = {\"common\": 1, \"env\": {\"env1\": 1, \"env2\": 2}}\n d2 = {\"common\": 1, \"env\": {\"env1\": 1}}\n\n diff = callequal(d1, d2, verbose=True)\n assert diff == [\n \"{'common': 1,...1, 'env2': 2}} == {'common': 1,...: {'env1': 1}}\",\n \"Omitting 1 identical items, use -vv to show\",\n \"Differing items:\",\n \"{'env': {'env1': 1, 'env2': 2}} != {'env': {'env1': 1}}\",\n \"Full diff:\",\n \"- {'common': 1, 'env': {'env1': 1}}\",\n \"+ {'common': 1, 'env': {'env1': 1, 'env2': 2}}\",\n \"? +++++++++++\",\n ]\n\n long_a = \"a\" * 80\n sub = {\"long_a\": long_a, \"sub1\": {\"long_a\": \"substring that gets wrapped \" * 2}}\n d1 = {\"env\": {\"sub\": sub}}\n d2 = {\"env\": {\"sub\": sub}, \"new\": 1}\n diff = callequal(d1, d2, verbose=True)\n assert diff == [\n \"{'env': {'sub... 
wrapped '}}}} == {'env': {'sub...}}}, 'new': 1}\",\n \"Omitting 1 identical items, use -vv to show\",\n \"Right contains 1 more item:\",\n \"{'new': 1}\",\n \"Full diff:\",\n \" {\",\n \" 'env': {'sub': {'long_a': '\" + long_a + \"',\",\n \" 'sub1': {'long_a': 'substring that gets wrapped substring '\",\n \" 'that gets wrapped '}}},\",\n \"- 'new': 1,\",\n \" }\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_TestAssert_reprcompare.test_dict_omitting.for_line_in_lines_1_.assert_b_not_in_line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_dict_TestAssert_reprcompare.test_dict_omitting.for_line_in_lines_1_.assert_b_not_in_line", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 558, "end_line": 569, "span_ids": ["TestAssert_reprcompare.test_dict_omitting", "TestAssert_reprcompare.test_dict"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_dict(self) -> None:\n expl = callequal({\"a\": 0}, {\"a\": 1})\n assert expl is not None\n assert len(expl) > 1\n\n def test_dict_omitting(self) -> None:\n lines = callequal({\"a\": 0, \"b\": 1}, {\"a\": 1, \"b\": 1})\n assert lines is not None\n assert lines[1].startswith(\"Omitting 1 identical item\")\n assert \"Common items\" not in lines\n for line in lines[1:]:\n assert \"b\" not in line", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_pytest_assertrepr_compare_integration_test_pytest_assertrepr_compare_integration.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_pytest_assertrepr_compare_integration_test_pytest_assertrepr_compare_integration.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1266, "end_line": 1285, "span_ids": ["test_pytest_assertrepr_compare_integration"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_assertrepr_compare_integration(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n x = set(range(100))\n y = x.copy()\n y.remove(50)\n assert x == y\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*def test_hello():*\",\n \"*assert x == y*\",\n 
\"*E*Extra items*left*\",\n \"*E*50*\",\n \"*= 1 failed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertion_options_test_triple_quoted_string_issue113.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertion_options_test_triple_quoted_string_issue113.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1336, "end_line": 1359, "span_ids": ["test_assertion_options", "test_triple_quoted_string_issue113"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_assertion_options(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n x = 3\n assert x == 4\n \"\"\"\n )\n result = pytester.runpytest()\n assert \"3 == 4\" in result.stdout.str()\n result = pytester.runpytest_subprocess(\"--assert=plain\")\n result.stdout.no_fnmatch_line(\"*3 == 4*\")\n\n\ndef test_triple_quoted_string_issue113(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n assert \"\" == '''\n '''\"\"\"\n )\n result = pytester.runpytest(\"--fulltrace\")\n result.stdout.fnmatch_lines([\"*1 failed*\"])\n result.stdout.no_fnmatch_line(\"*SyntaxError*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assert_indirect_tuple_no_warning_test_exit_from_assertrepr_compare.with_pytest_raises_outcom.callequal_1_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assert_indirect_tuple_no_warning_test_exit_from_assertrepr_compare.with_pytest_raises_outcom.callequal_1_1_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1562, "end_line": 1639, "span_ids": ["test_assert_indirect_tuple_no_warning", "test_exit_from_assertrepr_compare", "test_raise_unprintable_assertion_error", "test_raise_assertion_error_raisin_repr", "test_assert_with_unicode", "test_issue_1944"], "tokens": 477}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_assert_indirect_tuple_no_warning(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_tuple():\n tpl = ('foo', 'bar')\n assert tpl\n \"\"\"\n )\n result = pytester.runpytest()\n output = \"\\n\".join(result.stdout.lines)\n assert \"WR1\" not in output\n\n\ndef test_assert_with_unicode(pytester: 
Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\\\n def test_unicode():\n assert '\uc720\ub2c8\ucf54\ub4dc' == 'Unicode'\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*AssertionError*\"])\n\n\ndef test_raise_unprintable_assertion_error(pytester: Pytester) -> None:\n pytester.makepyfile(\n r\"\"\"\n def test_raise_assertion_error():\n raise AssertionError('\\xff')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [r\"> raise AssertionError('\\xff')\", \"E AssertionError: *\"]\n )\n\n\ndef test_raise_assertion_error_raisin_repr(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class RaisingRepr(object):\n def __repr__(self):\n raise Exception()\n def test_raising_repr():\n raise AssertionError(RaisingRepr())\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"E AssertionError: \"]\n )\n\n\ndef test_issue_1944(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def f():\n return\n\n assert f() == 10\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 error*\"])\n assert (\n \"AttributeError: 'Module' object has no attribute '_obj'\"\n not in result.stdout.str()\n )\n\n\ndef test_exit_from_assertrepr_compare(monkeypatch) -> None:\n def raise_exit(obj):\n outcomes.exit(\"Quitting debugger\")\n\n monkeypatch.setattr(util, \"istext\", raise_exit)\n\n with pytest.raises(outcomes.Exit, match=\"Quitting debugger\"):\n callequal(1, 1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertion_location_with_coverage_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_assertion_location_with_coverage_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1642, "end_line": 1660, "span_ids": ["test_assertion_location_with_coverage"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_assertion_location_with_coverage(pytester: Pytester) -> None:\n \"\"\"This used to report the wrong location when run with coverage (#5754).\"\"\"\n p = pytester.makepyfile(\n \"\"\"\n def test():\n assert False, 1\n assert False, 2\n \"\"\"\n )\n result = pytester.runpytest(str(p))\n result.stdout.fnmatch_lines(\n [\n \"> assert False, 1\",\n \"E AssertionError: 1\",\n \"E assert False\",\n \"*= 1 failed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_dont_rewrite_TestAssertionRewrite.test_dont_rewrite_plugin.assert_warning_not_in_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_dont_rewrite_TestAssertionRewrite.test_dont_rewrite_plugin.assert_warning_not_in_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 110, "end_line": 125, "span_ids": ["TestAssertionRewrite.test_dont_rewrite", "TestAssertionRewrite.test_dont_rewrite_plugin"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_dont_rewrite(self) -> None:\n s = \"\"\"'PYTEST_DONT_REWRITE'\\nassert 14\"\"\"\n m = rewrite(s)\n assert len(m.body) == 2\n assert isinstance(m.body[1], ast.Assert)\n assert m.body[1].msg is None\n\n def test_dont_rewrite_plugin(self, pytester: Pytester) -> None:\n contents = {\n \"conftest.py\": \"pytest_plugins = 'plugin'; import plugin\",\n \"plugin.py\": \"'PYTEST_DONT_REWRITE'\",\n \"test_foo.py\": \"def test_foo(): pass\",\n }\n pytester.makepyfile(**contents)\n result = pytester.runpytest_subprocess()\n assert \"warning\" not in \"\".join(result.outlines)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 940, "end_line": 953, "span_ids": ["TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_rewrite_warning_using_pytest_plugins(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"conftest.py\": \"pytest_plugins = ['core', 'gui', 'sci']\",\n \"core.py\": \"\",\n \"gui.py\": \"pytest_plugins = ['core', 'sci']\",\n \"sci.py\": \"pytest_plugins = ['core']\",\n \"test_rewrite_warning_pytest_plugins.py\": \"def test(): pass\",\n }\n )\n pytester.chdir()\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*= 1 passed in *=*\"])\n result.stdout.no_fnmatch_line(\"*pytest-warning summary*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_env_var_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_env_var.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_env_var_TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_env_var.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 955, "end_line": 973, "span_ids": ["TestRewriteOnImport.test_rewrite_warning_using_pytest_plugins_env_var"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_rewrite_warning_using_pytest_plugins_env_var(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n monkeypatch.setenv(\"PYTEST_PLUGINS\", \"plugin\")\n pytester.makepyfile(\n **{\n \"plugin.py\": \"\",\n \"test_rewrite_warning_using_pytest_plugins_env_var.py\": \"\"\"\n import plugin\n pytest_plugins = ['plugin']\n def test():\n pass\n \"\"\",\n }\n )\n pytester.chdir()\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*= 1 passed in *=*\"])\n result.stdout.no_fnmatch_line(\"*pytest-warning summary*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_issue731_test_issue731.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_issue731_test_issue731.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1170, "end_line": 1186, "span_ids": ["test_issue731"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_issue731(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class LongReprWithBraces(object):\n def __repr__(self):\n return 'LongReprWithBraces({' + ('a' * 80) + '}' + ('a' * 120) + ')'\n\n def some_method(self):\n return False\n\n def test_long_repr():\n obj = LongReprWithBraces()\n assert obj.some_method()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*unbalanced braces*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_get_assertion_exprs_test_get_assertion_exprs.assert__get_assertion_exp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_get_assertion_exprs_test_get_assertion_exprs.assert__get_assertion_exp", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1527, "end_line": 1611, "span_ids": ["test_get_assertion_exprs"], "tokens": 698}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"src\", \"expected\"),\n (\n # fmt: off\n pytest.param(b\"\", {}, id=\"trivial\"),\n pytest.param(\n b\"def x(): assert 1\\n\",\n {1: \"1\"},\n id=\"assert statement not on own line\",\n ),\n pytest.param(\n b\"def x():\\n\"\n b\" assert 1\\n\"\n b\" assert 1+2\\n\",\n {2: \"1\", 3: \"1+2\"},\n id=\"multiple assertions\",\n ),\n pytest.param(\n # changes in encoding cause the byte offsets to be different\n \"# -*- coding: latin1\\n\"\n \"def \u00c0\u00c0\u00c0\u00c0\u00c0(): assert 1\\n\".encode(\"latin1\"),\n {2: \"1\"},\n id=\"latin1 encoded on first line\\n\",\n ),\n pytest.param(\n # using the default utf-8 encoding\n \"def \u00c0\u00c0\u00c0\u00c0\u00c0(): assert 1\\n\".encode(),\n {1: \"1\"},\n id=\"utf-8 encoded on first line\",\n ),\n pytest.param(\n b\"def x():\\n\"\n b\" assert (\\n\"\n b\" 1 + 2 # comment\\n\"\n b\" )\\n\",\n {2: \"(\\n 1 + 2 # comment\\n )\"},\n id=\"multi-line assertion\",\n ),\n pytest.param(\n b\"def x():\\n\"\n b\" assert y == [\\n\"\n b\" 1, 2, 3\\n\"\n b\" ]\\n\",\n {2: \"y == [\\n 1, 2, 3\\n ]\"},\n id=\"multi line assert with list continuation\",\n ),\n pytest.param(\n b\"def x():\\n\"\n b\" assert 1 + \\\\\\n\"\n b\" 2\\n\",\n {2: \"1 + \\\\\\n 2\"},\n id=\"backslash continuation\",\n ),\n pytest.param(\n b\"def x():\\n\"\n b\" assert x, y\\n\",\n {2: \"x\"},\n id=\"assertion with message\",\n ),\n pytest.param(\n b\"def x():\\n\"\n b\" assert (\\n\"\n b\" f(1, 2, 3)\\n\"\n b\" ), 'f did not work!'\\n\",\n {2: \"(\\n f(1, 2, 3)\\n )\"},\n id=\"assertion with message, test spanning multiple lines\",\n ),\n pytest.param(\n b\"def x():\\n\"\n b\" assert \\\\\\n\"\n b\" x\\\\\\n\"\n b\" , 'failure message'\\n\",\n {2: \"x\"},\n id=\"escaped newlines plus message\",\n ),\n pytest.param(\n b\"def x(): assert 5\",\n {1: \"5\"},\n id=\"no newline at end of file\",\n ),\n # fmt: on\n ),\n)\ndef test_get_assertion_exprs(src, expected) -> None:\n assert _get_assertion_exprs(src) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_try_makedirs_test_try_makedirs.assert_exc_info_value_err": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_test_try_makedirs_test_try_makedirs.assert_exc_info_value_err", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": 
"test", "start_line": 1549, "end_line": 1586, "span_ids": ["test_try_makedirs"], "tokens": 305}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_try_makedirs(monkeypatch, tmp_path: Path) -> None:\n from _pytest.assertion.rewrite import try_makedirs\n\n p = tmp_path / \"foo\"\n\n # create\n assert try_makedirs(p)\n assert p.is_dir()\n\n # already exist\n assert try_makedirs(p)\n\n # monkeypatch to simulate all error situations\n def fake_mkdir(p, exist_ok=False, *, exc):\n assert isinstance(p, str)\n raise exc\n\n monkeypatch.setattr(os, \"makedirs\", partial(fake_mkdir, exc=FileNotFoundError()))\n assert not try_makedirs(p)\n\n monkeypatch.setattr(os, \"makedirs\", partial(fake_mkdir, exc=NotADirectoryError()))\n assert not try_makedirs(p)\n\n monkeypatch.setattr(os, \"makedirs\", partial(fake_mkdir, exc=PermissionError()))\n assert not try_makedirs(p)\n\n err = OSError()\n err.errno = errno.EROFS\n monkeypatch.setattr(os, \"makedirs\", partial(fake_mkdir, exc=err))\n assert not try_makedirs(p)\n\n # unhandled OSError should raise\n err = OSError()\n err.errno = errno.ECHILD\n monkeypatch.setattr(os, \"makedirs\", partial(fake_mkdir, exc=err))\n with pytest.raises(OSError) as exc_info:\n try_makedirs(p)\n assert exc_info.value.errno == errno.ECHILD", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestPyCacheDir_TestPyCacheDir.test_get_cache_dir.assert_get_cache_dir_Path": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestPyCacheDir_TestPyCacheDir.test_get_cache_dir.assert_get_cache_dir_Path", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1654, "end_line": 1671, "span_ids": ["TestPyCacheDir", "TestPyCacheDir.test_get_cache_dir"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPyCacheDir:\n @pytest.mark.parametrize(\n \"prefix, source, expected\",\n [\n (\"c:/tmp/pycs\", \"d:/projects/src/foo.py\", \"c:/tmp/pycs/projects/src\"),\n (None, \"d:/projects/src/foo.py\", \"d:/projects/src/__pycache__\"),\n (\"/tmp/pycs\", \"/home/projects/src/foo.py\", \"/tmp/pycs/home/projects/src\"),\n (None, \"/home/projects/src/foo.py\", \"/home/projects/src/__pycache__\"),\n ],\n )\n def test_get_cache_dir(self, monkeypatch, prefix, source, expected) -> None:\n monkeypatch.delenv(\"PYTHONPYCACHEPREFIX\", raising=False)\n\n if prefix is not None and sys.version_info < (3, 8):\n pytest.skip(\"pycache_prefix not available in py<38\")\n monkeypatch.setattr(sys, \"pycache_prefix\", prefix, raising=False)\n\n assert get_cache_dir(Path(source)) == Path(expected)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestPyCacheDir.test_sys_pycache_prefix_integration_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestPyCacheDir.test_sys_pycache_prefix_integration_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1673, "end_line": 1711, "span_ids": ["TestPyCacheDir.test_sys_pycache_prefix_integration"], "tokens": 351}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPyCacheDir:\n\n @pytest.mark.skipif(\n sys.version_info < (3, 8), reason=\"pycache_prefix not available in py<38\"\n )\n def test_sys_pycache_prefix_integration(\n self, tmp_path, monkeypatch, pytester: Pytester\n ) -> None:\n \"\"\"Integration test for sys.pycache_prefix (#4730).\"\"\"\n pycache_prefix = tmp_path / \"my/pycs\"\n monkeypatch.setattr(sys, \"pycache_prefix\", str(pycache_prefix))\n monkeypatch.setattr(sys, \"dont_write_bytecode\", False)\n\n pytester.makepyfile(\n **{\n \"src/test_foo.py\": \"\"\"\n import bar\n def test_foo():\n pass\n \"\"\",\n \"src/bar/__init__.py\": \"\",\n }\n )\n result = pytester.runpytest()\n assert result.ret == 0\n\n test_foo = pytester.path.joinpath(\"src/test_foo.py\")\n bar_init = pytester.path.joinpath(\"src/bar/__init__.py\")\n assert test_foo.is_file()\n assert bar_init.is_file()\n\n # test file: rewritten, custom pytest cache tag\n test_foo_pyc = get_cache_dir(test_foo) / (\"test_foo\" + PYC_TAIL)\n assert test_foo_pyc.is_file()\n\n # normal file: not touched by pytest, normal cache tag\n bar_init_pyc = get_cache_dir(bar_init) / \"__init__.{cache_tag}.pyc\".format(\n cache_tag=sys.implementation.cache_tag\n )\n assert bar_init_pyc.is_file()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_os_TestNewAPI.test_cache_writefail_cachfile_silent.cache_set_test_broken_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_os_TestNewAPI.test_cache_writefail_cachfile_silent.cache_set_test_broken_", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 45, "span_ids": ["impl", "TestNewAPI.test_config_cache_makedir", "TestNewAPI", "TestNewAPI.test_cache_writefail_cachfile_silent", "imports", "TestNewAPI.test_config_cache_dataerror"], "tokens": 350}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport shutil\nimport sys\nfrom pathlib 
import Path\nfrom typing import List\n\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\nfrom _pytest.tmpdir import TempPathFactory\n\npytest_plugins = (\"pytester\",)\n\n\nclass TestNewAPI:\n def test_config_cache_makedir(self, pytester: Pytester) -> None:\n pytester.makeini(\"[pytest]\")\n config = pytester.parseconfigure()\n assert config.cache is not None\n with pytest.raises(ValueError):\n config.cache.makedir(\"key/name\")\n\n p = config.cache.makedir(\"name\")\n assert p.check()\n\n def test_config_cache_dataerror(self, pytester: Pytester) -> None:\n pytester.makeini(\"[pytest]\")\n config = pytester.parseconfigure()\n assert config.cache is not None\n cache = config.cache\n pytest.raises(TypeError, lambda: cache.set(\"key/name\", cache))\n config.cache.set(\"key/name\", 0)\n config.cache._getvaluepath(\"key/name\").write_bytes(b\"123invalid\")\n val = config.cache.get(\"key/name\", -2)\n assert val == -2\n\n @pytest.mark.filterwarnings(\"ignore:could not create cache path\")\n def test_cache_writefail_cachfile_silent(self, pytester: Pytester) -> None:\n pytester.makeini(\"[pytest]\")\n pytester.path.joinpath(\".pytest_cache\").write_text(\"gone wrong\")\n config = pytester.parseconfigure()\n cache = config.cache\n assert cache is not None\n cache.set(\"test/broken\", [])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_failedfirst_order_TestLastFailed.test_lastfailed_failedfirst_order.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_failedfirst_order_TestLastFailed.test_lastfailed_failedfirst_order.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 330, "end_line": 341, "span_ids": ["TestLastFailed.test_lastfailed_failedfirst_order"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_failedfirst_order(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_a=\"def test_always_passes(): assert 1\",\n test_b=\"def test_always_fails(): assert 0\",\n )\n result = pytester.runpytest()\n # Test order will be collection order; alphabetical\n result.stdout.fnmatch_lines([\"test_a.py*\", \"test_b.py*\"])\n result = pytester.runpytest(\"--lf\", \"--ff\")\n # Test order will be failing tests first\n result.stdout.fnmatch_lines([\"test_b.py*\"])\n result.stdout.no_fnmatch_line(\"*test_a.py*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewFirst.test_newfirst_parametrize_TestNewFirst.test_newfirst_parametrize.None_7": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewFirst.test_newfirst_parametrize_TestNewFirst.test_newfirst_parametrize.None_7", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 1112, "end_line": 1173, "span_ids": ["TestNewFirst.test_newfirst_parametrize"], "tokens": 606}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewFirst:\n\n def test_newfirst_parametrize(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"test_1/test_1.py\": \"\"\"\n import pytest\n @pytest.mark.parametrize('num', [1, 2])\n def test_1(num): assert num\n \"\"\",\n \"test_2/test_2.py\": \"\"\"\n import pytest\n @pytest.mark.parametrize('num', [1, 2])\n def test_1(num): assert num\n \"\"\",\n }\n )\n\n p1 = pytester.path.joinpath(\"test_1/test_1.py\")\n os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))\n\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n [\n \"*test_1/test_1.py::test_1[1*\",\n \"*test_1/test_1.py::test_1[2*\",\n \"*test_2/test_2.py::test_1[1*\",\n \"*test_2/test_2.py::test_1[2*\",\n ]\n )\n\n result = pytester.runpytest(\"-v\", \"--nf\")\n result.stdout.fnmatch_lines(\n [\n \"*test_2/test_2.py::test_1[1*\",\n \"*test_2/test_2.py::test_1[2*\",\n \"*test_1/test_1.py::test_1[1*\",\n \"*test_1/test_1.py::test_1[2*\",\n ]\n )\n\n p1.write_text(\n \"import pytest\\n\"\n \"@pytest.mark.parametrize('num', [1, 2, 3])\\n\"\n \"def test_1(num): assert num\\n\"\n )\n os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))\n\n # Running only a subset does not forget about existing ones.\n result = pytester.runpytest(\"-v\", \"--nf\", \"test_2/test_2.py\")\n result.stdout.fnmatch_lines(\n [\"*test_2/test_2.py::test_1[1*\", \"*test_2/test_2.py::test_1[2*\"]\n )\n\n result = pytester.runpytest(\"-v\", \"--nf\")\n result.stdout.fnmatch_lines(\n [\n \"*test_1/test_1.py::test_1[3*\",\n \"*test_2/test_2.py::test_1[1*\",\n \"*test_2/test_2.py::test_1[2*\",\n \"*test_1/test_1.py::test_1[1*\",\n \"*test_1/test_1.py::test_1[2*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_disabled_capture_fixture_TestCaptureFixture.test_disabled_capture_fixture.if_no_capture_.else_.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_disabled_capture_fixture_TestCaptureFixture.test_disabled_capture_fixture.if_no_capture_.else_.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 620, "end_line": 648, "span_ids": ["TestCaptureFixture.test_disabled_capture_fixture"], "tokens": 263}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n @pytest.mark.parametrize(\"fixture\", [\"capsys\", \"capfd\"])\n @pytest.mark.parametrize(\"no_capture\", [True, False])\n def test_disabled_capture_fixture(\n self, pytester: Pytester, fixture: str, no_capture: bool\n ) -> None:\n pytester.makepyfile(\n \"\"\"\\\n def test_disabled({fixture}):\n print('captured before')\n with {fixture}.disabled():\n print('while capture is disabled')\n print('captured after')\n assert {fixture}.readouterr() == ('captured before\\\\ncaptured after\\\\n', '')\n\n def test_normal():\n print('test_normal executed')\n \"\"\".format(\n fixture=fixture\n )\n )\n args = (\"-s\",) if no_capture else ()\n result = pytester.runpytest_subprocess(*args)\n result.stdout.fnmatch_lines([\"*while capture is disabled*\", \"*= 2 passed in *\"])\n result.stdout.no_fnmatch_line(\"*captured before*\")\n result.stdout.no_fnmatch_line(\"*captured after*\")\n if no_capture:\n assert \"test_normal executed\" in result.stdout.str()\n else:\n result.stdout.no_fnmatch_line(\"*test_normal executed*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_fixture_use_by_other_fixtures_TestCaptureFixture.test_fixture_use_by_other_fixtures.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_fixture_use_by_other_fixtures_TestCaptureFixture.test_fixture_use_by_other_fixtures.None_3", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 678, "end_line": 712, "span_ids": ["TestCaptureFixture.test_fixture_use_by_other_fixtures"], "tokens": 285}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n @pytest.mark.parametrize(\"fixture\", [\"capsys\", \"capfd\"])\n def test_fixture_use_by_other_fixtures(self, pytester: Pytester, fixture) -> None:\n \"\"\"Ensure that capsys and capfd can be used by other fixtures during\n setup and teardown.\"\"\"\n pytester.makepyfile(\n \"\"\"\\\n import sys\n import pytest\n\n @pytest.fixture\n def captured_print({fixture}):\n print('stdout contents begin')\n print('stderr contents begin', file=sys.stderr)\n out, err = {fixture}.readouterr()\n\n yield out, err\n\n print('stdout contents end')\n print('stderr contents end', file=sys.stderr)\n out, err = {fixture}.readouterr()\n assert out == 'stdout contents end\\\\n'\n assert err == 'stderr contents end\\\\n'\n\n def test_captured_print(captured_print):\n out, err = captured_print\n assert out == 'stdout contents begin\\\\n'\n assert err == 'stderr contents begin\\\\n'\n \"\"\".format(\n fixture=fixture\n )\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result.stdout.no_fnmatch_line(\"*stdout contents begin*\")\n result.stdout.no_fnmatch_line(\"*stderr 
contents begin*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_ignored_virtualenvs_TestCollectFS.test_ignored_virtualenvs.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectFS.test_ignored_virtualenvs_TestCollectFS.test_ignored_virtualenvs.None_1", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 147, "end_line": 172, "span_ids": ["TestCollectFS.test_ignored_virtualenvs"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectFS:\n\n @pytest.mark.parametrize(\n \"fname\",\n (\n \"activate\",\n \"activate.csh\",\n \"activate.fish\",\n \"Activate\",\n \"Activate.bat\",\n \"Activate.ps1\",\n ),\n )\n def test_ignored_virtualenvs(self, pytester: Pytester, fname: str) -> None:\n bindir = \"Scripts\" if sys.platform.startswith(\"win\") else \"bin\"\n ensure_file(pytester.path / \"virtual\" / bindir / fname)\n testfile = ensure_file(pytester.path / \"virtual\" / \"test_invenv.py\")\n testfile.write_text(\"def test_hello(): pass\")\n\n # by default, ignore tests inside a virtualenv\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*test_invenv*\")\n # allow test collection if user insists\n result = pytester.runpytest(\"--collect-in-virtualenv\")\n assert \"test_invenv\" in result.stdout.str()\n # allow test collection if user directly passes in the directory\n result = pytester.runpytest(\"virtual\")\n assert \"test_invenv\" in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_with_maxfail_smaller_than_n_errors_test_exit_on_collection_with_maxfail_smaller_than_n_errors.res_stdout_no_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_with_maxfail_smaller_than_n_errors_test_exit_on_collection_with_maxfail_smaller_than_n_errors.res_stdout_no_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 896, "end_line": 916, "span_ids": ["test_exit_on_collection_with_maxfail_smaller_than_n_errors"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_exit_on_collection_with_maxfail_smaller_than_n_errors(\n pytester: Pytester,\n) -> None:\n \"\"\"\n Verify collection is aborted once maxfail 
errors are encountered ignoring\n further modules which would cause more collection errors.\n \"\"\"\n pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)\n\n res = pytester.runpytest(\"--maxfail=1\")\n assert res.ret == 1\n res.stdout.fnmatch_lines(\n [\n \"collected 1 item / 1 error\",\n \"*ERROR collecting test_02_import_error.py*\",\n \"*No module named *asdfa*\",\n \"*! stopping after 1 failures !*\",\n \"*= 1 error in *\",\n ]\n )\n res.stdout.no_fnmatch_line(\"*test_03*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_init_tests_test_collect_init_tests.None_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_init_tests_test_collect_init_tests.None_7", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1005, "end_line": 1063, "span_ids": ["test_collect_init_tests"], "tokens": 514}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_init_tests(pytester: Pytester) -> None:\n \"\"\"Check that we collect files from __init__.py files when they patch the 'python_files' (#3773)\"\"\"\n p = pytester.copy_example(\"collect/collect_init_tests\")\n result = pytester.runpytest(p, \"--collect-only\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"\",\n \" \",\n \" \",\n \" \",\n \" \",\n ]\n )\n result = pytester.runpytest(\"./tests\", \"--collect-only\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"\",\n \" \",\n \" \",\n \" \",\n \" \",\n ]\n )\n # Ignores duplicates with \".\" and pkginit (#4310).\n result = pytester.runpytest(\"./tests\", \".\", \"--collect-only\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"\",\n \" \",\n \" \",\n \" \",\n \" \",\n ]\n )\n # Same as before, but different order.\n result = pytester.runpytest(\".\", \"tests\", \"--collect-only\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"\",\n \" \",\n \" \",\n \" \",\n \" \",\n ]\n )\n result = pytester.runpytest(\"./tests/test_foo.py\", \"--collect-only\")\n result.stdout.fnmatch_lines(\n [\"\", \" \", \" \"]\n )\n result.stdout.no_fnmatch_line(\"*test_init*\")\n result = pytester.runpytest(\"./tests/__init__.py\", \"--collect-only\")\n result.stdout.fnmatch_lines(\n [\"\", \" \", \" \"]\n )\n result.stdout.no_fnmatch_line(\"*test_foo*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pkg_init_only_test_collect_pkg_init_only.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_pkg_init_only_test_collect_pkg_init_only.None_2", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": 
"text/x-python", "category": "test", "start_line": 1268, "end_line": 1277, "span_ids": ["test_collect_pkg_init_only"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_pkg_init_only(pytester: Pytester) -> None:\n subdir = pytester.mkdir(\"sub\")\n init = subdir.joinpath(\"__init__.py\")\n init.write_text(\"def test_init(): pass\")\n\n result = pytester.runpytest(str(init))\n result.stdout.fnmatch_lines([\"*no tests ran in*\"])\n\n result = pytester.runpytest(\"-v\", \"-o\", \"python_files=*.py\", str(init))\n result.stdout.fnmatch_lines([\"sub/__init__.py::test_init PASSED*\", \"*1 passed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_get_real_func_partial_test_is_generator_asyncio.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_get_real_func_partial_test_is_generator_asyncio.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_compat.py", "file_name": "test_compat.py", "file_type": "text/x-python", "category": "test", "start_line": 84, "end_line": 110, "span_ids": ["test_is_generator_asyncio", "test_get_real_func_partial"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_get_real_func_partial() -> None:\n \"\"\"Test get_real_func handles partial instances correctly\"\"\"\n\n def foo(x):\n return x\n\n assert get_real_func(foo) is foo\n assert get_real_func(partial(foo)) is foo\n\n\ndef test_is_generator_asyncio(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from _pytest.compat import is_generator\n import asyncio\n @asyncio.coroutine\n def baz():\n yield from [1,2,3]\n\n def test_is_generator_asyncio():\n assert not is_generator(baz)\n \"\"\"\n )\n # avoid importing asyncio into pytest's own process,\n # which in turn imports logging (#8)\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_formatter_uses_py_get_terminal_width_test_help_formatter_uses_py_get_terminal_width.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_formatter_uses_py_get_terminal_width_test_help_formatter_uses_py_get_terminal_width.None_2", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1719, "end_line": 1731, "span_ids": 
["test_help_formatter_uses_py_get_terminal_width"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_help_formatter_uses_py_get_terminal_width(monkeypatch: MonkeyPatch) -> None:\n from _pytest.config.argparsing import DropShorterLongHelpFormatter\n\n monkeypatch.setenv(\"COLUMNS\", \"90\")\n formatter = DropShorterLongHelpFormatter(\"prog\")\n assert formatter._width == 90\n\n monkeypatch.setattr(\"_pytest._io.get_terminal_width\", lambda: 160)\n formatter = DropShorterLongHelpFormatter(\"prog\")\n assert formatter._width == 160\n\n formatter = DropShorterLongHelpFormatter(\"prog\", width=42)\n assert formatter._width == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_confcutdir_test_conftest_confcutdir.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_confcutdir_test_conftest_confcutdir.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 201, "end_line": 214, "span_ids": ["test_conftest_confcutdir"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_confcutdir(pytester: Pytester) -> None:\n pytester.makeconftest(\"assert 0\")\n x = pytester.mkdir(\"x\")\n x.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def pytest_addoption(parser):\n parser.addoption(\"--xyz\", action=\"store_true\")\n \"\"\"\n )\n )\n result = pytester.runpytest(\"-h\", \"--confcutdir=%s\" % x, x)\n result.stdout.fnmatch_lines([\"*--xyz*\"])\n result.stdout.no_fnmatch_line(\"*warning: could not load initial*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_badcase_test_conftest_badcase.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_badcase_test_conftest_badcase.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 295, "end_line": 307, "span_ids": ["test_conftest_badcase"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(\n os.path.normcase(\"x\") != os.path.normcase(\"X\"),\n reason=\"only relevant for case insensitive file systems\",\n)\ndef test_conftest_badcase(pytester: Pytester) -> None:\n \"\"\"Check conftest.py loading when directory casing is wrong (#5792).\"\"\"\n pytester.path.joinpath(\"JenkinsRoot/test\").mkdir(parents=True)\n source = {\"setup.py\": \"\", \"test/__init__.py\": \"\", \"test/conftest.py\": \"\"}\n pytester.makepyfile(**{\"JenkinsRoot/%s\" % k: v for k, v in source.items()})\n\n os.chdir(pytester.path.joinpath(\"jenkinsroot/test\"))\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_uppercase_test_no_conftest.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_uppercase_test_no_conftest.None_1", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 310, "end_line": 326, "span_ids": ["test_conftest_uppercase", "test_no_conftest"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_uppercase(pytester: Pytester) -> None:\n \"\"\"Check conftest.py whose qualified name contains uppercase characters (#5819)\"\"\"\n source = {\"__init__.py\": \"\", \"Foo/conftest.py\": \"\", \"Foo/__init__.py\": \"\"}\n pytester.makepyfile(**source)\n\n os.chdir(pytester.path)\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n\n\ndef test_no_conftest(pytester: Pytester) -> None:\n pytester.makeconftest(\"assert 0\")\n result = pytester.runpytest(\"--noconftest\")\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n\n result = pytester.runpytest()\n assert result.ret == ExitCode.USAGE_ERROR", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_docstring_partial_context_around_error_TestDoctests.test_docstring_partial_context_around_error.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_docstring_partial_context_around_error_TestDoctests.test_docstring_partial_context_around_error.None_3", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 245, "end_line": 287, "span_ids": ["TestDoctests.test_docstring_partial_context_around_error"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_docstring_partial_context_around_error(self, pytester: Pytester):\n \"\"\"Test that we show some context before the actual line of a failing\n doctest.\n \"\"\"\n pytester.makepyfile(\n '''\n def foo():\n \"\"\"\n text-line-1\n text-line-2\n text-line-3\n text-line-4\n text-line-5\n text-line-6\n text-line-7\n text-line-8\n text-line-9\n text-line-10\n text-line-11\n >>> 1 + 1\n 3\n\n text-line-after\n \"\"\"\n '''\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\n \"*docstring_partial_context_around_error*\",\n \"005*text-line-3\",\n \"006*text-line-4\",\n \"013*text-line-11\",\n \"014*>>> 1 + 1\",\n \"Expected:\",\n \" 3\",\n \"Got:\",\n \" 2\",\n ]\n )\n # lines below should be trimmed out\n result.stdout.no_fnmatch_line(\"*text-line-2*\")\n result.stdout.no_fnmatch_line(\"*text-line-after*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_re_TestLiterals.test_number_re.for_s_in_1_abc_.assert__number_re_match_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestLiterals.test_number_re_TestLiterals.test_number_re.for_s_in_1_abc_.assert__number_re_match_s", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 842, "end_line": 870, "span_ids": ["TestLiterals.test_number_re"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLiterals:\n\n def test_number_re(self) -> None:\n _number_re = _get_checker()._number_re # type: ignore\n for s in [\n \"1.\",\n \"+1.\",\n \"-1.\",\n \".1\",\n \"+.1\",\n \"-.1\",\n \"0.1\",\n \"+0.1\",\n \"-0.1\",\n \"1e5\",\n \"+1e5\",\n \"1e+5\",\n \"+1e+5\",\n \"1e-5\",\n \"+1e-5\",\n \"-1e-5\",\n \"1.2e3\",\n \"-1.2e-3\",\n ]:\n print(s)\n m = _number_re.match(s)\n assert m is not None\n assert float(m.group()) == pytest.approx(float(s))\n for s in [\"1\", \"abc\"]:\n print(s)\n assert _number_re.match(s) is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_os_schema.with_fn_open_as_f_.return.xmlschema_XMLSchema_f_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_os_schema.with_fn_open_as_f_.return.xmlschema_XMLSchema_f_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 32, "span_ids": ["schema", "imports"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport platform\nfrom datetime import datetime\nfrom pathlib import Path\nfrom typing import cast\nfrom typing import List\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\nfrom xml.dom import minidom\n\nimport xmlschema\n\nimport pytest\nfrom _pytest.config import Config\nfrom _pytest.junitxml import bin_xml_escape\nfrom _pytest.junitxml import LogXML\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\nfrom _pytest.pytester import RunResult\nfrom _pytest.reports import BaseReport\nfrom _pytest.reports import TestReport\nfrom _pytest.store import Store\n\n\n@pytest.fixture(scope=\"session\")\ndef schema() -> xmlschema.XMLSchema:\n \"\"\"Return an xmlschema.XMLSchema object for the junit-10.xsd file.\"\"\"\n fn = Path(__file__).parent / \"example_scripts/junit-10.xsd\"\n with fn.open() as f:\n return xmlschema.XMLSchema(f)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_run_and_parse_assert_attr.assert_on_node_expecte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_run_and_parse_assert_attr.assert_on_node_expecte", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 54, "end_line": 75, "span_ids": ["assert_attr", "run_and_parse"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef run_and_parse(pytester: Pytester, schema: xmlschema.XMLSchema) -> RunAndParse:\n \"\"\"Fixture that returns a function that can be used to execute pytest and\n return the parsed ``DomNode`` of the root xml node.\n\n The ``family`` parameter is used to configure the ``junit_family`` of the written report.\n \"xunit2\" is also automatically validated against the schema.\n \"\"\"\n return RunAndParse(pytester, schema)\n\n\ndef assert_attr(node, **kwargs):\n __tracebackhide__ = True\n\n def nodeval(node, name):\n anode = node.getAttributeNode(name)\n if anode is not None:\n return anode.value\n\n expected = {name: str(value) for name, value in kwargs.items()}\n on_node = {name: nodeval(node, name) for name in expected}\n assert on_node == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_parametrize_families_TestPython.test_summing_simple.node_assert_attr_name_py": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_parametrize_families_TestPython.test_summing_simple.node_assert_attr_name_py", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", 
"category": "test", "start_line": 139, "end_line": 167, "span_ids": ["TestPython", "impl", "TestPython.test_summing_simple"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "parametrize_families = pytest.mark.parametrize(\"xunit_family\", [\"xunit1\", \"xunit2\"])\n\n\nclass TestPython:\n @parametrize_families\n def test_summing_simple(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_pass():\n pass\n def test_fail():\n assert 0\n def test_skip():\n pytest.skip(\"\")\n @pytest.mark.xfail\n def test_xfail():\n assert 0\n @pytest.mark.xfail\n def test_xpass():\n assert 1\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(name=\"pytest\", errors=0, failures=1, skipped=2, tests=5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_hostname_in_xml_TestPython.test_timestamp_in_xml.assert_start_time_time": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_hostname_in_xml_TestPython.test_timestamp_in_xml.assert_start_time_time", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 198, "end_line": 226, "span_ids": ["TestPython.test_hostname_in_xml", "TestPython.test_timestamp_in_xml"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_hostname_in_xml(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_pass():\n pass\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(hostname=platform.node())\n\n @parametrize_families\n def test_timestamp_in_xml(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_pass():\n pass\n \"\"\"\n )\n start_time = datetime.now()\n result, dom = run_and_parse(family=xunit_family)\n node = dom.find_first_by_tag(\"testsuite\")\n timestamp = datetime.strptime(node[\"timestamp\"], \"%Y-%m-%dT%H:%M:%S.%f\")\n assert start_time <= timestamp < datetime.now()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skip_doesnt_capture_output_TestPython.test_mark_skip_doesnt_capture_output.assert_bar_not_in_node": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_mark_skip_doesnt_capture_output_TestPython.test_mark_skip_doesnt_capture_output.assert_bar_not_in_node", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 431, "end_line": 446, "span_ids": ["TestPython.test_mark_skip_doesnt_capture_output"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_mark_skip_doesnt_capture_output(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skip(reason=\"foo\")\n def test_skip():\n print(\"bar!\")\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret == 0\n node_xml = dom.find_first_by_tag(\"testsuite\").toxml()\n assert \"bar!\" not in node_xml", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_classname_instance_TestPython.test_classname_instance.tnode_assert_attr_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_classname_instance_TestPython.test_classname_instance.tnode_assert_attr_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 448, "end_line": 466, "span_ids": ["TestPython.test_classname_instance"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_classname_instance(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n class TestClass(object):\n def test_method(self):\n assert 0\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(failures=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(\n classname=\"test_classname_instance.TestClass\", name=\"test_method\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_classname_nested_dir_TestPython.test_classname_nested_dir.tnode_assert_attr_classna": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_classname_nested_dir_TestPython.test_classname_nested_dir.tnode_assert_attr_classna", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 468, "end_line": 479, "span_ids": ["TestPython.test_classname_nested_dir"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_classname_nested_dir(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n p = pytester.mkdir(\"sub\").joinpath(\"test_hello.py\")\n p.write_text(\"def test_func(): 0/0\")\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(failures=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"sub.test_hello\", name=\"test_func\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_internal_error_TestPython.test_internal_error.assert_Division_in_fnod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_internal_error_TestPython.test_internal_error.assert_Division_in_fnod", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 481, "end_line": 495, "span_ids": ["TestPython.test_internal_error"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_internal_error(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makeconftest(\"def pytest_runtest_protocol(): 0 / 0\")\n pytester.makepyfile(\"def test_function(): pass\")\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(errors=1, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"pytest\", name=\"internal\")\n fnode = tnode.find_first_by_tag(\"error\")\n fnode.assert_attr(message=\"internal error\")\n assert \"Division\" in fnode.toxml()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_function_TestPython.test_xfailure_function.fnode_assert_attr_type_p": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfailure_function_TestPython.test_xfailure_function.fnode_assert_attr_type_p", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 639, "end_line": 657, "span_ids": ["TestPython.test_xfailure_function"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_xfailure_function(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_xfail():\n pytest.xfail(\"42\")\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert not result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(skipped=1, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_xfailure_function\", name=\"test_xfail\")\n fnode = tnode.find_first_by_tag(\"skipped\")\n fnode.assert_attr(type=\"pytest.xfail\", message=\"42\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_collect_error_TestPython.test_collect_error.assert_SyntaxError_in_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_collect_error_TestPython.test_collect_error.assert_SyntaxError_in_f", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 751, "end_line": 763, "span_ids": ["TestPython.test_collect_error"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_collect_error(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\"syntax error\")\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(errors=1, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n fnode = tnode.find_first_by_tag(\"error\")\n fnode.assert_attr(message=\"collection failure\")\n assert \"SyntaxError\" in fnode.toxml()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPasteCapture.test_non_ascii_paste_text_TestPasteCapture.test_non_ascii_paste_text.assert_len_pastebinlist_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPasteCapture.test_non_ascii_paste_text_TestPasteCapture.test_non_ascii_paste_text.assert_len_pastebinlist_", "embedding": null, "metadata": {"file_path": "testing/test_pastebin.py", "file_name": "test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 64, "end_line": 83, "span_ids": ["TestPasteCapture.test_non_ascii_paste_text"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPasteCapture:\n\n def test_non_ascii_paste_text(self, pytester: Pytester, pastebinlist) -> None:\n \"\"\"Make sure that text which contains non-ascii characters is pasted\n correctly. See #1219.\n \"\"\"\n pytester.makepyfile(\n test_unicode=\"\"\"\\\n def test():\n assert '\u263a' == 1\n \"\"\"\n )\n result = pytester.runpytest(\"--pastebin=all\")\n expected_msg = \"*assert '\u263a' == 1*\"\n result.stdout.fnmatch_lines(\n [\n expected_msg,\n \"*== 1 failed in *\",\n \"*Sending information to Paste Service*\",\n ]\n )\n assert len(pastebinlist) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste_TestPaste.mocked_urlopen_fail.return.calls": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste_TestPaste.mocked_urlopen_fail.return.calls", "embedding": null, "metadata": {"file_path": "testing/test_pastebin.py", "file_name": "test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 86, "end_line": 104, "span_ids": ["TestPaste.mocked_urlopen_fail", "TestPaste", "TestPaste.pastebin"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPaste:\n @pytest.fixture\n def pastebin(self, request):\n return request.config.pluginmanager.getplugin(\"pastebin\")\n\n @pytest.fixture\n def mocked_urlopen_fail(self, monkeypatch: MonkeyPatch):\n \"\"\"Monkeypatch the actual urlopen call to emulate a HTTP Error 400.\"\"\"\n calls = []\n\n import urllib.error\n import urllib.request\n\n def mocked(url, data):\n calls.append((url, data))\n raise urllib.error.HTTPError(url, 400, \"Bad request\", {}, io.BytesIO())\n\n monkeypatch.setattr(urllib.request, \"urlopen\", mocked)\n return calls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.mocked_urlopen_invalid_TestPaste.mocked_urlopen_invalid.return.calls": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.mocked_urlopen_invalid_TestPaste.mocked_urlopen_invalid.return.calls", "embedding": null, "metadata": 
{"file_path": "testing/test_pastebin.py", "file_name": "test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 106, "end_line": 126, "span_ids": ["TestPaste.mocked_urlopen_invalid", "TestPaste.mocked_urlopen_invalid.mocked.DummyFile.read", "TestPaste.mocked_urlopen_invalid.mocked.DummyFile"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPaste:\n\n @pytest.fixture\n def mocked_urlopen_invalid(self, monkeypatch: MonkeyPatch):\n \"\"\"Monkeypatch the actual urlopen calls done by the internal plugin\n function that connects to bpaste service, but return a url in an\n unexpected format.\"\"\"\n calls = []\n\n def mocked(url, data):\n calls.append((url, data))\n\n class DummyFile:\n def read(self):\n # part of html of a normal response\n return b'View raw.'\n\n return DummyFile()\n\n import urllib.request\n\n monkeypatch.setattr(urllib.request, \"urlopen\", mocked)\n return calls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.mocked_urlopen_TestPaste.mocked_urlopen.return.calls": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.mocked_urlopen_TestPaste.mocked_urlopen.return.calls", "embedding": null, "metadata": {"file_path": "testing/test_pastebin.py", "file_name": "test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 128, "end_line": 147, "span_ids": ["TestPaste.mocked_urlopen.mocked.DummyFile.read", "TestPaste.mocked_urlopen", "TestPaste.mocked_urlopen.mocked.DummyFile"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPaste:\n\n @pytest.fixture\n def mocked_urlopen(self, monkeypatch: MonkeyPatch):\n \"\"\"Monkeypatch the actual urlopen calls done by the internal plugin\n function that connects to bpaste service.\"\"\"\n calls = []\n\n def mocked(url, data):\n calls.append((url, data))\n\n class DummyFile:\n def read(self):\n # part of html of a normal response\n return b'View raw.'\n\n return DummyFile()\n\n import urllib.request\n\n monkeypatch.setattr(urllib.request, \"urlopen\", mocked)\n return calls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.test_pastebin_invalid_url_TestPaste.test_pastebin_http_error.assert_len_mocked_urlopen": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_TestPaste.test_pastebin_invalid_url_TestPaste.test_pastebin_http_error.assert_len_mocked_urlopen", "embedding": null, "metadata": {"file_path": "testing/test_pastebin.py", "file_name": 
"test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 149, "end_line": 160, "span_ids": ["TestPaste.test_pastebin_http_error", "TestPaste.test_pastebin_invalid_url"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPaste:\n\n def test_pastebin_invalid_url(self, pastebin, mocked_urlopen_invalid) -> None:\n result = pastebin.create_new_paste(b\"full-paste-contents\")\n assert (\n result\n == \"bad response: invalid format ('View raw.')\"\n )\n assert len(mocked_urlopen_invalid) == 1\n\n def test_pastebin_http_error(self, pastebin, mocked_urlopen_fail) -> None:\n result = pastebin.create_new_paste(b\"full-paste-contents\")\n assert result == \"bad response: HTTP Error 400: Bad request\"\n assert len(mocked_urlopen_fail) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_with_addoption_TestPytestPluginInteractions.test_hook_with_addoption.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginInteractions.test_hook_with_addoption_TestPytestPluginInteractions.test_hook_with_addoption.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 146, "end_line": 174, "span_ids": ["TestPytestPluginInteractions.test_hook_with_addoption"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginInteractions:\n\n def test_hook_with_addoption(self, pytester: Pytester) -> None:\n \"\"\"Test that hooks can be used in a call to pytest_addoption\"\"\"\n pytester.makepyfile(\n newhooks=\"\"\"\n import pytest\n @pytest.hookspec(firstresult=True)\n def pytest_default_value():\n pass\n \"\"\"\n )\n pytester.makepyfile(\n myplugin=\"\"\"\n import newhooks\n def pytest_addhooks(pluginmanager):\n pluginmanager.add_hookspecs(newhooks)\n def pytest_addoption(parser, pluginmanager):\n default_value = pluginmanager.hook.pytest_default_value()\n parser.addoption(\"--config\", help=\"Config, defaults to %(default)s\", default=default_value)\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n pytest_plugins=(\"myplugin\",)\n def pytest_default_value():\n return \"default_value\"\n \"\"\"\n )\n res = pytester.runpytest(\"--help\")\n res.stdout.fnmatch_lines([\"*--config=CONFIG*default_value*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_spawn_uses_tmphome_test_spawn_uses_tmphome.assert_child_wait_0_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_spawn_uses_tmphome_test_spawn_uses_tmphome.assert_child_wait_0_", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 719, "end_line": 738, "span_ids": ["test_spawn_uses_tmphome"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_spawn_uses_tmphome(pytester: Pytester) -> None:\n tmphome = str(pytester.path)\n assert os.environ.get(\"HOME\") == tmphome\n\n pytester._monkeypatch.setenv(\"CUSTOMENV\", \"42\")\n\n p1 = pytester.makepyfile(\n \"\"\"\n import os\n\n def test():\n assert os.environ[\"HOME\"] == {tmphome!r}\n assert os.environ[\"CUSTOMENV\"] == \"42\"\n \"\"\".format(\n tmphome=tmphome\n )\n )\n child = pytester.spawn_pytest(str(p1))\n out = child.read()\n assert child.wait() == 0, out.decode(\"utf8\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_deserialization_failure_TestReportSerialization.test_deserialization_failure.with_pytest_raises_.TestReport__from_json_dat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_deserialization_failure_TestReportSerialization.test_deserialization_failure.with_pytest_raises_.TestReport__from_json_dat", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 246, "end_line": 266, "span_ids": ["TestReportSerialization.test_deserialization_failure"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_deserialization_failure(self, pytester: Pytester) -> None:\n \"\"\"Check handling of failure during deserialization of report types.\"\"\"\n pytester.makepyfile(\n \"\"\"\n def test_a():\n assert False\n \"\"\"\n )\n reprec = pytester.inline_run()\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 3\n test_a_call = reports[1]\n data = test_a_call._to_json()\n entry = data[\"longrepr\"][\"reprtraceback\"][\"reprentries\"][0]\n assert entry[\"type\"] == \"ReprEntry\"\n\n entry[\"type\"] = \"Unknown\"\n with pytest.raises(\n RuntimeError, match=\"INTERNALERROR: Unknown entry type returned: Unknown\"\n ):\n TestReport._from_json(data)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_TestReportSerialization.test_chained_exceptions.if_report_class_is_TestRe.else_.report.reports_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_TestReportSerialization.test_chained_exceptions.if_report_class_is_TestRe.else_.report.reports_1_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 268, "end_line": 303, "span_ids": ["TestReportSerialization.test_chained_exceptions"], "tokens": 287}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n @pytest.mark.parametrize(\"report_class\", [TestReport, CollectReport])\n def test_chained_exceptions(\n self, pytester: Pytester, tw_mock, report_class\n ) -> None:\n \"\"\"Check serialization/deserialization of report objects containing chained exceptions (#5786)\"\"\"\n pytester.makepyfile(\n \"\"\"\n def foo():\n raise ValueError('value error')\n def test_a():\n try:\n foo()\n except ValueError as e:\n raise RuntimeError('runtime error') from e\n if {error_during_import}:\n test_a()\n \"\"\".format(\n error_during_import=report_class is CollectReport\n )\n )\n\n reprec = pytester.inline_run()\n if report_class is TestReport:\n reports: Union[\n Sequence[TestReport], Sequence[CollectReport]\n ] = reprec.getreports(\"pytest_runtest_logreport\")\n # we have 3 reports: setup/call/teardown\n assert len(reports) == 3\n # get the call report\n report = reports[1]\n else:\n assert report_class is CollectReport\n # two collection reports: session and test file\n reports = reprec.getreports(\"pytest_collectreport\")\n assert len(reports) == 2\n report = reports[1]\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions.check_longrepr_TestReportSerialization.test_chained_exceptions.check_longrepr.assert_desc2_is_None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions.check_longrepr_TestReportSerialization.test_chained_exceptions.check_longrepr.assert_desc2_is_None", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 305, "end_line": 325, "span_ids": ["TestReportSerialization.test_chained_exceptions"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n @pytest.mark.parametrize(\"report_class\", [TestReport, CollectReport])\n def test_chained_exceptions(\n self, pytester: Pytester, tw_mock, report_class\n ) -> None:\n # ... other code\n\n def check_longrepr(longrepr: ExceptionChainRepr) -> None:\n \"\"\"Check the attributes of the given longrepr object according to the test file.\n\n We can get away with testing both CollectReport and TestReport with this function because\n the longrepr objects are very similar.\n \"\"\"\n assert isinstance(longrepr, ExceptionChainRepr)\n assert longrepr.sections == [(\"title\", \"contents\", \"=\")]\n assert len(longrepr.chain) == 2\n entry1, entry2 = longrepr.chain\n tb1, fileloc1, desc1 = entry1\n tb2, fileloc2, desc2 = entry2\n\n assert \"ValueError('value error')\" in str(tb1)\n assert \"RuntimeError('runtime error')\" in str(tb2)\n\n assert (\n desc1\n == \"The above exception was the direct cause of the following exception:\"\n )\n assert desc2 is None\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions.assert_report_failed_TestReportSerialization.test_chained_exceptions.loaded_report_longrepr_to": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions.assert_report_failed_TestReportSerialization.test_chained_exceptions.loaded_report_longrepr_to", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 327, "end_line": 342, "span_ids": ["TestReportSerialization.test_chained_exceptions"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n @pytest.mark.parametrize(\"report_class\", [TestReport, CollectReport])\n def test_chained_exceptions(\n self, pytester: Pytester, tw_mock, report_class\n ) -> None:\n # ... other code\n\n assert report.failed\n assert len(report.sections) == 0\n assert isinstance(report.longrepr, ExceptionChainRepr)\n report.longrepr.addsection(\"title\", \"contents\", \"=\")\n check_longrepr(report.longrepr)\n\n data = report._to_json()\n loaded_report = report_class._from_json(data)\n\n assert loaded_report.failed\n check_longrepr(loaded_report.longrepr)\n\n # make sure we don't blow up on ``toterminal`` call; we don't test the actual output because it is very\n # brittle and hard to maintain, but we can assume it is correct because ``toterminal`` is already tested\n # elsewhere and we do check the contents of the longrepr object after loading it.\n loaded_report.longrepr.toterminal(tw_mock)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_callinfo_test_callinfo.assert_n_not_in_repr_c": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_callinfo_test_callinfo.assert_n_not_in_repr_c", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 476, "end_line": 497, "span_ids": ["test_callinfo"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_callinfo() -> None:\n ci = runner.CallInfo.from_call(lambda: 0, \"collect\")\n assert ci.when == \"collect\"\n assert ci.result == 0\n assert \"result\" in repr(ci)\n assert repr(ci) == \"\"\n assert str(ci) == \"\"\n\n ci2 = runner.CallInfo.from_call(lambda: 0 / 0, \"collect\")\n assert ci2.when == \"collect\"\n assert not 
hasattr(ci2, \"result\")\n assert repr(ci2) == f\"\"\n assert str(ci2) == repr(ci2)\n assert ci2.excinfo\n\n # Newlines are escaped.\n def raise_assertion():\n assert 0, \"assert_msg\"\n\n ci3 = runner.CallInfo.from_call(raise_assertion, \"call\")\n assert repr(ci3) == f\"\"\n assert \"\\n\" not in repr(ci3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_outcomeexception_exceptionattributes__strip_resource_warnings.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_outcomeexception_exceptionattributes__strip_resource_warnings.return._", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 537, "end_line": 583, "span_ids": ["test_outcomeexception_exceptionattributes", "test_outcomeexception_passes_except_Exception", "test_pytest_exit", "test_pytest_exit_msg", "test_pytest_fail", "_strip_resource_warnings"], "tokens": 302}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_outcomeexception_exceptionattributes() -> None:\n outcome = outcomes.OutcomeException(\"test\")\n assert outcome.args[0] == outcome.msg\n\n\ndef test_outcomeexception_passes_except_Exception() -> None:\n with pytest.raises(outcomes.OutcomeException):\n try:\n raise outcomes.OutcomeException(\"test\")\n except Exception as e:\n raise NotImplementedError from e\n\n\ndef test_pytest_exit() -> None:\n with pytest.raises(pytest.exit.Exception) as excinfo:\n pytest.exit(\"hello\")\n assert excinfo.errisinstance(pytest.exit.Exception)\n\n\ndef test_pytest_fail() -> None:\n with pytest.raises(pytest.fail.Exception) as excinfo:\n pytest.fail(\"hello\")\n s = excinfo.exconly(tryshort=True)\n assert s.startswith(\"Failed\")\n\n\ndef test_pytest_exit_msg(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_configure(config):\n pytest.exit('oh noes')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stderr.fnmatch_lines([\"Exit: oh noes\"])\n\n\ndef _strip_resource_warnings(lines):\n # Assert no output on stderr, except for unreliable ResourceWarnings.\n # (https://github.com/pytest-dev/pytest/issues/5088)\n return [\n x\n for x in lines\n if not x.startswith((\"Exception ignored in:\", \"ResourceWarning\"))\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_non_ascii_test_pytest_fail_notrace_non_ascii.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_non_ascii_test_pytest_fail_notrace_non_ascii.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 
648, "end_line": 663, "span_ids": ["test_pytest_fail_notrace_non_ascii"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_fail_notrace_non_ascii(pytester: Pytester) -> None:\n \"\"\"Fix pytest.fail with pytrace=False with non-ascii characters (#1178).\n\n This tests with native and unicode strings containing non-ascii chars.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\\\n import pytest\n\n def test_hello():\n pytest.fail('oh oh: \u263a', pytrace=False)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*test_hello*\", \"oh oh: \u263a\"])\n result.stdout.no_fnmatch_line(\"*def test_hello*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_funcarg_setup_when_outer_scope_fails_test_setup_funcarg_setup_when_outer_scope_fails.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_funcarg_setup_when_outer_scope_fails_test_setup_funcarg_setup_when_outer_scope_fails.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 215, "end_line": 240, "span_ids": ["test_setup_funcarg_setup_when_outer_scope_fails"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_funcarg_setup_when_outer_scope_fails(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def setup_module(mod):\n raise ValueError(42)\n @pytest.fixture\n def hello(request):\n raise ValueError(\"xyz43\")\n def test_function1(hello):\n pass\n def test_function2(hello):\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"*function1*\",\n \"*ValueError*42*\",\n \"*function2*\",\n \"*ValueError*42*\",\n \"*2 errors*\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*xyz43*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_broken_repr_SessionTests.test_broken_repr.assert_out_find_reprex": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_broken_repr_SessionTests.test_broken_repr.assert_out_find_reprex", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 103, "end_line": 133, "span_ids": ["SessionTests.test_broken_repr"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SessionTests:\n\n def test_broken_repr(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n class reprexc(BaseException):\n def __str__(self):\n return \"Ha Ha fooled you, I'm a broken repr().\"\n\n class BrokenRepr1(object):\n foo=0\n def __repr__(self):\n raise reprexc\n\n class TestBrokenClass(object):\n def test_explicit_bad_repr(self):\n t = BrokenRepr1()\n with pytest.raises(BaseException, match=\"broken repr\"):\n repr(t)\n\n def test_implicit_bad_repr1(self):\n t = BrokenRepr1()\n assert t.foo == 1\n\n \"\"\"\n )\n reprec = pytester.inline_run(p)\n passed, skipped, failed = reprec.listoutcomes()\n assert (len(passed), len(skipped), len(failed)) == (1, 0, 1)\n out = failed[0].longrepr.reprcrash.message # type: ignore[union-attr]\n assert out.find(\"<[reprexc() raised in repr()] BrokenRepr1\") != -1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_different_scopes_test_show_different_scopes.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_different_scopes_test_show_different_scopes.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 39, "end_line": 65, "span_ids": ["test_show_different_scopes"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_different_scopes(pytester: Pytester, mode) -> None:\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def arg_function():\n \"\"\"function scoped fixture\"\"\"\n @pytest.fixture(scope='session')\n def arg_session():\n \"\"\"session scoped fixture\"\"\"\n def test_arg1(arg_session, arg_function):\n pass\n '''\n )\n\n result = pytester.runpytest(mode, p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"SETUP S arg_session*\",\n \"*SETUP F arg_function*\",\n \"*test_arg1 (fixtures used: arg_function, arg_session)*\",\n \"*TEARDOWN F arg_function*\",\n \"TEARDOWN S arg_session*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_nested_fixtures_test_show_nested_fixtures.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_nested_fixtures_test_show_nested_fixtures.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": 
"text/x-python", "category": "test", "start_line": 68, "end_line": 99, "span_ids": ["test_show_nested_fixtures"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_nested_fixtures(pytester: Pytester, mode) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture(scope='session')\n def arg_same():\n \"\"\"session scoped fixture\"\"\"\n '''\n )\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture(scope='function')\n def arg_same(arg_same):\n \"\"\"function scoped fixture\"\"\"\n def test_arg1(arg_same):\n pass\n '''\n )\n\n result = pytester.runpytest(mode, p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"SETUP S arg_same*\",\n \"*SETUP F arg_same (fixtures used: arg_same)*\",\n \"*test_arg1 (fixtures used: arg_same)*\",\n \"*TEARDOWN F arg_same*\",\n \"TEARDOWN S arg_same*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_autouse_test_show_fixtures_with_autouse.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_autouse_test_show_fixtures_with_autouse.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 102, "end_line": 126, "span_ids": ["test_show_fixtures_with_autouse"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_fixtures_with_autouse(pytester: Pytester, mode) -> None:\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def arg_function():\n \"\"\"function scoped fixture\"\"\"\n @pytest.fixture(scope='session', autouse=True)\n def arg_session():\n \"\"\"session scoped fixture\"\"\"\n def test_arg1(arg_function):\n pass\n '''\n )\n\n result = pytester.runpytest(mode, p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"SETUP S arg_session*\",\n \"*SETUP F arg_function*\",\n \"*test_arg1 (fixtures used: arg_function, arg_session)*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameters_test_show_fixtures_with_parameters.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameters_test_show_fixtures_with_parameters.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", 
"file_type": "text/x-python", "category": "test", "start_line": 129, "end_line": 159, "span_ids": ["test_show_fixtures_with_parameters"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_fixtures_with_parameters(pytester: Pytester, mode) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture(scope='session', params=['foo', 'bar'])\n def arg_same():\n \"\"\"session scoped fixture\"\"\"\n '''\n )\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture(scope='function')\n def arg_other(arg_same):\n \"\"\"function scoped fixture\"\"\"\n def test_arg1(arg_other):\n pass\n '''\n )\n\n result = pytester.runpytest(mode, p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"SETUP S arg_same?'foo'?\",\n \"TEARDOWN S arg_same?'foo'?\",\n \"SETUP S arg_same?'bar'?\",\n \"TEARDOWN S arg_same?'bar'?\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameter_ids_test_show_fixtures_with_parameter_ids.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameter_ids_test_show_fixtures_with_parameter_ids.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 162, "end_line": 188, "span_ids": ["test_show_fixtures_with_parameter_ids"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_fixtures_with_parameter_ids(pytester: Pytester, mode) -> None:\n pytester.makeconftest(\n '''\n import pytest\n @pytest.fixture(\n scope='session', params=['foo', 'bar'], ids=['spam', 'ham'])\n def arg_same():\n \"\"\"session scoped fixture\"\"\"\n '''\n )\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture(scope='function')\n def arg_other(arg_same):\n \"\"\"function scoped fixture\"\"\"\n def test_arg1(arg_other):\n pass\n '''\n )\n\n result = pytester.runpytest(mode, p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\"SETUP S arg_same?'spam'?\", \"SETUP S arg_same?'ham'?\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameter_ids_function_test_show_fixtures_with_parameter_ids_function.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_with_parameter_ids_function_test_show_fixtures_with_parameter_ids_function.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 191, "end_line": 208, "span_ids": ["test_show_fixtures_with_parameter_ids_function"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_fixtures_with_parameter_ids_function(pytester: Pytester, mode) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(params=['foo', 'bar'], ids=lambda p: p.upper())\n def foobar():\n pass\n def test_foobar(foobar):\n pass\n \"\"\"\n )\n\n result = pytester.runpytest(mode, p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\"*SETUP F foobar?'FOO'?\", \"*SETUP F foobar?'BAR'?\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_dynamic_fixture_request_test_dynamic_fixture_request.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_dynamic_fixture_request_test_dynamic_fixture_request.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 211, "end_line": 234, "span_ids": ["test_dynamic_fixture_request"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_dynamic_fixture_request(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture()\n def dynamically_requested_fixture():\n pass\n @pytest.fixture()\n def dependent_fixture(request):\n request.getfixturevalue('dynamically_requested_fixture')\n def test_dyn(dependent_fixture):\n pass\n \"\"\"\n )\n\n result = pytester.runpytest(\"--setup-only\", p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"*SETUP F dynamically_requested_fixture\",\n \"*TEARDOWN F dynamically_requested_fixture\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_capturing_test_capturing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_capturing_test_capturing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": 
"test", "start_line": 237, "end_line": 256, "span_ids": ["test_capturing"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_capturing(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest, sys\n @pytest.fixture()\n def one():\n sys.stdout.write('this should be captured')\n sys.stderr.write('this should also be captured')\n @pytest.fixture()\n def two(one):\n assert 0\n def test_capturing(two):\n pass\n \"\"\"\n )\n\n result = pytester.runpytest(\"--setup-only\", p)\n result.stdout.fnmatch_lines(\n [\"this should be captured\", \"this should also be captured\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_and_execute_test_test_show_fixtures_and_execute_test.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixtures_and_execute_test_test_show_fixtures_and_execute_test.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 259, "end_line": 277, "span_ids": ["test_show_fixtures_and_execute_test"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_fixtures_and_execute_test(pytester: Pytester) -> None:\n \"\"\"Verify that setups are shown and tests are executed.\"\"\"\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg():\n assert True\n def test_arg(arg):\n assert False\n \"\"\"\n )\n\n result = pytester.runpytest(\"--setup-show\", p)\n assert result.ret == 1\n\n result.stdout.fnmatch_lines(\n [\"*SETUP F arg*\", \"*test_arg (fixtures used: arg)F*\", \"*TEARDOWN F arg*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_report_collect_after_half_a_second_TestTerminal.test_report_collect_after_half_a_second.assert_x1b_32m_x1b_1m": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_report_collect_after_half_a_second_TestTerminal.test_report_collect_after_half_a_second.assert_x1b_32m_x1b_1m", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 155, "end_line": 182, "span_ids": ["TestTerminal.test_report_collect_after_half_a_second"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_report_collect_after_half_a_second(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n \"\"\"Test for \"collecting\" being updated after 0.5s\"\"\"\n\n pytester.makepyfile(\n **{\n \"test1.py\": \"\"\"\n import _pytest.terminal\n\n _pytest.terminal.REPORT_COLLECTING_RESOLUTION = 0\n\n def test_1():\n pass\n \"\"\",\n \"test2.py\": \"def test_2(): pass\",\n }\n )\n # Explicitly test colored output.\n monkeypatch.setenv(\"PY_COLORS\", \"1\")\n\n child = pytester.spawn_pytest(\"-v test1.py test2.py\")\n child.expect(r\"collecting \\.\\.\\.\")\n child.expect(r\"collecting 1 item\")\n child.expect(r\"collecting 2 items\")\n child.expect(r\"collected 2 items\")\n rest = child.read().decode(\"utf8\")\n assert \"= \\x1b[32m\\x1b[1m2 passed\\x1b[0m\\x1b[32m in\" in rest", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_itemreport_directclasses_not_shown_as_subclasses_TestTerminal.test_itemreport_directclasses_not_shown_as_subclasses.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_itemreport_directclasses_not_shown_as_subclasses_TestTerminal.test_itemreport_directclasses_not_shown_as_subclasses.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 237, "end_line": 253, "span_ids": ["TestTerminal.test_itemreport_directclasses_not_shown_as_subclasses"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_itemreport_directclasses_not_shown_as_subclasses(\n self, pytester: Pytester\n ) -> None:\n a = pytester.mkpydir(\"a123\")\n a.joinpath(\"test_hello123.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n class TestClass(object):\n def test_method(self):\n pass\n \"\"\"\n )\n )\n result = pytester.runpytest(\"-vv\")\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*a123/test_hello123.py*PASS*\"])\n result.stdout.no_fnmatch_line(\"* <- *\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fail_reporting_on_pass_test_pass_reporting_on_fail.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_fail_reporting_on_pass_test_pass_reporting_on_fail.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1098, "end_line": 
1115, "span_ids": ["test_pass_extra_reporting", "test_fail_reporting_on_pass", "test_pass_reporting_on_fail"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fail_reporting_on_pass(pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_this(): assert 1\")\n result = pytester.runpytest(\"-rf\")\n result.stdout.no_fnmatch_line(\"*short test summary*\")\n\n\ndef test_pass_extra_reporting(pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_this(): assert 1\")\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*short test summary*\")\n result = pytester.runpytest(\"-rp\")\n result.stdout.fnmatch_lines([\"*test summary*\", \"PASS*test_pass_extra_reporting*\"])\n\n\ndef test_pass_reporting_on_fail(pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_this(): assert 0\")\n result = pytester.runpytest(\"-rp\")\n result.stdout.no_fnmatch_line(\"*short test summary*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_skip_counting_towards_summary_test_skip_counting_towards_summary.assert_res_1_faile": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_skip_counting_towards_summary_test_skip_counting_towards_summary.assert_res_1_faile", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1643, "end_line": 1658, "span_ids": ["test_skip_counting_towards_summary", "test_skip_counting_towards_summary.DummyReport:2", "test_skip_counting_towards_summary.DummyReport"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_skip_counting_towards_summary(tr):\n class DummyReport(BaseReport):\n count_towards_summary = True\n\n r1 = DummyReport()\n r2 = DummyReport()\n tr.stats = {\"failed\": (r1, r2)}\n tr._main_color = None\n res = tr.build_summary_stats_line()\n assert res == ([(\"2 failed\", {\"bold\": True, \"red\": True})], \"red\")\n\n r1.count_towards_summary = False\n tr.stats = {\"failed\": (r1, r2)}\n tr._main_color = None\n res = tr.build_summary_stats_line()\n assert res == ([(\"1 failed\", {\"bold\": True, \"red\": True})], \"red\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestClassicOutputStyle_TestClassicOutputStyle.test_normal_verbosity.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestClassicOutputStyle_TestClassicOutputStyle.test_normal_verbosity.result_stdout_fnmatch_lin", "embedding": null, 
"metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1878, "end_line": 1904, "span_ids": ["TestClassicOutputStyle", "TestClassicOutputStyle.test_normal_verbosity", "TestClassicOutputStyle.test_files"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestClassicOutputStyle:\n \"\"\"Ensure classic output style works as expected (#3883)\"\"\"\n\n @pytest.fixture\n def test_files(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"test_one.py\": \"def test_one(): pass\",\n \"test_two.py\": \"def test_two(): assert 0\",\n \"sub/test_three.py\": \"\"\"\n def test_three_1(): pass\n def test_three_2(): assert 0\n def test_three_3(): pass\n \"\"\",\n }\n )\n\n def test_normal_verbosity(self, pytester: Pytester, test_files) -> None:\n result = pytester.runpytest(\"-o\", \"console_output_style=classic\")\n result.stdout.fnmatch_lines(\n [\n \"test_one.py .\",\n \"test_two.py F\",\n f\"sub{os.sep}test_three.py .F.\",\n \"*2 failed, 3 passed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_zero_tests_collected_TestProgressOutputStyle.test_normal.output_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_zero_tests_collected_TestProgressOutputStyle.test_normal.output_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1945, "end_line": 1971, "span_ids": ["TestProgressOutputStyle.test_zero_tests_collected", "TestProgressOutputStyle.test_normal"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n\n def test_zero_tests_collected(self, pytester: Pytester) -> None:\n \"\"\"Some plugins (testmon for example) might issue pytest_runtest_logreport without any tests being\n actually collected (#2971).\"\"\"\n pytester.makeconftest(\n \"\"\"\n def pytest_collection_modifyitems(items, config):\n from _pytest.runner import CollectReport\n for node_id in ('nodeid1', 'nodeid2'):\n rep = CollectReport(node_id, 'passed', None, None)\n rep.when = 'passed'\n rep.duration = 0.1\n config.hook.pytest_runtest_logreport(report=rep)\n \"\"\"\n )\n output = pytester.runpytest()\n output.stdout.no_fnmatch_line(\"*ZeroDivisionError*\")\n output.stdout.fnmatch_lines([\"=* 2 passed in *=\"])\n\n def test_normal(self, many_tests_files, pytester: Pytester) -> None:\n output = pytester.runpytest()\n output.stdout.re_match_lines(\n [\n r\"test_bar.py \\.{10} \\s+ \\[ 50%\\]\",\n r\"test_foo.py \\.{5} \\s+ \\[ 
75%\\]\",\n r\"test_foobar.py \\.{5} \\s+ \\[100%\\]\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_count_TestProgressOutputStyle.test_count.output_stdout_re_match_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_count_TestProgressOutputStyle.test_count.output_stdout_re_match_li", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2025, "end_line": 2039, "span_ids": ["TestProgressOutputStyle.test_count"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n\n def test_count(self, many_tests_files, pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n console_output_style = count\n \"\"\"\n )\n output = pytester.runpytest()\n output.stdout.re_match_lines(\n [\n r\"test_bar.py \\.{10} \\s+ \\[10/20\\]\",\n r\"test_foo.py \\.{5} \\s+ \\[15/20\\]\",\n r\"test_foobar.py \\.{5} \\s+ \\[20/20\\]\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_xdist_verbose_TestProgressOutputStyle.test_capture_no.output_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_xdist_verbose_TestProgressOutputStyle.test_capture_no.output_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2089, "end_line": 2126, "span_ids": ["TestProgressOutputStyle.test_capture_no", "TestProgressOutputStyle.test_xdist_verbose"], "tokens": 439}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n\n def test_xdist_verbose(\n self, many_tests_files, pytester: Pytester, monkeypatch\n ) -> None:\n pytest.importorskip(\"xdist\")\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n output = pytester.runpytest(\"-n2\", \"-v\")\n output.stdout.re_match_lines_random(\n [\n r\"\\[gw\\d\\] \\[\\s*\\d+%\\] PASSED test_bar.py::test_bar\\[1\\]\",\n r\"\\[gw\\d\\] \\[\\s*\\d+%\\] PASSED test_foo.py::test_foo\\[1\\]\",\n r\"\\[gw\\d\\] \\[\\s*\\d+%\\] PASSED test_foobar.py::test_foobar\\[1\\]\",\n ]\n )\n output.stdout.fnmatch_lines_random(\n [\n line.translate(TRANS_FNMATCH)\n for line in [\n \"test_bar.py::test_bar[0] \",\n \"test_foo.py::test_foo[0] \",\n 
\"test_foobar.py::test_foobar[0] \",\n \"[gw?] [ 5%] PASSED test_*[?] \",\n \"[gw?] [ 10%] PASSED test_*[?] \",\n \"[gw?] [ 55%] PASSED test_*[?] \",\n \"[gw?] [ 60%] PASSED test_*[?] \",\n \"[gw?] [ 95%] PASSED test_*[?] \",\n \"[gw?] [100%] PASSED test_*[?] \",\n ]\n ]\n )\n\n def test_capture_no(self, many_tests_files, pytester: Pytester) -> None:\n output = pytester.runpytest(\"-s\")\n output.stdout.re_match_lines(\n [r\"test_bar.py \\.{10}\", r\"test_foo.py \\.{5}\", r\"test_foobar.py \\.{5}\"]\n )\n\n output = pytester.runpytest(\"--capture=no\")\n output.stdout.no_fnmatch_line(\"*%]*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_format_session_duration_test_format_session_duration.assert_format_session_dur": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_format_session_duration_test_format_session_duration.assert_format_session_dur", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1796, "end_line": 1810, "span_ids": ["test_format_session_duration"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"seconds, expected\",\n [\n (10.0, \"10.00s\"),\n (10.34, \"10.34s\"),\n (59.99, \"59.99s\"),\n (60.55, \"60.55s (0:01:00)\"),\n (123.55, \"123.55s (0:02:03)\"),\n (60 * 60 + 0.5, \"3600.50s (1:00:00)\"),\n ],\n)\ndef test_format_session_duration(seconds, expected):\n from _pytest.terminal import format_session_duration\n\n assert format_session_duration(seconds) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_failure_is_shown_test_setup_failure_is_shown.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_setup_failure_is_shown_test_setup_failure_is_shown.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 261, "end_line": 277, "span_ids": ["test_setup_failure_is_shown"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_failure_is_shown(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n import pytest\n class TC(unittest.TestCase):\n def setUp(self):\n assert 0, \"down1\"\n def test_method(self):\n print(\"never42\")\n xyz\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n assert result.ret == 1\n 
result.stdout.fnmatch_lines([\"*setUp*\", \"*assert 0*down1*\", \"*1 failed*\"])\n result.stdout.no_fnmatch_line(\"*never42*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_adderrorandfailure_defers_test_testcase_adderrorandfailure_defers.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_adderrorandfailure_defers_test_testcase_adderrorandfailure_defers.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 328, "end_line": 349, "span_ids": ["test_testcase_adderrorandfailure_defers"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"type\", [\"Error\", \"Failure\"])\ndef test_testcase_adderrorandfailure_defers(pytester: Pytester, type: str) -> None:\n pytester.makepyfile(\n \"\"\"\n from unittest import TestCase\n import pytest\n class MyTestCase(TestCase):\n def run(self, result):\n excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)\n try:\n result.add%s(self, excinfo._excinfo)\n except KeyboardInterrupt:\n raise\n except:\n pytest.fail(\"add%s should not raise\")\n def test_hello(self):\n pass\n \"\"\"\n % (type, type)\n )\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*should not raise*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_handles_init_exceptions_test_testcase_handles_init_exceptions.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_testcase_handles_init_exceptions_test_testcase_handles_init_exceptions.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1059, "end_line": 1077, "span_ids": ["test_testcase_handles_init_exceptions"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_testcase_handles_init_exceptions(pytester: Pytester) -> None:\n \"\"\"\n Regression test to make sure exceptions in the __init__ method are bubbled up correctly.\n See https://github.com/pytest-dev/pytest/issues/3788\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n from unittest import TestCase\n import pytest\n class MyTestCase(TestCase):\n def __init__(self, *args, **kwargs):\n raise Exception(\"should raise this exception\")\n def test_hello(self):\n 
pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert \"should raise this exception\" in result.stdout.str()\n result.stdout.no_fnmatch_line(\"*ERROR at teardown of MyTestCase.test_hello*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py___add_module_names.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py___add_module_names.False", "embedding": null, "metadata": {"file_path": "doc/en/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 109, "span_ids": ["impl", "impl:2", "docstring", "imports", "imports:7", "impl:22"], "tokens": 785}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "#\n# pytest documentation build configuration file, created by\n# sphinx-quickstart on Fri Oct 8 17:54:28 2010.\n#\n# This file is execfile()d with the current directory set to its containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The full version, including alpha/beta/rc tags.\n# The short X.Y version.\nimport ast\nimport os\nimport sys\nfrom typing import List\nfrom typing import TYPE_CHECKING\n\nfrom _pytest import __version__ as version\n\nif TYPE_CHECKING:\n import sphinx.application\n\n\nrelease = \".\".join(version.split(\".\")[:2])\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n# sys.path.insert(0, os.path.abspath('.'))\n\nautodoc_member_order = \"bysource\"\ntodo_include_todos = 1\n\n# -- General configuration -----------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be extensions\n# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.\nextensions = [\n \"pallets_sphinx_themes\",\n \"pygments_pytest\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.autosummary\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.todo\",\n \"sphinx.ext.viewcode\",\n \"sphinx_removed_in\",\n \"sphinxcontrib_trio\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# The suffix of source filenames.\nsource_suffix = \".rst\"\n\n# The encoding of source files.\n# source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = \"contents\"\n\n# General information about the project.\nproject = \"pytest\"\ncopyright = \"2015\u20132020, holger krekel and pytest-dev team\"\n\n\n# The language for content autogenerated by Sphinx. 
Refer to documentation\n# for a list of supported languages.\n# language = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n# today = ''\n# Else, today_fmt is used as the format for a strftime call.\n# today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = [\n \"_build\",\n \"naming20.rst\",\n \"test/*\",\n \"old_*\",\n \"*attic*\",\n \"*/attic*\",\n \"funcargs.rst\",\n \"setup.rst\",\n \"example/remoteinterp.rst\",\n]\n\n\n# The reST default role (used for this markup: `text`) to use for all documents.\ndefault_role = \"literal\"\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n# add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\nadd_module_names = False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_pygments_style__If_false_no_index_is_g": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_pygments_style__If_false_no_index_is_g", "embedding": null, "metadata": {"file_path": "doc/en/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 116, "end_line": 212, "span_ids": ["impl:22", "impl:43"], "tokens": 777}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "pygments_style = \"sphinx\"\n\n\n# A list of ignored prefixes for module index sorting.\n# modindex_common_prefix = []\n\n# A list of regular expressions that match URIs that should not be checked when\n# doing a linkcheck.\nlinkcheck_ignore = [\n \"https://github.com/numpy/numpy/blob/master/doc/release/1.16.0-notes.rst#new-deprecations\",\n \"https://blogs.msdn.microsoft.com/bharry/2017/06/28/testing-in-a-cloud-delivery-cadence/\",\n \"http://pythontesting.net/framework/pytest-introduction/\",\n r\"https://github.com/pytest-dev/pytest/issues/\\d+\",\n r\"https://github.com/pytest-dev/pytest/pull/\\d+\",\n]\n\n# The number of worker threads to use when checking links (default=5).\nlinkcheck_workers = 5\n\n\n# -- Options for HTML output ---------------------------------------------------\n\nsys.path.append(os.path.abspath(\"_themes\"))\nhtml_theme_path = [\"_themes\"]\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\nhtml_theme = \"flask\"\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n# html_theme_options = {\"index_logo\": None}\n\n# Add any paths that contain custom themes here, relative to this directory.\n# html_theme_path = []\n\n# The name for this set of Sphinx documents. If None, it defaults to\n# \" v documentation\".\nhtml_title = \"pytest documentation\"\n\n# A shorter title for the navigation bar. 
Default is the same as html_title.\nhtml_short_title = \"pytest-%s\" % release\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\nhtml_logo = \"img/pytest_logo_curves.svg\"\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\nhtml_favicon = \"img/favicon.png\"\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\n# html_static_path = ['_static']\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n# html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n# html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n# html_sidebars = {}\n# html_sidebars = {'index': 'indexsidebar.html'}\n\nhtml_sidebars = {\n \"index\": [\n \"slim_searchbox.html\",\n \"sidebarintro.html\",\n \"globaltoc.html\",\n \"links.html\",\n \"sourcelink.html\",\n ],\n \"**\": [\n \"slim_searchbox.html\",\n \"globaltoc.html\",\n \"relations.html\",\n \"links.html\",\n \"sourcelink.html\",\n ],\n}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n# html_additional_pages = {}\n# html_additional_pages = {'index': 'index.html'}\n\n\n# If false, no module index is generated.\nhtml_domain_indices = True\n\n# If false, no index is generated.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_html_use_index__Example_configuration_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_html_use_index__Example_configuration_f", "embedding": null, "metadata": {"file_path": "doc/en/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 211, "end_line": 346, "span_ids": ["impl:71", "impl:53", "impl:43"], "tokens": 790}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n# html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\nhtml_show_sourcelink = False\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n# html_show_sphinx = True\nhtmlhelp_basename = \"pytestdoc\"\n\n\n# -- Options for LaTeX output --------------------------------------------------\n\n# The paper size ('letter' or 'a4').\n# latex_paper_size = 'letter'\n\n# The font size ('10pt', '11pt' or '12pt').\n# latex_font_size = '10pt'\n\n# Grouping the document tree into LaTeX files. 
List of tuples\nlatex_documents = [\n (\n \"contents\",\n \"pytest.tex\",\n \"pytest Documentation\",\n \"holger krekel, trainer and consultant, http://merlinux.eu\",\n \"manual\",\n )\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\nlatex_logo = \"img/pytest1.png\"\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n# latex_use_parts = False\n\n# If true, show page references after internal links.\n# latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n# latex_show_urls = False\n\n# Additional stuff for the LaTeX preamble.\n# latex_preamble = ''\n\n# Documents to append as an appendix to all manuals.\n# latex_appendices = []\n\n# If false, no module index is generated.\nlatex_domain_indices = False\n\n# -- Options for manual page output --------------------------------------------\n\n# One entry per manual page. List of tuples\nman_pages = [(\"usage\", \"pytest\", \"pytest usage\", [\"holger krekel at merlinux eu\"], 1)]\n\n\n# -- Options for Epub output ---------------------------------------------------\n\n# Bibliographic Dublin Core info.\nepub_title = \"pytest\"\nepub_author = \"holger krekel at merlinux eu\"\nepub_publisher = \"holger krekel at merlinux eu\"\nepub_copyright = \"2013-2020, holger krekel et alii\"\n\n# The language of the text. It defaults to the language option\n# or en if the language is not set.\n# epub_language = ''\n\n# The scheme of the identifier. Typical schemes are ISBN or URL.\n# epub_scheme = ''\n\n# The unique identifier of the text. This can be a ISBN number\n# or the project homepage.\n# epub_identifier = ''\n\n# A unique identification for the text.\n# epub_uid = ''\n\n# HTML files that should be inserted before the pages created by sphinx.\n# The format is a list of tuples containing the path and title.\n# epub_pre_files = []\n\n# HTML files shat should be inserted after the pages created by sphinx.\n# The format is a list of tuples containing the path and title.\n# epub_post_files = []\n\n# A list of files that should not be packed into the epub file.\n# epub_exclude_files = []\n\n# The depth of the table of contents in toc.ncx.\n# epub_tocdepth = 3\n\n# Allow duplicate toc entries.\n# epub_tocdup = True\n\n\n# -- Options for texinfo output ------------------------------------------------\n\ntexinfo_documents = [\n (\n master_doc,\n \"pytest\",\n \"pytest Documentation\",\n (\n \"Holger Krekel@*Benjamin Peterson@*Ronny Pfannschmidt@*\"\n \"Floris Bruynooghe@*others\"\n ),\n \"pytest\",\n \"simple powerful testing with Python\",\n \"Programming\",\n 1,\n )\n]\n\n\n# Example configuration for intersphinx: refer to the Python standard library.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_intersphinx_mapping_configure_logging.warn_handler_0_filters_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_intersphinx_mapping_configure_logging.warn_handler_0_filters_i", "embedding": null, "metadata": {"file_path": "doc/en/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 347, "end_line": 373, "span_ids": ["configure_logging.WarnLogFilter.filter", "impl:71", "configure_logging.WarnLogFilter", "configure_logging"], "tokens": 236}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "intersphinx_mapping = {\n \"pluggy\": (\"https://pluggy.readthedocs.io/en/latest\", None),\n \"python\": (\"https://docs.python.org/3\", None),\n}\n\n\ndef configure_logging(app: \"sphinx.application.Sphinx\") -> None:\n \"\"\"Configure Sphinx's WarningHandler to handle (expected) missing include.\"\"\"\n import sphinx.util.logging\n import logging\n\n class WarnLogFilter(logging.Filter):\n def filter(self, record: logging.LogRecord) -> bool:\n \"\"\"Ignore warnings about missing include with \"only\" directive.\n\n Ref: https://github.com/sphinx-doc/sphinx/issues/2150.\"\"\"\n if (\n record.msg.startswith('Problems with \"include\" directive path:')\n and \"_changelog_towncrier_draft.rst\" in record.msg\n ):\n return False\n return True\n\n logger = logging.getLogger(sphinx.util.logging.NAMESPACE)\n warn_handler = [x for x in logger.handlers if x.level == logging.WARNING]\n assert len(warn_handler) == 1, warn_handler\n warn_handler[0].filters.insert(0, WarnLogFilter())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_setup_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/conf.py_setup_", "embedding": null, "metadata": {"file_path": "doc/en/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 378, "end_line": 422, "span_ids": ["setup"], "tokens": 363}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def setup(app: \"sphinx.application.Sphinx\") -> None:\n # from sphinx.ext.autodoc import cut_lines\n # app.connect('autodoc-process-docstring', cut_lines(4, what=['module']))\n app.add_crossref_type(\n \"fixture\",\n \"fixture\",\n objname=\"built-in fixture\",\n indextemplate=\"pair: %s; fixture\",\n )\n\n app.add_object_type(\n \"confval\",\n \"confval\",\n objname=\"configuration value\",\n indextemplate=\"pair: %s; configuration value\",\n )\n\n app.add_object_type(\n \"globalvar\",\n \"globalvar\",\n objname=\"global variable interpreted by pytest\",\n indextemplate=\"pair: %s; global variable interpreted by pytest\",\n )\n\n configure_logging(app)\n\n # Make Sphinx mark classes with \"final\" when decorated with @final.\n # We need this because we import final from pytest._compat, not from\n # typing (for Python < 3.8 compat), so Sphinx doesn't detect it.\n # To keep things simple we accept any `@final` decorator.\n # Ref: https://github.com/pytest-dev/pytest/pull/7780\n import sphinx.pycode.ast\n import sphinx.pycode.parser\n\n original_is_final = sphinx.pycode.parser.VariableCommentPicker.is_final\n\n def patched_is_final(self, decorators: List[ast.expr]) -> bool:\n if original_is_final(self, decorators):\n return True\n return any(\n sphinx.pycode.ast.unparse(decorator) == \"final\" for decorator in decorators\n )\n\n 
sphinx.pycode.parser.VariableCommentPicker.is_final = patched_is_final", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/nonpython/conftest.py__content_of_conftest_py_YamlFile.collect.for_name_spec_in_sorted_.yield_YamlItem_from_paren": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/nonpython/conftest.py__content_of_conftest_py_YamlFile.collect.for_name_spec_in_sorted_.yield_YamlItem_from_paren", "embedding": null, "metadata": {"file_path": "doc/en/example/nonpython/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 17, "span_ids": ["docstring", "pytest_collect_file", "YamlFile", "imports", "YamlFile.collect"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# content of conftest.py\nimport pytest\n\n\ndef pytest_collect_file(parent, path):\n if path.ext == \".yaml\" and path.basename.startswith(\"test\"):\n return YamlFile.from_parent(parent, fspath=path)\n\n\nclass YamlFile(pytest.File):\n def collect(self):\n # We need a yaml parser, e.g. PyYAML.\n import yaml\n\n raw = yaml.safe_load(self.fspath.open())\n for name, spec in sorted(raw.items()):\n yield YamlItem.from_parent(self, name=name, spec=spec)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/nonpython/conftest.py_YamlItem_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/nonpython/conftest.py_YamlItem_", "embedding": null, "metadata": {"file_path": "doc/en/example/nonpython/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 20, "end_line": 48, "span_ids": ["YamlItem.repr_failure", "YamlException", "YamlItem.reportinfo", "YamlItem", "YamlItem.runtest"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class YamlItem(pytest.Item):\n def __init__(self, name, parent, spec):\n super().__init__(name, parent)\n self.spec = spec\n\n def runtest(self):\n for name, value in sorted(self.spec.items()):\n # Some custom test execution (dumb example follows).\n if name != value:\n raise YamlException(self, name, value)\n\n def repr_failure(self, excinfo):\n \"\"\"Called when self.runtest() raises an exception.\"\"\"\n if isinstance(excinfo.value, YamlException):\n return \"\\n\".join(\n [\n \"usecase execution failed\",\n \" spec failed: {1!r}: {2!r}\".format(*excinfo.value.args),\n \" no further details known at this point.\",\n ]\n )\n\n def reportinfo(self):\n return self.fspath, 0, f\"usecase: {self.name}\"\n\n\nclass 
YamlException(Exception):\n \"\"\"Custom exception for error reporting.\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py___publish_github_release.return.repo_create_release_tag_n": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py___publish_github_release.return.repo_create_release_tag_n", "embedding": null, "metadata": {"file_path": "scripts/publish-gh-release-notes.py", "file_name": "publish-gh-release-notes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 35, "span_ids": ["publish_github_release", "docstring", "imports"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"\nScript used to publish GitHub release notes extracted from CHANGELOG.rst.\n\nThis script is meant to be executed after a successful deployment in GitHub actions.\n\nUses the following environment variables:\n\n* GIT_TAG: the name of the tag of the current commit.\n* GH_RELEASE_NOTES_TOKEN: a personal access token with 'repo' permissions.\n\n Create one at:\n\n https://github.com/settings/tokens\n\n This token should be set in a secret in the repository, which is exposed as an\n environment variable in the main.yml workflow file.\n\nThe script also requires ``pandoc`` to be previously installed in the system.\n\nRequires Python3.6+.\n\"\"\"\nimport os\nimport re\nimport sys\nfrom pathlib import Path\n\nimport github3\nimport pypandoc\n\n\ndef publish_github_release(slug, token, tag_name, body):\n github = github3.login(token=token)\n owner, repo = slug.split(\"/\")\n repo = github.repository(owner, repo)\n return repo.create_release(tag_name=tag_name, body=body)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py_parse_changelog_convert_rst_to_md.return.pypandoc_convert_text_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py_parse_changelog_convert_rst_to_md.return.pypandoc_convert_text_", "embedding": null, "metadata": {"file_path": "scripts/publish-gh-release-notes.py", "file_name": "publish-gh-release-notes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 41, "end_line": 66, "span_ids": ["convert_rst_to_md", "parse_changelog"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def parse_changelog(tag_name):\n p = Path(__file__).parent.parent / \"doc/en/changelog.rst\"\n changelog_lines = p.read_text(encoding=\"UTF-8\").splitlines()\n\n title_regex = re.compile(r\"pytest (\\d\\.\\d+\\.\\d+) 
\\(\\d{4}-\\d{2}-\\d{2}\\)\")\n consuming_version = False\n version_lines = []\n for line in changelog_lines:\n m = title_regex.match(line)\n if m:\n # found the version we want: start to consume lines until we find the next version title\n if m.group(1) == tag_name:\n consuming_version = True\n # found a new version title while parsing the version we want: break out\n elif consuming_version:\n break\n if consuming_version:\n version_lines.append(line)\n\n return \"\\n\".join(version_lines)\n\n\ndef convert_rst_to_md(text):\n return pypandoc.convert_text(\n text, \"md\", format=\"rst\", extra_args=[\"--wrap=preserve\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py_main_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/publish-gh-release-notes.py_main_", "embedding": null, "metadata": {"file_path": "scripts/publish-gh-release-notes.py", "file_name": "publish-gh-release-notes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 69, "end_line": 106, "span_ids": ["main", "impl"], "tokens": 287}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def main(argv):\n if len(argv) > 1:\n tag_name = argv[1]\n else:\n tag_name = os.environ.get(\"GITHUB_REF\")\n if not tag_name:\n print(\"tag_name not given and $GITHUB_REF not set\", file=sys.stderr)\n return 1\n if tag_name.startswith(\"refs/tags/\"):\n tag_name = tag_name[len(\"refs/tags/\") :]\n\n token = os.environ.get(\"GH_RELEASE_NOTES_TOKEN\")\n if not token:\n print(\"GH_RELEASE_NOTES_TOKEN not set\", file=sys.stderr)\n return 1\n\n slug = os.environ.get(\"GITHUB_REPOSITORY\")\n if not slug:\n print(\"GITHUB_REPOSITORY not set\", file=sys.stderr)\n return 1\n\n rst_body = parse_changelog(tag_name)\n md_body = convert_rst_to_md(rst_body)\n if not publish_github_release(slug, token, tag_name, md_body):\n print(\"Could not publish release notes:\", file=sys.stderr)\n print(md_body, file=sys.stderr)\n return 5\n\n print()\n print(f\"Release notes for {tag_name} published successfully:\")\n print(f\"https://github.com/{slug}/releases/tag/{tag_name}\")\n print()\n return 0\n\n\nif __name__ == \"__main__\":\n sys.exit(main(sys.argv))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_regen_check_links.check_call_tox_e_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_regen_check_links.check_call_tox_e_", "embedding": null, "metadata": {"file_path": "scripts/release.py", "file_name": "release.py", "file_type": "text/x-python", "category": "implementation", "start_line": 69, "end_line": 89, "span_ids": ["fix_formatting", "regen", "check_links"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def regen(version):\n \"\"\"Call regendoc tool to update examples and pytest output in the docs.\"\"\"\n print(f\"{Fore.CYAN}[generate.regen] {Fore.RESET}Updating docs\")\n check_call(\n [\"tox\", \"-e\", \"regen\"],\n env={**os.environ, \"SETUPTOOLS_SCM_PRETEND_VERSION\": version},\n )\n\n\ndef fix_formatting():\n \"\"\"Runs pre-commit in all files to ensure they are formatted correctly\"\"\"\n print(\n f\"{Fore.CYAN}[generate.fix linting] {Fore.RESET}Fixing formatting using pre-commit\"\n )\n call([\"pre-commit\", \"run\", \"--all-files\"])\n\n\ndef check_links():\n \"\"\"Runs sphinx-build to check links\"\"\"\n print(f\"{Fore.CYAN}[generate.check_links] {Fore.RESET}Checking links\")\n check_call([\"tox\", \"-e\", \"docs-checklinks\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_pre_release_pre_release.print_Please_push_your_b": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_pre_release_pre_release.print_Please_push_your_b", "embedding": null, "metadata": {"file_path": "scripts/release.py", "file_name": "release.py", "file_type": "text/x-python", "category": "implementation", "start_line": 84, "end_line": 99, "span_ids": ["pre_release"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pre_release(version, *, skip_check_links):\n \"\"\"Generates new docs, release announcements and creates a local tag.\"\"\"\n announce(version)\n regen(version)\n changelog(version, write_out=True)\n fix_formatting()\n if not skip_check_links:\n check_links()\n\n msg = f\"Prepare release version {version}\"\n check_call([\"git\", \"commit\", \"-a\", \"-m\", msg])\n\n print()\n print(f\"{Fore.CYAN}[generate.pre_release] {Fore.GREEN}All done!\")\n print()\n print(\"Please push your branch and open a PR.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_changelog_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py_changelog_", "embedding": null, "metadata": {"file_path": "scripts/release.py", "file_name": "release.py", "file_type": "text/x-python", "category": "implementation", "start_line": 106, "end_line": 125, "span_ids": ["main", "impl", "changelog"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def changelog(version, write_out=False):\n if write_out:\n addopts = []\n else:\n addopts = [\"--draft\"]\n check_call([\"towncrier\", \"--yes\", \"--version\", version] + addopts)\n\n\ndef main():\n init(autoreset=True)\n parser = 
argparse.ArgumentParser()\n parser.add_argument(\"version\", help=\"Release version\")\n parser.add_argument(\"--skip-check-links\", action=\"store_true\", default=False)\n options = parser.parse_args()\n pre_release(options.version, skip_check_links=options.skip_check_links)\n\n\nif __name__ == \"__main__\":\n main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_inspect_if_TYPE_CHECKING_._TracebackStyle.Literal_long_short_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_inspect_if_TYPE_CHECKING_._TracebackStyle.Literal_long_short_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 53, "span_ids": ["impl:2", "impl", "imports:46", "imports"], "tokens": 333}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import inspect\nimport re\nimport sys\nimport traceback\nfrom inspect import CO_VARARGS\nfrom inspect import CO_VARKEYWORDS\nfrom io import StringIO\nfrom pathlib import Path\nfrom traceback import format_exception_only\nfrom types import CodeType\nfrom types import FrameType\nfrom types import TracebackType\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import Generic\nfrom typing import Iterable\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Pattern\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\nfrom weakref import ref\n\nimport attr\nimport pluggy\nimport py\n\nimport _pytest\nfrom _pytest._code.source import findsource\nfrom _pytest._code.source import getrawcode\nfrom _pytest._code.source import getstatementrange_ast\nfrom _pytest._code.source import Source\nfrom _pytest._io import TerminalWriter\nfrom _pytest._io.saferepr import safeformat\nfrom _pytest._io.saferepr import saferepr\nfrom _pytest.compat import final\nfrom _pytest.compat import get_real_func\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pathlib import bestrelpath\n\nif TYPE_CHECKING:\n from typing_extensions import Literal\n from weakref import ReferenceType\n\n _TracebackStyle = Literal[\"long\", \"short\", \"line\", \"no\", \"native\", \"value\", \"auto\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code.path_Code.path.try_.except_OSError_.return.self_raw_co_filename": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code.path_Code.path.try_.except_OSError_.return.self_raw_co_filename", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", 
"category": "implementation", "start_line": 82, "end_line": 97, "span_ids": ["Code.path"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Code:\n\n @property\n def path(self) -> Union[Path, str]:\n \"\"\"Return a path object pointing to source code, or an ``str`` in\n case of ``OSError`` / non-existing file.\"\"\"\n if not self.raw.co_filename:\n return \"\"\n try:\n p = absolutepath(self.raw.co_filename)\n # maybe don't try this checking\n if not p.exists():\n raise OSError(\"path check failed.\")\n return p\n except OSError:\n # XXX maybe try harder like the weird logic\n # in the standard lib [linecache.updatecache] does?\n return self.raw.co_filename", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback_Traceback.__init__.if_isinstance_tb_Traceba.else_.super___init___tb_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback_Traceback.__init__.if_isinstance_tb_Traceba.else_.super___init___tb_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 317, "end_line": 337, "span_ids": ["Traceback.__init__.if_isinstance_tb_Traceba.f", "Traceback"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Traceback(List[TracebackEntry]):\n \"\"\"Traceback objects encapsulate and offer higher level access to Traceback entries.\"\"\"\n\n def __init__(\n self,\n tb: Union[TracebackType, Iterable[TracebackEntry]],\n excinfo: Optional[\"ReferenceType[ExceptionInfo[BaseException]]\"] = None,\n ) -> None:\n \"\"\"Initialize from given python traceback object and ExceptionInfo.\"\"\"\n self._excinfo = excinfo\n if isinstance(tb, TracebackType):\n\n def f(cur: TracebackType) -> Iterable[TracebackEntry]:\n cur_: Optional[TracebackType] = cur\n while cur_ is not None:\n yield TracebackEntry(cur_, excinfo=excinfo)\n cur_ = cur_.tb_next\n\n super().__init__(f(tb))\n else:\n super().__init__(tb)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.from_current_ExceptionInfo.from_current.return.ExceptionInfo_from_exc_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ExceptionInfo.from_current_ExceptionInfo.from_current.return.ExceptionInfo_from_exc_in", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 481, "end_line": 501, 
"span_ids": ["ExceptionInfo.from_current"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False)\nclass ExceptionInfo(Generic[_E]):\n\n @classmethod\n def from_current(\n cls, exprinfo: Optional[str] = None\n ) -> \"ExceptionInfo[BaseException]\":\n \"\"\"Return an ExceptionInfo matching the current traceback.\n\n .. warning::\n\n Experimental API\n\n :param exprinfo:\n A text string helping to determine if we should strip\n ``AssertionError`` from the output. Defaults to the exception\n message/``__str__()``.\n \"\"\"\n tup = sys.exc_info()\n assert tup[0] is not None, \"no current exception\"\n assert tup[1] is not None, \"no current exception\"\n assert tup[2] is not None, \"no current exception\"\n exc_info = (tup[0], tup[1], tup[2])\n return ExceptionInfo.from_exc_info(exc_info, exprinfo)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo_FormattedExcinfo.repr_args.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo_FormattedExcinfo.repr_args.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 669, "end_line": 713, "span_ids": ["FormattedExcinfo", "FormattedExcinfo.repr_args", "FormattedExcinfo._getentrysource", "FormattedExcinfo._getindent"], "tokens": 358}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n \"\"\"Presenting information about failing Functions and Generators.\"\"\"\n\n # for traceback entries\n flow_marker = \">\"\n fail_marker = \"E\"\n\n showlocals = attr.ib(type=bool, default=False)\n style = attr.ib(type=\"_TracebackStyle\", default=\"long\")\n abspath = attr.ib(type=bool, default=True)\n tbfilter = attr.ib(type=bool, default=True)\n funcargs = attr.ib(type=bool, default=False)\n truncate_locals = attr.ib(type=bool, default=True)\n chain = attr.ib(type=bool, default=True)\n astcache = attr.ib(default=attr.Factory(dict), init=False, repr=False)\n\n def _getindent(self, source: \"Source\") -> int:\n # Figure out indent for the given source.\n try:\n s = str(source.getstatement(len(source) - 1))\n except KeyboardInterrupt:\n raise\n except BaseException:\n try:\n s = str(source[-1])\n except KeyboardInterrupt:\n raise\n except BaseException:\n return 0\n return 4 + (len(s) - len(s.lstrip()))\n\n def _getentrysource(self, entry: TracebackEntry) -> Optional[\"Source\"]:\n source = entry.getsource(self.astcache)\n if source is not None:\n source = source.deindent()\n return source\n\n def repr_args(self, entry: TracebackEntry) -> Optional[\"ReprFuncArgs\"]:\n if self.funcargs:\n args = []\n for argname, 
argvalue in entry.frame.getargs(var=True):\n args.append((argname, saferepr(argvalue)))\n return ReprFuncArgs(args)\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.get_source_FormattedExcinfo.get_source.return.lines": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.get_source_FormattedExcinfo.get_source.return.lines", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 715, "end_line": 741, "span_ids": ["FormattedExcinfo.get_source"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n\n def get_source(\n self,\n source: Optional[\"Source\"],\n line_index: int = -1,\n excinfo: Optional[ExceptionInfo[BaseException]] = None,\n short: bool = False,\n ) -> List[str]:\n \"\"\"Return formatted and marked up source lines.\"\"\"\n lines = []\n if source is None or line_index >= len(source.lines):\n source = Source(\"???\")\n line_index = 0\n if line_index < 0:\n line_index += len(source)\n space_prefix = \" \"\n if short:\n lines.append(space_prefix + source.lines[line_index].strip())\n else:\n for line in source.lines[:line_index]:\n lines.append(space_prefix + line)\n lines.append(self.flow_marker + \" \" + source.lines[line_index])\n for line in source.lines[line_index + 1 :]:\n lines.append(space_prefix + line)\n if excinfo is not None:\n indent = 4 if short else self._getindent(source)\n lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))\n return lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.get_exconly_FormattedExcinfo.get_exconly.return.lines": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.get_exconly_FormattedExcinfo.get_exconly.return.lines", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 743, "end_line": 758, "span_ids": ["FormattedExcinfo.get_exconly"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n\n def get_exconly(\n self,\n excinfo: ExceptionInfo[BaseException],\n indent: int = 4,\n markall: bool = False,\n ) -> List[str]:\n lines = []\n indentstr = \" \" * indent\n # Get the real exception information out.\n exlines = 
excinfo.exconly(tryshort=True).split(\"\\n\")\n failindent = self.fail_marker + indentstr[1:]\n for line in exlines:\n lines.append(failindent + line)\n if not markall:\n failindent = indentstr\n return lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_locals_FormattedExcinfo.repr_locals.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_locals_FormattedExcinfo.repr_locals.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 760, "end_line": 784, "span_ids": ["FormattedExcinfo.repr_locals"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n\n def repr_locals(self, locals: Mapping[str, object]) -> Optional[\"ReprLocals\"]:\n if self.showlocals:\n lines = []\n keys = [loc for loc in locals if loc[0] != \"@\"]\n keys.sort()\n for name in keys:\n value = locals[name]\n if name == \"__builtins__\":\n lines.append(\"__builtins__ = \")\n else:\n # This formatting could all be handled by the\n # _repr() function, which is only reprlib.Repr in\n # disguise, so is very configurable.\n if self.truncate_locals:\n str_repr = saferepr(value)\n else:\n str_repr = safeformat(value)\n # if len(str_repr) < 70 or not isinstance(value, (list, tuple, dict)):\n lines.append(f\"{name:<10} = {str_repr}\")\n # else:\n # self._line(\"%-10s =\\\\\" % (name,))\n # # XXX\n # pprint.pprint(value, stream=self.excinfowriter)\n return ReprLocals(lines)\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TerminalRepr_TerminalRepr.toterminal.raise_NotImplementedError": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TerminalRepr_TerminalRepr.toterminal.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 947, "end_line": 961, "span_ids": ["TerminalRepr", "TerminalRepr.__repr__", "TerminalRepr.toterminal", "TerminalRepr.__str__"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(eq=False)\nclass TerminalRepr:\n def __str__(self) -> str:\n # FYI this is called from pytest-xdist's serialization of exception\n # information.\n io = StringIO()\n tw = TerminalWriter(file=io)\n self.toterminal(tw)\n return io.getvalue().strip()\n\n def 
__repr__(self) -> str:\n return \"<{} instance at {:0x}>\".format(self.__class__, id(self))\n\n def toterminal(self, tw: TerminalWriter) -> None:\n raise NotImplementedError()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprTraceback_ReprTraceback.toterminal.if_self_extraline_.tw_line_self_extraline_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprTraceback_ReprTraceback.toterminal.if_self_extraline_.tw_line_self_extraline_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1017, "end_line": 1041, "span_ids": ["ReprTraceback", "ReprTraceback.toterminal"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(eq=False)\nclass ReprTraceback(TerminalRepr):\n reprentries = attr.ib(type=Sequence[Union[\"ReprEntry\", \"ReprEntryNative\"]])\n extraline = attr.ib(type=Optional[str])\n style = attr.ib(type=\"_TracebackStyle\")\n\n entrysep = \"_ \"\n\n def toterminal(self, tw: TerminalWriter) -> None:\n # The entries might have different styles.\n for i, entry in enumerate(self.reprentries):\n if entry.style == \"long\":\n tw.line(\"\")\n entry.toterminal(tw)\n if i < len(self.reprentries) - 1:\n next_entry = self.reprentries[i + 1]\n if (\n entry.style == \"long\"\n or entry.style == \"short\"\n and next_entry.style == \"long\"\n ):\n tw.sep(self.entrysep)\n\n if self.extraline:\n tw.line(self.extraline)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprTracebackNative_ReprEntryNative.toterminal.tw_write_join_self_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprTracebackNative_ReprEntryNative.toterminal.tw_write_join_self_lin", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1044, "end_line": 1057, "span_ids": ["ReprEntryNative.toterminal", "ReprTracebackNative", "ReprEntryNative"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ReprTracebackNative(ReprTraceback):\n def __init__(self, tblines: Sequence[str]) -> None:\n self.style = \"native\"\n self.reprentries = [ReprEntryNative(tblines)]\n self.extraline = None\n\n\n@attr.s(eq=False)\nclass ReprEntryNative(TerminalRepr):\n lines = attr.ib(type=Sequence[str])\n style: \"_TracebackStyle\" = \"native\"\n\n def toterminal(self, tw: TerminalWriter) -> None:\n 
tw.write(\"\".join(self.lines))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprEntry_ReprEntry._write_entry_lines.for_line_in_failure_lines.tw_line_line_bold_True_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprEntry_ReprEntry._write_entry_lines.for_line_in_failure_lines.tw_line_line_bold_True_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1060, "end_line": 1110, "span_ids": ["ReprEntry._write_entry_lines", "ReprEntry"], "tokens": 438}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(eq=False)\nclass ReprEntry(TerminalRepr):\n lines = attr.ib(type=Sequence[str])\n reprfuncargs = attr.ib(type=Optional[\"ReprFuncArgs\"])\n reprlocals = attr.ib(type=Optional[\"ReprLocals\"])\n reprfileloc = attr.ib(type=Optional[\"ReprFileLocation\"])\n style = attr.ib(type=\"_TracebackStyle\")\n\n def _write_entry_lines(self, tw: TerminalWriter) -> None:\n \"\"\"Write the source code portions of a list of traceback entries with syntax highlighting.\n\n Usually entries are lines like these:\n\n \" x = 1\"\n \"> assert x == 2\"\n \"E assert 1 == 2\"\n\n This function takes care of rendering the \"source\" portions of it (the lines without\n the \"E\" prefix) using syntax highlighting, taking care to not highlighting the \">\"\n character, as doing so might break line continuations.\n \"\"\"\n\n if not self.lines:\n return\n\n # separate indents and source lines that are not failures: we want to\n # highlight the code but not the indentation, which may contain markers\n # such as \"> assert 0\"\n fail_marker = f\"{FormattedExcinfo.fail_marker} \"\n indent_size = len(fail_marker)\n indents: List[str] = []\n source_lines: List[str] = []\n failure_lines: List[str] = []\n for index, line in enumerate(self.lines):\n is_failure_line = line.startswith(fail_marker)\n if is_failure_line:\n # from this point on all lines are considered part of the failure\n failure_lines.extend(self.lines[index:])\n break\n else:\n if self.style == \"value\":\n source_lines.append(line)\n else:\n indents.append(line[:indent_size])\n source_lines.append(line[indent_size:])\n\n tw._write_source(source_lines, indents)\n\n # failure lines are always completely red and bold\n for line in failure_lines:\n tw.line(line, bold=True, red=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprEntry.toterminal_ReprEntry.__str__.return._n_n_format_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprEntry.toterminal_ReprEntry.__str__.return._n_n_format_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": 
"implementation", "start_line": 1112, "end_line": 1137, "span_ids": ["ReprEntry.toterminal", "ReprEntry.__str__"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(eq=False)\nclass ReprEntry(TerminalRepr):\n\n def toterminal(self, tw: TerminalWriter) -> None:\n if self.style == \"short\":\n assert self.reprfileloc is not None\n self.reprfileloc.toterminal(tw)\n self._write_entry_lines(tw)\n if self.reprlocals:\n self.reprlocals.toterminal(tw, indent=\" \" * 8)\n return\n\n if self.reprfuncargs:\n self.reprfuncargs.toterminal(tw)\n\n self._write_entry_lines(tw)\n\n if self.reprlocals:\n tw.line(\"\")\n self.reprlocals.toterminal(tw)\n if self.reprfileloc:\n if self.lines:\n tw.line(\"\")\n self.reprfileloc.toterminal(tw)\n\n def __str__(self) -> str:\n return \"{}\\n{}\\n{}\".format(\n \"\\n\".join(self.lines), self.reprlocals, self.reprfileloc\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprFileLocation_ReprLocals.toterminal.for_line_in_self_lines_.tw_line_indent_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_ReprFileLocation_ReprLocals.toterminal.for_line_in_self_lines_.tw_line_indent_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1140, "end_line": 1163, "span_ids": ["ReprFileLocation", "ReprLocals", "ReprLocals.toterminal", "ReprFileLocation.toterminal"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(eq=False)\nclass ReprFileLocation(TerminalRepr):\n path = attr.ib(type=str, converter=str)\n lineno = attr.ib(type=int)\n message = attr.ib(type=str)\n\n def toterminal(self, tw: TerminalWriter) -> None:\n # Filename and lineno output for each entry, using an output format\n # that most editors understand.\n msg = self.message\n i = msg.find(\"\\n\")\n if i != -1:\n msg = msg[:i]\n tw.write(self.path, bold=True, red=True)\n tw.line(f\":{self.lineno}: {msg}\")\n\n\n@attr.s(eq=False)\nclass ReprLocals(TerminalRepr):\n lines = attr.ib(type=Sequence[str])\n\n def toterminal(self, tw: TerminalWriter, indent=\"\") -> None:\n for line in self.lines:\n tw.line(indent + line)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_safeformat_saferepr.return.SafeRepr_maxsize_repr_ob": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_safeformat_saferepr.return.SafeRepr_maxsize_repr_ob", "embedding": null, "metadata": {"file_path": 
"src/_pytest/_io/saferepr.py", "file_name": "saferepr.py", "file_type": "text/x-python", "category": "implementation", "start_line": 66, "end_line": 88, "span_ids": ["saferepr", "safeformat"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def safeformat(obj: object) -> str:\n \"\"\"Return a pretty printed string for the given object.\n\n Failing __repr__ functions of user instances will be represented\n with a short exception info.\n \"\"\"\n try:\n return pprint.pformat(obj)\n except Exception as exc:\n return _format_repr_exception(exc, obj)\n\n\ndef saferepr(obj: object, maxsize: int = 240) -> str:\n \"\"\"Return a size-limited safe repr-string for the given object.\n\n Failing __repr__ functions of user instances will be represented\n with a short exception info and 'saferepr' generally takes\n care to never raise exceptions itself.\n\n This function is a wrapper around the Repr/reprlib functionality of the\n standard 2.6 lib.\n \"\"\"\n return SafeRepr(maxsize).repr(obj)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_AlwaysDispatchingPrettyPrinter_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/saferepr.py_AlwaysDispatchingPrettyPrinter_", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/saferepr.py", "file_name": "saferepr.py", "file_type": "text/x-python", "category": "implementation", "start_line": 91, "end_line": 135, "span_ids": ["AlwaysDispatchingPrettyPrinter._format", "AlwaysDispatchingPrettyPrinter", "_pformat_dispatch"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AlwaysDispatchingPrettyPrinter(pprint.PrettyPrinter):\n \"\"\"PrettyPrinter that always dispatches (regardless of width).\"\"\"\n\n def _format(\n self,\n object: object,\n stream: IO[str],\n indent: int,\n allowance: int,\n context: Dict[int, Any],\n level: int,\n ) -> None:\n # Type ignored because _dispatch is private.\n p = self._dispatch.get(type(object).__repr__, None) # type: ignore[attr-defined]\n\n objid = id(object)\n if objid in context or p is None:\n # Type ignored because _format is private.\n super()._format( # type: ignore[misc]\n object,\n stream,\n indent,\n allowance,\n context,\n level,\n )\n return\n\n context[objid] = 1\n p(self, object, stream, indent, allowance, context, level + 1)\n del context[objid]\n\n\ndef _pformat_dispatch(\n object: object,\n indent: int = 1,\n width: int = 80,\n depth: Optional[int] = None,\n *,\n compact: bool = False,\n) -> str:\n return AlwaysDispatchingPrettyPrinter(\n indent=indent, width=width, depth=depth, compact=compact\n ).pformat(object)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_runtest_protocol_pytest_runtest_protocol.callbinrepr.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_runtest_protocol_pytest_runtest_protocol.callbinrepr.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 115, "end_line": 152, "span_ids": ["pytest_runtest_protocol"], "tokens": 357}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(tryfirst=True, hookwrapper=True)\ndef pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:\n \"\"\"Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks.\n\n The rewrite module will use util._reprcompare if it exists to use custom\n reporting via the pytest_assertrepr_compare hook. This sets up this custom\n comparison for the test.\n \"\"\"\n\n ihook = item.ihook\n\n def callbinrepr(op, left: object, right: object) -> Optional[str]:\n \"\"\"Call the pytest_assertrepr_compare hook and prepare the result.\n\n This uses the first result from the hook and then ensures the\n following:\n * Overly verbose explanations are truncated unless configured otherwise\n (eg. if running in verbose mode).\n * Embedded newlines are escaped to help util.format_explanation()\n later.\n * If the rewrite mode is used embedded %-characters are replaced\n to protect later % formatting.\n\n The result can be formatted by util.format_explanation() for\n pretty printing.\n \"\"\"\n hook_result = ihook.pytest_assertrepr_compare(\n config=item.config, op=op, left=left, right=right\n )\n for new_expl in hook_result:\n if new_expl:\n new_expl = truncate.truncate_if_required(new_expl, item)\n new_expl = [line.replace(\"\\n\", \"\\\\n\") for line in new_expl]\n res = \"\\n~\".join(new_expl)\n if item.config.getvalue(\"assertmode\") == \"rewrite\":\n res = res.replace(\"%\", \"%%\")\n return res\n return None\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_runtest_protocol.saved_assert_hooks_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_runtest_protocol.saved_assert_hooks_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 157, "end_line": 183, "span_ids": ["pytest_runtest_protocol", "pytest_sessionfinish", "pytest_assertrepr_compare"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(tryfirst=True, hookwrapper=True)\ndef pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:\n # ... other code\n\n saved_assert_hooks = util._reprcompare, util._assertion_pass\n util._reprcompare = callbinrepr\n\n if ihook.pytest_assertion_pass.get_hookimpls():\n\n def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None:\n ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl)\n\n util._assertion_pass = call_assertion_pass_hook\n\n yield\n\n util._reprcompare, util._assertion_pass = saved_assert_hooks\n\n\ndef pytest_sessionfinish(session: \"Session\") -> None:\n assertstate = session.config._store.get(assertstate_key, None)\n if assertstate:\n if assertstate.hook is not None:\n assertstate.hook.set_session(None)\n\n\ndef pytest_assertrepr_compare(\n config: Config, op: str, left: Any, right: Any\n) -> Optional[List[str]]:\n return util.assertrepr_compare(config=config, op=op, left=left, right=right)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__write_pyc_fp__write_pyc_fp.fp_write_marshal_dumps_co": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__write_pyc_fp__write_pyc_fp.fp_write_marshal_dumps_co", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 278, "end_line": 294, "span_ids": ["_write_pyc_fp"], "tokens": 229}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _write_pyc_fp(\n fp: IO[bytes], source_stat: os.stat_result, co: types.CodeType\n) -> None:\n # Technically, we don't have to have the same pyc format as\n # (C)Python, since these \"pycs\" should never be seen by builtin\n # import. 
However, there's little reason to deviate.\n fp.write(importlib.util.MAGIC_NUMBER)\n # https://www.python.org/dev/peps/pep-0552/\n if sys.version_info >= (3, 7):\n flags = b\"\\x00\\x00\\x00\\x00\"\n fp.write(flags)\n # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903)\n mtime = int(source_stat.st_mtime) & 0xFFFFFFFF\n size = source_stat.st_size & 0xFFFFFFFF\n # \" List[ast.stmt]:\n \"\"\"Return the AST statements to replace the ast.Assert instance.\n\n This rewrites the test of an assertion to provide\n intermediate values and replace it with an if statement which\n raises an assertion error with a detailed explanation in case\n the expression is false.\n \"\"\"\n if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1:\n from _pytest.warning_types import PytestAssertRewriteWarning\n import warnings\n\n # TODO: This assert should not be needed.\n assert self.module_path is not None\n warnings.warn_explicit(\n PytestAssertRewriteWarning(\n \"assertion is always true, perhaps remove parentheses?\"\n ),\n category=None,\n filename=os.fspath(self.module_path),\n lineno=assert_.lineno,\n )\n\n self.statements: List[ast.stmt] = []\n self.variables: List[str] = []\n self.variable_counter = itertools.count()\n\n if self.enable_assertion_pass_hook:\n self.format_variables: List[str] = []\n\n self.stack: List[Dict[str, ast.expr]] = []\n self.expl_stmts: List[ast.stmt] = []\n self.push_format_context()\n # Rewrite assert into a bunch of statements.\n top_condition, explanation = self.visit(assert_.test)\n\n negation = ast.UnaryOp(ast.Not(), top_condition)\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Assert.None_2_AssertionRewriter.visit_Assert.return.self_statements": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewriter.visit_Assert.None_2_AssertionRewriter.visit_Assert.return.self_statements", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 859, "end_line": 935, "span_ids": ["AssertionRewriter.visit_Assert"], "tokens": 686}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewriter(ast.NodeVisitor):\n\n def visit_Assert(self, assert_: ast.Assert) -> List[ast.stmt]:\n # ... 
other code\n\n if self.enable_assertion_pass_hook: # Experimental pytest_assertion_pass hook\n msg = self.pop_format_context(ast.Str(explanation))\n\n # Failed\n if assert_.msg:\n assertmsg = self.helper(\"_format_assertmsg\", assert_.msg)\n gluestr = \"\\n>assert \"\n else:\n assertmsg = ast.Str(\"\")\n gluestr = \"assert \"\n err_explanation = ast.BinOp(ast.Str(gluestr), ast.Add(), msg)\n err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation)\n err_name = ast.Name(\"AssertionError\", ast.Load())\n fmt = self.helper(\"_format_explanation\", err_msg)\n exc = ast.Call(err_name, [fmt], [])\n raise_ = ast.Raise(exc, None)\n statements_fail = []\n statements_fail.extend(self.expl_stmts)\n statements_fail.append(raise_)\n\n # Passed\n fmt_pass = self.helper(\"_format_explanation\", msg)\n orig = self._assert_expr_to_lineno()[assert_.lineno]\n hook_call_pass = ast.Expr(\n self.helper(\n \"_call_assertion_pass\",\n ast.Num(assert_.lineno),\n ast.Str(orig),\n fmt_pass,\n )\n )\n # If any hooks implement assert_pass hook\n hook_impl_test = ast.If(\n self.helper(\"_check_if_assertion_pass_impl\"),\n self.expl_stmts + [hook_call_pass],\n [],\n )\n statements_pass = [hook_impl_test]\n\n # Test for assertion condition\n main_test = ast.If(negation, statements_fail, statements_pass)\n self.statements.append(main_test)\n if self.format_variables:\n variables = [\n ast.Name(name, ast.Store()) for name in self.format_variables\n ]\n clear_format = ast.Assign(variables, ast.NameConstant(None))\n self.statements.append(clear_format)\n\n else: # Original assertion rewriting\n # Create failure message.\n body = self.expl_stmts\n self.statements.append(ast.If(negation, body, []))\n if assert_.msg:\n assertmsg = self.helper(\"_format_assertmsg\", assert_.msg)\n explanation = \"\\n>assert \" + explanation\n else:\n assertmsg = ast.Str(\"\")\n explanation = \"assert \" + explanation\n template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))\n msg = self.pop_format_context(template)\n fmt = self.helper(\"_format_explanation\", msg)\n err_name = ast.Name(\"AssertionError\", ast.Load())\n exc = ast.Call(err_name, [fmt], [])\n raise_ = ast.Raise(exc, None)\n\n body.append(raise_)\n\n # Clear temporary variables by setting them to None.\n if self.variables:\n variables = [ast.Name(name, ast.Store()) for name in self.variables]\n clear = ast.Assign(variables, ast.NameConstant(None))\n self.statements.append(clear)\n # Fix line numbers.\n for stmt in self.statements:\n set_location(stmt, assert_.lineno, assert_.col_offset)\n return self.statements", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache._getvaluepath_Cache.get.try_.except_ValueError_OSErr.return.default": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache._getvaluepath_Cache.get.try_.except_ValueError_OSErr.return.default", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 142, "end_line": 162, "span_ids": ["Cache.get", "Cache._getvaluepath"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass Cache:\n\n def _getvaluepath(self, key: str) -> Path:\n return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key))\n\n def get(self, key: str, default):\n \"\"\"Return the cached value for the given key.\n\n If no value was yet cached or the value cannot be read, the specified\n default is returned.\n\n :param key:\n Must be a ``/`` separated value. Usually the first\n name is the name of your plugin or your application.\n :param default:\n The value to return in case of a cache-miss or invalid cache value.\n \"\"\"\n path = self._getvaluepath(key)\n try:\n with path.open(\"r\") as f:\n return json.load(f)\n except (ValueError, OSError):\n return default", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.set_Cache.set.None_1.else_.with_f_.f_write_data_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.set_Cache.set.None_1.else_.with_f_.f_write_data_", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 164, "end_line": 193, "span_ids": ["Cache.set"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass Cache:\n\n def set(self, key: str, value: object) -> None:\n \"\"\"Save value for the given key.\n\n :param key:\n Must be a ``/`` separated value. 
Usually the first\n name is the name of your plugin or your application.\n :param value:\n Must be of any combination of basic python types,\n including nested types like lists of dictionaries.\n \"\"\"\n path = self._getvaluepath(key)\n try:\n if path.parent.is_dir():\n cache_dir_exists_already = True\n else:\n cache_dir_exists_already = self._cachedir.exists()\n path.parent.mkdir(exist_ok=True, parents=True)\n except OSError:\n self.warn(\"could not create cache path {path}\", path=path, _ispytest=True)\n return\n if not cache_dir_exists_already:\n self._ensure_supporting_files()\n data = json.dumps(value, indent=2, sort_keys=True)\n try:\n f = path.open(\"w\")\n except OSError:\n self.warn(\"cache could not write path {path}\", path=path, _ispytest=True)\n else:\n with f:\n f.write(data)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPluginCollWrapper_LFPluginCollWrapper.pytest_make_collect_report.yield": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPluginCollWrapper_LFPluginCollWrapper.pytest_make_collect_report.yield", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 208, "end_line": 254, "span_ids": ["LFPluginCollWrapper.pytest_make_collect_report", "LFPluginCollWrapper"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LFPluginCollWrapper:\n def __init__(self, lfplugin: \"LFPlugin\") -> None:\n self.lfplugin = lfplugin\n self._collected_at_least_one_failure = False\n\n @hookimpl(hookwrapper=True)\n def pytest_make_collect_report(self, collector: nodes.Collector):\n if isinstance(collector, Session):\n out = yield\n res: CollectReport = out.get_result()\n\n # Sort any lf-paths to the beginning.\n lf_paths = self.lfplugin._last_failed_paths\n res.result = sorted(\n res.result,\n key=lambda x: 0 if Path(str(x.fspath)) in lf_paths else 1,\n )\n return\n\n elif isinstance(collector, Module):\n if Path(str(collector.fspath)) in self.lfplugin._last_failed_paths:\n out = yield\n res = out.get_result()\n result = res.result\n lastfailed = self.lfplugin.lastfailed\n\n # Only filter with known failures.\n if not self._collected_at_least_one_failure:\n if not any(x.nodeid in lastfailed for x in result):\n return\n self.lfplugin.config.pluginmanager.register(\n LFPluginCollSkipfiles(self.lfplugin), \"lfplugin-collskip\"\n )\n self._collected_at_least_one_failure = True\n\n session = collector.session\n result[:] = [\n x\n for x in result\n if x.nodeid in lastfailed\n # Include any passed arguments (not trivial to filter).\n or session.isinitpath(x.fspath)\n # Keep all sub-collectors.\n or isinstance(x, nodes.Collector)\n ]\n return\n yield", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPluginCollSkipfiles_LFPluginCollSkipfiles.pytest_make_collect_report.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_LFPluginCollSkipfiles_LFPluginCollSkipfiles.pytest_make_collect_report.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 257, "end_line": 275, "span_ids": ["LFPluginCollSkipfiles.pytest_make_collect_report", "LFPluginCollSkipfiles"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LFPluginCollSkipfiles:\n def __init__(self, lfplugin: \"LFPlugin\") -> None:\n self.lfplugin = lfplugin\n\n @hookimpl\n def pytest_make_collect_report(\n self, collector: nodes.Collector\n ) -> Optional[CollectReport]:\n # Packages are Modules, but _last_failed_paths only contains\n # test-bearing paths and doesn't try to include the paths of their\n # packages, so don't filter them.\n if isinstance(collector, Module) and not isinstance(collector, Package):\n if Path(str(collector.fspath)) not in self.lfplugin._last_failed_paths:\n self.lfplugin._skipped_files += 1\n\n return CollectReport(\n collector.nodeid, \"passed\", longrepr=None, result=[]\n )\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureIO_TeeCaptureIO.write.return.self__other_write_s_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureIO_TeeCaptureIO.write.return.self__other_write_s_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 193, "end_line": 209, "span_ids": ["TeeCaptureIO.write", "CaptureIO", "TeeCaptureIO", "CaptureIO.getvalue"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CaptureIO(io.TextIOWrapper):\n def __init__(self) -> None:\n super().__init__(io.BytesIO(), encoding=\"UTF-8\", newline=\"\", write_through=True)\n\n def getvalue(self) -> str:\n assert isinstance(self.buffer, io.BytesIO)\n return self.buffer.getvalue().decode(\"UTF-8\")\n\n\nclass TeeCaptureIO(CaptureIO):\n def __init__(self, other: TextIO) -> None:\n self._other = other\n super().__init__()\n\n def write(self, s: str) -> int:\n super().write(s)\n return self._other.write(s)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_iscoroutinefunction_iscoroutinefunction.return.inspect_iscoroutinefuncti": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_iscoroutinefunction_iscoroutinefunction.return.inspect_iscoroutinefuncti", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 61, "end_line": 70, "span_ids": ["iscoroutinefunction"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def iscoroutinefunction(func: object) -> bool:\n \"\"\"Return True if func is a coroutine function (a function defined with async\n def syntax, and doesn't contain yield), or a function decorated with\n @asyncio.coroutine.\n\n Note: copied and modified from Python 3.5's builtin couroutines.py to avoid\n importing asyncio directly, which in turns also initializes the \"logging\"\n module as a side-effect (see issue #8).\n \"\"\"\n return inspect.iscoroutinefunction(func) or getattr(func, \"_is_coroutine\", False)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_is_async_function_getlocation.return._s_d_fn_lineno_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_is_async_function_getlocation.return._s_d_fn_lineno_1", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 90, "span_ids": ["is_async_function", "getlocation"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def is_async_function(func: object) -> bool:\n \"\"\"Return True if the given function seems to be an async function or\n an async generator.\"\"\"\n return iscoroutinefunction(func) or inspect.isasyncgenfunction(func)\n\n\ndef getlocation(function, curdir: Optional[str] = None) -> str:\n function = get_real_func(function)\n fn = Path(inspect.getfile(function))\n lineno = function.__code__.co_firstlineno\n if curdir is not None:\n try:\n relfn = fn.relative_to(curdir)\n except ValueError:\n pass\n else:\n return \"%s:%d\" % (relfn, lineno + 1)\n return \"%s:%d\" % (fn, lineno + 1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_safe_isclass_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_safe_isclass_", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", 
"category": "implementation", "start_line": 323, "end_line": 407, "span_ids": ["None_4.else_.cached_property", "assert_never", "impl:23", "safe_isclass", "None_4.else_.cached_property:2"], "tokens": 572}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def safe_isclass(obj: object) -> bool:\n \"\"\"Ignore any exception via isinstance on Python 3.\"\"\"\n try:\n return inspect.isclass(obj)\n except Exception:\n return False\n\n\nif TYPE_CHECKING:\n if sys.version_info >= (3, 8):\n from typing import final as final\n else:\n from typing_extensions import final as final\nelif sys.version_info >= (3, 8):\n from typing import final as final\nelse:\n\n def final(f):\n return f\n\n\nif sys.version_info >= (3, 8):\n from functools import cached_property as cached_property\nelse:\n from typing import overload\n from typing import Type\n\n class cached_property(Generic[_S, _T]):\n __slots__ = (\"func\", \"__doc__\")\n\n def __init__(self, func: Callable[[_S], _T]) -> None:\n self.func = func\n self.__doc__ = func.__doc__\n\n @overload\n def __get__(\n self, instance: None, owner: Optional[Type[_S]] = ...\n ) -> \"cached_property[_S, _T]\":\n ...\n\n @overload\n def __get__(self, instance: _S, owner: Optional[Type[_S]] = ...) -> _T:\n ...\n\n def __get__(self, instance, owner=None):\n if instance is None:\n return self\n value = instance.__dict__[self.func.__name__] = self.func(instance)\n return value\n\n\n# Perform exhaustiveness checking.\n#\n# Consider this example:\n#\n# MyUnion = Union[int, str]\n#\n# def handle(x: MyUnion) -> int {\n# if isinstance(x, int):\n# return 1\n# elif isinstance(x, str):\n# return 2\n# else:\n# raise Exception('unreachable')\n#\n# Now suppose we add a new variant:\n#\n# MyUnion = Union[int, str, bytes]\n#\n# After doing this, we must remember ourselves to go and update the handle\n# function to handle the new variant.\n#\n# With `assert_never` we can do better:\n#\n# // raise Exception('unreachable')\n# return assert_never(x)\n#\n# Now, if we forget to handle the new variant, the type-checker will emit a\n# compile-time error, instead of the runtime error we would have gotten\n# previously.\n#\n# This also work for Enums (if you use `is` to compare) and Literals.\ndef assert_never(value: \"NoReturn\") -> \"NoReturn\":\n assert False, \"Unhandled value: {} ({})\".format(value, type(value).__name__)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.None_4_PytestPluginManager.consider_preparse.while_i_n_.if_isinstance_opt_str_.self_consider_pluginarg_p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.None_4_PytestPluginManager.consider_preparse.while_i_n_.if_isinstance_opt_str_.self_consider_pluginarg_p", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 627, "end_line": 653, "span_ids": ["PytestPluginManager._check_non_top_pytest_plugins", 
"PytestPluginManager.consider_preparse"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n #\n # API for bootstrapping plugin loading\n #\n #\n\n def consider_preparse(\n self, args: Sequence[str], *, exclude_only: bool = False\n ) -> None:\n i = 0\n n = len(args)\n while i < n:\n opt = args[i]\n i += 1\n if isinstance(opt, str):\n if opt == \"-p\":\n try:\n parg = args[i]\n except IndexError:\n return\n i += 1\n elif opt.startswith(\"-p\"):\n parg = opt[2:]\n else:\n continue\n if exclude_only and not parg.startswith(\"no:\"):\n continue\n self.consider_pluginarg(parg)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__get_plugin_specs_as_list_notset.Notset_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__get_plugin_specs_as_list_notset.Notset_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 734, "end_line": 768, "span_ids": ["Notset.__repr__", "Notset", "impl:18", "_ensure_removed_sysmodule", "_get_plugin_specs_as_list"], "tokens": 229}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_plugin_specs_as_list(\n specs: Union[None, types.ModuleType, str, Sequence[str]]\n) -> List[str]:\n \"\"\"Parse a plugins specification into a list of plugin names.\"\"\"\n # None means empty.\n if specs is None:\n return []\n # Workaround for #3899 - a submodule which happens to be called \"pytest_plugins\".\n if isinstance(specs, types.ModuleType):\n return []\n # Comma-separated list.\n if isinstance(specs, str):\n return specs.split(\",\") if specs else []\n # Direct specification.\n if isinstance(specs, collections.abc.Sequence):\n return list(specs)\n raise UsageError(\n \"Plugins may be specified as a sequence or a ','-separated string of plugin names. 
Got: %r\"\n % specs\n )\n\n\ndef _ensure_removed_sysmodule(modname: str) -> None:\n try:\n del sys.modules[modname]\n except KeyError:\n pass\n\n\nclass Notset:\n def __repr__(self):\n return \"\"\n\n\nnotset = Notset()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__assertion_supported_create_terminal_writer.return.tw": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__assertion_supported_create_terminal_writer.return.tw", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1537, "end_line": 1567, "span_ids": ["create_terminal_writer", "_assertion_supported"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _assertion_supported() -> bool:\n try:\n assert False\n except AssertionError:\n return True\n else:\n return False # type: ignore[unreachable]\n\n\ndef create_terminal_writer(\n config: Config, file: Optional[TextIO] = None\n) -> TerminalWriter:\n \"\"\"Create a TerminalWriter instance configured according to the options\n in the config object.\n\n Every code which requires a TerminalWriter object and has access to a\n config object should use this function.\n \"\"\"\n tw = TerminalWriter(file=file)\n\n if config.option.color == \"yes\":\n tw.hasmarkup = True\n elif config.option.color == \"no\":\n tw.hasmarkup = False\n\n if config.option.code_highlight == \"yes\":\n tw.code_highlight = True\n elif config.option.code_highlight == \"no\":\n tw.code_highlight = False\n\n return tw", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.parse_setoption_Parser.parse_known_args.return.self_parse_known_and_unkn": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.parse_setoption_Parser.parse_known_args.return.self_parse_known_and_unkn", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 131, "end_line": 148, "span_ids": ["Parser.parse_known_args", "Parser.parse_setoption"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Parser:\n\n def parse_setoption(\n self,\n args: Sequence[Union[str, \"os.PathLike[str]\"]],\n option: argparse.Namespace,\n namespace: Optional[argparse.Namespace] = None,\n ) -> List[str]:\n parsedoption = self.parse(args, namespace=namespace)\n for name, 
value in parsedoption.__dict__.items():\n setattr(option, name, value)\n return cast(List[str], getattr(parsedoption, FILE_OR_DIR))\n\n def parse_known_args(\n self,\n args: Sequence[Union[str, \"os.PathLike[str]\"]],\n namespace: Optional[argparse.Namespace] = None,\n ) -> argparse.Namespace:\n \"\"\"Parse and return a namespace object with known arguments at this point.\"\"\"\n return self.parse_known_and_unknown_args(args, namespace=namespace)[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.parse_known_and_unknown_args_Parser.parse_known_and_unknown_args.return.optparser_parse_known_arg": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser.parse_known_and_unknown_args_Parser.parse_known_and_unknown_args.return.optparser_parse_known_arg", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 150, "end_line": 159, "span_ids": ["Parser.parse_known_and_unknown_args"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Parser:\n\n def parse_known_and_unknown_args(\n self,\n args: Sequence[Union[str, \"os.PathLike[str]\"]],\n namespace: Optional[argparse.Namespace] = None,\n ) -> Tuple[argparse.Namespace, List[str]]:\n \"\"\"Parse and return a namespace object with known arguments, and\n the remaining arguments unknown at this point.\"\"\"\n optparser = self._getparser()\n strargs = [os.fspath(x) for x in args]\n return optparser.parse_known_args(strargs, namespace=namespace)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_OptionGroup_OptionGroup.addoption.self__addoption_instance_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_OptionGroup_OptionGroup.addoption.self__addoption_instance_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 323, "end_line": 346, "span_ids": ["OptionGroup.addoption", "OptionGroup"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class OptionGroup:\n def __init__(\n self, name: str, description: str = \"\", parser: Optional[Parser] = None\n ) -> None:\n self.name = name\n self.description = description\n self.options: List[Argument] = []\n self.parser = parser\n\n def addoption(self, *optnames: str, **attrs: Any) -> None:\n \"\"\"Add an 
option to this group.\n\n If a shortened version of a long option is specified, it will\n be suppressed in the help. addoption('--twowords', '--two-words')\n results in help showing '--two-words' only, but --twowords gets\n accepted **and** the automatic destination is in args.twowords.\n \"\"\"\n conflict = set(optnames).intersection(\n name for opt in self.options for name in opt.names()\n )\n if conflict:\n raise ValueError(\"option names %s already added\" % conflict)\n option = Argument(*optnames, **attrs)\n self._addoption_instance(option, shortupper=False)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_OptionGroup._addoption_OptionGroup._addoption_instance.self_options_append_optio": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_OptionGroup._addoption_OptionGroup._addoption_instance.self_options_append_optio", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 357, "end_line": 368, "span_ids": ["OptionGroup._addoption_instance", "OptionGroup._addoption"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class OptionGroup:\n\n def _addoption(self, *optnames: str, **attrs: Any) -> None:\n option = Argument(*optnames, **attrs)\n self._addoption_instance(option, shortupper=True)\n\n def _addoption_instance(self, option: \"Argument\", shortupper: bool = False) -> None:\n if not shortupper:\n for opt in option._short_opts:\n if opt[0] == \"-\" and opt[1].islower():\n raise ValueError(\"lowercase shortoptions reserved\")\n if self.parser:\n self.parser.processoption(option)\n self.options.append(option)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser_MyOptionParser._Type_ignored_because_ty": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser_MyOptionParser._Type_ignored_because_ty", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 362, "end_line": 392, "span_ids": ["MyOptionParser.error", "MyOptionParser"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MyOptionParser(argparse.ArgumentParser):\n def __init__(\n self,\n parser: Parser,\n extra_info: Optional[Dict[str, Any]] = None,\n prog: Optional[str] = None,\n ) -> None:\n self._parser = parser\n 
argparse.ArgumentParser.__init__(\n self,\n prog=prog,\n usage=parser._usage,\n add_help=False,\n formatter_class=DropShorterLongHelpFormatter,\n allow_abbrev=False,\n )\n # extra_info is a dict of (param -> value) to display if there's\n # an usage error to provide more contextual information to the user.\n self.extra_info = extra_info if extra_info else {}\n\n def error(self, message: str) -> \"NoReturn\":\n \"\"\"Transform argparse error message into UsageError.\"\"\"\n msg = f\"{self.prog}: error: {message}\"\n\n if hasattr(self._parser, \"_config_source_hint\"):\n # Type ignored because the attribute is set dynamically.\n msg = f\"{msg} ({self._parser._config_source_hint})\" # type: ignore\n\n raise UsageError(self.format_usage() + msg)\n\n # Type ignored because typeshed has a very complex type in the superclass.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser.parse_args_MyOptionParser.parse_args.return.parsed": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_MyOptionParser.parse_args_MyOptionParser.parse_args.return.parsed", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 393, "end_line": 408, "span_ids": ["MyOptionParser.parse_args"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MyOptionParser(argparse.ArgumentParser):\n def parse_args( # type: ignore\n self,\n args: Optional[Sequence[str]] = None,\n namespace: Optional[argparse.Namespace] = None,\n ) -> argparse.Namespace:\n \"\"\"Allow splitting of positional arguments.\"\"\"\n parsed, unrecognized = self.parse_known_args(args, namespace)\n if unrecognized:\n for arg in unrecognized:\n if arg and arg[0] == \"-\":\n lines = [\"unrecognized arguments: %s\" % (\" \".join(unrecognized))]\n for k, v in sorted(self.extra_info.items()):\n lines.append(f\" {k}: {v}\")\n self.error(\"\\n\".join(lines))\n getattr(parsed, FILE_OR_DIR).extend(unrecognized)\n return parsed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_wrap_pytest_function_for_tracing_maybe_wrap_pytest_function_for_tracing.if_pyfuncitem_config_getv.wrap_pytest_function_for_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_wrap_pytest_function_for_tracing_maybe_wrap_pytest_function_for_tracing.if_pyfuncitem_config_getv.wrap_pytest_function_for_", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 310, "end_line": 333, "span_ids": ["wrap_pytest_function_for_tracing", "maybe_wrap_pytest_function_for_tracing"], "tokens": 259}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def wrap_pytest_function_for_tracing(pyfuncitem):\n \"\"\"Change the Python function object of the given Function item by a\n wrapper which actually enters pdb before calling the python function\n itself, effectively leaving the user in the pdb prompt in the first\n statement of the function.\"\"\"\n _pdb = pytestPDB._init_pdb(\"runcall\")\n testfunction = pyfuncitem.obj\n\n # we can't just return `partial(pdb.runcall, testfunction)` because (on\n # python < 3.7.4) runcall's first param is `func`, which means we'd get\n # an exception if one of the kwargs to testfunction was called `func`.\n @functools.wraps(testfunction)\n def wrapper(*args, **kwargs):\n func = functools.partial(testfunction, *args, **kwargs)\n _pdb.runcall(func)\n\n pyfuncitem.obj = wrapper\n\n\ndef maybe_wrap_pytest_function_for_tracing(pyfuncitem):\n \"\"\"Wrap the given pytestfunct item for tracing support if --trace was given in\n the command line.\"\"\"\n if pyfuncitem.config.getvalue(\"trace\"):\n wrap_pytest_function_for_tracing(pyfuncitem)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestTextfile_DoctestTextfile.collect.if_test_examples_.yield_DoctestItem_from_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestTextfile_DoctestTextfile.collect.if_test_examples_.yield_DoctestItem_from_pa", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 419, "end_line": 447, "span_ids": ["DoctestTextfile", "DoctestTextfile.collect"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DoctestTextfile(pytest.Module):\n obj = None\n\n def collect(self) -> Iterable[DoctestItem]:\n import doctest\n\n # Inspired by doctest.testfile; ideally we would use it directly,\n # but it doesn't support passing a custom checker.\n encoding = self.config.getini(\"doctest_encoding\")\n text = self.fspath.read_text(encoding)\n filename = str(self.fspath)\n name = self.fspath.basename\n globs = {\"__name__\": \"__main__\"}\n\n optionflags = get_optionflags(self)\n\n runner = _get_runner(\n verbose=False,\n optionflags=optionflags,\n checker=_get_checker(),\n continue_on_failure=_get_continue_on_failure(self.config),\n )\n\n parser = doctest.DocTestParser()\n test = parser.get_doctest(text, globs, name, filename, 0)\n if test.examples:\n yield DoctestItem.from_parent(\n self, name=test.name, runner=runner, dtest=test\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestModule.collect.if_self_fspath_basename__DoctestModule.collect.for_test_in_finder_find_m.if_test_examples_skip.yield_DoctestItem_from_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestModule.collect.if_self_fspath_basename__DoctestModule.collect.for_test_in_finder_find_m.if_test_examples_skip.yield_DoctestItem_from_pa", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 537, "end_line": 563, "span_ids": ["DoctestModule.collect.MockAwareDocTestFinder:2"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DoctestModule(pytest.Module):\n def collect(self) -> Iterable[DoctestItem]:\n # ... other code\n\n if self.fspath.basename == \"conftest.py\":\n module = self.config.pluginmanager._importconftest(\n Path(self.fspath), self.config.getoption(\"importmode\")\n )\n else:\n try:\n module = import_path(self.fspath)\n except ImportError:\n if self.config.getvalue(\"doctest_ignore_import_errors\"):\n pytest.skip(\"unable to import module %r\" % self.fspath)\n else:\n raise\n # Uses internal doctest module parsing mechanism.\n finder = MockAwareDocTestFinder()\n optionflags = get_optionflags(self)\n runner = _get_runner(\n verbose=False,\n optionflags=optionflags,\n checker=_get_checker(),\n continue_on_failure=_get_continue_on_failure(self.config),\n )\n\n for test in finder.find(module, module.__name__):\n if test.examples: # skip empty doctests\n yield DoctestItem.from_parent(\n self, name=test.name, runner=runner, dtest=test\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest.config_FixtureRequest._fillfixtures.for_argname_in_fixturenam.if_argname_not_in_item_fu.item_funcargs_argname_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest.config_FixtureRequest._fillfixtures.for_argname_in_fixturenam.if_argname_not_in_item_fu.item_funcargs_argname_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 481, "end_line": 571, "span_ids": ["FixtureRequest.raiseerror", "FixtureRequest._fillfixtures", "FixtureRequest.instance", "FixtureRequest.applymarker", "FixtureRequest.function", "FixtureRequest.cls", "FixtureRequest.fspath", "FixtureRequest._addfinalizer", "FixtureRequest.keywords", "FixtureRequest.session", "FixtureRequest.config", "FixtureRequest.module", "FixtureRequest.addfinalizer"], "tokens": 816}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n 
@property\n def config(self) -> Config:\n \"\"\"The pytest config object associated with this request.\"\"\"\n return self._pyfuncitem.config # type: ignore[no-any-return]\n\n @property\n def function(self):\n \"\"\"Test function object if the request has a per-function scope.\"\"\"\n if self.scope != \"function\":\n raise AttributeError(\n f\"function not available in {self.scope}-scoped context\"\n )\n return self._pyfuncitem.obj\n\n @property\n def cls(self):\n \"\"\"Class (can be None) where the test function was collected.\"\"\"\n if self.scope not in (\"class\", \"function\"):\n raise AttributeError(f\"cls not available in {self.scope}-scoped context\")\n clscol = self._pyfuncitem.getparent(_pytest.python.Class)\n if clscol:\n return clscol.obj\n\n @property\n def instance(self):\n \"\"\"Instance (can be None) on which test function was collected.\"\"\"\n # unittest support hack, see _pytest.unittest.TestCaseFunction.\n try:\n return self._pyfuncitem._testcase\n except AttributeError:\n function = getattr(self, \"function\", None)\n return getattr(function, \"__self__\", None)\n\n @property\n def module(self):\n \"\"\"Python module object where the test function was collected.\"\"\"\n if self.scope not in (\"function\", \"class\", \"module\"):\n raise AttributeError(f\"module not available in {self.scope}-scoped context\")\n return self._pyfuncitem.getparent(_pytest.python.Module).obj\n\n @property\n def fspath(self) -> py.path.local:\n \"\"\"The file system path of the test module which collected this test.\"\"\"\n if self.scope not in (\"function\", \"class\", \"module\", \"package\"):\n raise AttributeError(f\"module not available in {self.scope}-scoped context\")\n # TODO: Remove ignore once _pyfuncitem is properly typed.\n return self._pyfuncitem.fspath # type: ignore\n\n @property\n def keywords(self) -> MutableMapping[str, Any]:\n \"\"\"Keywords/markers dictionary for the underlying node.\"\"\"\n node: nodes.Node = self.node\n return node.keywords\n\n @property\n def session(self) -> \"Session\":\n \"\"\"Pytest session object.\"\"\"\n return self._pyfuncitem.session # type: ignore[no-any-return]\n\n def addfinalizer(self, finalizer: Callable[[], object]) -> None:\n \"\"\"Add finalizer/teardown function to be called after the last test\n within the requesting test context finished execution.\"\"\"\n # XXX usually this method is shadowed by fixturedef specific ones.\n self._addfinalizer(finalizer, scope=self.scope)\n\n def _addfinalizer(self, finalizer: Callable[[], object], scope) -> None:\n node = self._getscopeitem(scope)\n node.addfinalizer(finalizer)\n\n def applymarker(self, marker: Union[str, MarkDecorator]) -> None:\n \"\"\"Apply a marker to a single test function invocation.\n\n This method is useful if you don't want to have a keyword/marker\n on all function invocations.\n\n :param marker:\n A :class:`pytest.MarkDecorator` object created by a call\n to ``pytest.mark.NAME(...)``.\n \"\"\"\n self.node.add_marker(marker)\n\n def raiseerror(self, msg: Optional[str]) -> \"NoReturn\":\n \"\"\"Raise a FixtureLookupError with the given message.\"\"\"\n raise self._fixturemanager.FixtureLookupError(None, self, msg)\n\n def _fillfixtures(self) -> None:\n item = self._pyfuncitem\n fixturenames = getattr(item, \"fixturenames\", self.fixturenames)\n for argname in fixturenames:\n if argname not in item.funcargs:\n item.funcargs[argname] = self.getfixturevalue(argname)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_pytestconfig_pytest_addoption.parser_addini_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_pytestconfig_pytest_addoption.parser_addini_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1391, "end_line": 1411, "span_ids": ["pytestconfig", "pytest_addoption"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture(scope=\"session\")\ndef pytestconfig(request: FixtureRequest) -> Config:\n \"\"\"Session-scoped fixture that returns the :class:`_pytest.config.Config` object.\n\n Example::\n\n def test_foo(pytestconfig):\n if pytestconfig.getoption(\"verbose\") > 0:\n ...\n\n \"\"\"\n return request.config\n\n\ndef pytest_addoption(parser: Parser) -> None:\n parser.addini(\n \"usefixtures\",\n type=\"args\",\n default=[],\n help=\"list of default fixtures to be used with this project\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_8_pytest_collection._Perform_the_collection": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_8_pytest_collection._Perform_the_collection", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 203, "end_line": 242, "span_ids": ["pytest_load_initial_conftests", "pytest_collection"], "tokens": 335}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# collection hooks\n# -------------------------------------------------------------------------\n\n\n@hookspec(firstresult=True)\ndef pytest_collection(session: \"Session\") -> Optional[object]:\n \"\"\"Perform the collection phase for the given session.\n\n Stops at first non-None result, see :ref:`firstresult`.\n The return value is not used, but only stops further processing.\n\n The default collection phase is this (see individual hooks for full details):\n\n 1. Starting from ``session`` as the initial collector:\n\n 1. ``pytest_collectstart(collector)``\n 2. ``report = pytest_make_collect_report(collector)``\n 3. ``pytest_exception_interact(collector, call, report)`` if an interactive exception occurred\n 4. For each collected node:\n\n 1. If an item, ``pytest_itemcollected(item)``\n 2. If a collector, recurse into it.\n\n 5. ``pytest_collectreport(report)``\n\n 2. ``pytest_collection_modifyitems(session, config, items)``\n\n 1. 
``pytest_deselected(items)`` for any deselected items (may be called multiple times)\n\n 3. ``pytest_collection_finish(session)``\n 4. Set ``session.items`` to the list of collected items\n 5. Set ``session.testscollected`` to the number of collected items\n\n You can implement this hook to only perform some action before collection,\n for example the terminal plugin uses it to start displaying the collection\n counter (and returns `None`).\n\n :param pytest.Session session: The pytest session object.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_report_teststatus_pytest_report_teststatus._Return_result_category": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_report_teststatus_pytest_report_teststatus._Return_result_category", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 737, "end_line": 761, "span_ids": ["pytest_report_teststatus"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(firstresult=True)\ndef pytest_report_teststatus(\n report: Union[\"CollectReport\", \"TestReport\"], config: \"Config\"\n) -> Tuple[str, str, Union[str, Mapping[str, bool]]]:\n \"\"\"Return result-category, shortletter and verbose word for status\n reporting.\n\n The result-category is a category in which to count the result, for\n example \"passed\", \"skipped\", \"error\" or the empty string.\n\n The shortletter is shown as testing progresses, for example \".\", \"s\",\n \"E\" or the empty string.\n\n The verbose word is shown as testing progresses in verbose mode, for\n example \"PASSED\", \"SKIPPED\", \"ERROR\" or the empty string.\n\n pytest may style these implicitly according to the report outcome.\n To provide explicit styling, return a tuple for the verbose word,\n for example ``\"rerun\", \"R\", (\"RERUN\", {\"yellow\": True})``.\n\n :param report: The report object whose status is to be returned.\n :param _pytest.config.Config config: The pytest config object.\n\n Stops at first non-None result, see :ref:`firstresult`.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.record_testreport__NodeReporter.record_testreport.self.attrs_6.temp_attrs": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.record_testreport__NodeReporter.record_testreport.self.attrs_6.temp_attrs", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 119, "end_line": 147, "span_ids": ["_NodeReporter.record_testreport"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _NodeReporter:\n\n def record_testreport(self, testreport: TestReport) -> None:\n names = mangle_test_address(testreport.nodeid)\n existing_attrs = self.attrs\n classnames = names[:-1]\n if self.xml.prefix:\n classnames.insert(0, self.xml.prefix)\n attrs: Dict[str, str] = {\n \"classname\": \".\".join(classnames),\n \"name\": bin_xml_escape(names[-1]),\n \"file\": testreport.location[0],\n }\n if testreport.location[1] is not None:\n attrs[\"line\"] = str(testreport.location[1])\n if hasattr(testreport, \"url\"):\n attrs[\"url\"] = testreport.url\n self.attrs = attrs\n self.attrs.update(existing_attrs) # Restore any user-defined attributes.\n\n # Preserve legacy testcase behavior.\n if self.family == \"xunit1\":\n return\n\n # Filter out attributes not permitted by this test family.\n # Including custom attributes because they are not valid here.\n temp_attrs = {}\n for key in self.attrs.keys():\n if key in families[self.family][\"testcase\"]:\n temp_attrs[key] = self.attrs[key]\n self.attrs = temp_attrs", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_get_log_level_for_setting_pytest_configure.config_pluginmanager_regi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_get_log_level_for_setting_pytest_configure.config_pluginmanager_regi", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 492, "end_line": 518, "span_ids": ["pytest_configure", "get_log_level_for_setting"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_log_level_for_setting(config: Config, *setting_names: str) -> Optional[int]:\n for setting_name in setting_names:\n log_level = config.getoption(setting_name)\n if log_level is None:\n log_level = config.getini(setting_name)\n if log_level:\n break\n else:\n return None\n\n if isinstance(log_level, str):\n log_level = log_level.upper()\n try:\n return int(getattr(logging, log_level, log_level))\n except ValueError as e:\n # Python logging does not recognise this as a logging level\n raise UsageError(\n \"'{}' is not recognized as a logging level name for \"\n \"'{}'. 
Please consider passing the \"\n \"logging level num instead.\".format(log_level, setting_name)\n ) from e\n\n\n# run after terminalreporter/capturemanager are configured\n@hookimpl(trylast=True)\ndef pytest_configure(config: Config) -> None:\n config.pluginmanager.register(LoggingPlugin(config), \"logging-plugin\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_runtestloop_pytest_runtestloop.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_runtestloop_pytest_runtestloop.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 309, "end_line": 326, "span_ids": ["pytest_runtestloop"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_runtestloop(session: \"Session\") -> bool:\n if session.testsfailed and not session.config.option.continue_on_collection_errors:\n raise session.Interrupted(\n \"%d error%s during collection\"\n % (session.testsfailed, \"s\" if session.testsfailed != 1 else \"\")\n )\n\n if session.config.option.collectonly:\n return True\n\n for i, item in enumerate(session.items):\n nextitem = session.items[i + 1] if i + 1 < len(session.items) else None\n item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)\n if session.shouldfail:\n raise session.Failed(session.shouldfail)\n if session.shouldstop:\n raise session.Interrupted(session.shouldstop)\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_ignore_collect_pytest_ignore_collect.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_ignore_collect_pytest_ignore_collect.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 379, "end_line": 403, "span_ids": ["pytest_ignore_collect"], "tokens": 235}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_ignore_collect(fspath: Path, config: Config) -> Optional[bool]:\n ignore_paths = config._getconftest_pathlist(\"collect_ignore\", path=fspath.parent)\n ignore_paths = ignore_paths or []\n excludeopt = config.getoption(\"ignore\")\n if excludeopt:\n ignore_paths.extend(absolutepath(x) for x in excludeopt)\n\n if fspath in ignore_paths:\n return True\n\n ignore_globs = config._getconftest_pathlist(\n \"collect_ignore_glob\", path=fspath.parent\n )\n ignore_globs = ignore_globs or []\n 
excludeglobopt = config.getoption(\"ignore_glob\")\n if excludeglobopt:\n ignore_globs.extend(absolutepath(x) for x in excludeglobopt)\n\n if any(fnmatch.fnmatch(str(fspath), str(glob)) for glob in ignore_globs):\n return True\n\n allow_in_venv = config.getoption(\"collect_in_virtualenv\")\n if not allow_in_venv and _in_venv(fspath):\n return True\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_deselect_by_mark_pytest_collection_modifyitems.deselect_by_mark_items_c": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_deselect_by_mark_pytest_collection_modifyitems.deselect_by_mark_items_c", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 243, "end_line": 268, "span_ids": ["pytest_collection_modifyitems", "deselect_by_mark"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def deselect_by_mark(items: \"List[Item]\", config: Config) -> None:\n matchexpr = config.option.markexpr\n if not matchexpr:\n return\n\n try:\n expression = Expression.compile(matchexpr)\n except ParseError as e:\n raise UsageError(f\"Wrong expression passed to '-m': {matchexpr}: {e}\") from None\n\n remaining = []\n deselected = []\n for item in items:\n if expression.evaluate(MarkMatcher.from_item(item)):\n remaining.append(item)\n else:\n deselected.append(item)\n\n if deselected:\n config.hook.pytest_deselected(items=deselected)\n items[:] = remaining\n\n\ndef pytest_collection_modifyitems(items: \"List[Item]\", config: Config) -> None:\n deselect_by_keyword(items, config)\n deselect_by_mark(items, config)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_configure_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_configure_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 263, "end_line": 278, "span_ids": ["pytest_unconfigure", "pytest_configure"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_configure(config: Config) -> None:\n config._store[old_mark_config_key] = MARK_GEN._config\n MARK_GEN._config = config\n\n empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION)\n\n if empty_parameterset not in (\"skip\", \"xfail\", \"fail_at_collect\", None, \"\"):\n raise UsageError(\n \"{!s} must be 
one of skip, xfail or fail_at_collect\"\n \" but it is {!r}\".format(EMPTY_PARAMETERSET_OPTION, empty_parameterset)\n )\n\n\ndef pytest_unconfigure(config: Config) -> None:\n MARK_GEN._config = config._store.get(old_mark_config_key, None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_Mark_Mark._has_param_ids.return._ids_in_self_kwargs_or_l": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_Mark_Mark._has_param_ids.return._ids_in_self_kwargs_or_l", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 203, "end_line": 238, "span_ids": ["Mark", "Mark._has_param_ids"], "tokens": 318}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(frozen=True, init=False, auto_attribs=True)\nclass Mark:\n #: Name of the mark.\n name: str\n #: Positional arguments of the mark decorator.\n args: Tuple[Any, ...]\n #: Keyword arguments of the mark decorator.\n kwargs: Mapping[str, Any]\n\n #: Source Mark for ids with parametrize Marks.\n _param_ids_from: Optional[\"Mark\"] = attr.ib(default=None, repr=False)\n #: Resolved/generated ids with parametrize Marks.\n _param_ids_generated: Optional[Sequence[str]] = attr.ib(default=None, repr=False)\n\n def __init__(\n self,\n name: str,\n args: Tuple[Any, ...],\n kwargs: Mapping[str, Any],\n param_ids_from: Optional[\"Mark\"] = None,\n param_ids_generated: Optional[Sequence[str]] = None,\n *,\n _ispytest: bool = False,\n ) -> None:\n \"\"\":meta private:\"\"\"\n check_ispytest(_ispytest)\n # Weirdness to bypass frozen=True.\n object.__setattr__(self, \"name\", name)\n object.__setattr__(self, \"args\", args)\n object.__setattr__(self, \"kwargs\", kwargs)\n object.__setattr__(self, \"_param_ids_from\", param_ids_from)\n object.__setattr__(self, \"_param_ids_generated\", param_ids_generated)\n\n def _has_param_ids(self) -> bool:\n return \"ids\" in self.kwargs or len(self.args) >= 4", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MARK_GEN_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MARK_GEN_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 543, "end_line": 582, "span_ids": ["NodeKeywords.__len__", "impl:8", "NodeKeywords", "NodeKeywords.__iter__", "NodeKeywords._seen", "NodeKeywords.__getitem__", "NodeKeywords.__delitem__", "NodeKeywords.__repr__", "NodeKeywords.__setitem__"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "MARK_GEN = MarkGenerator(_ispytest=True)\n\n\n@final\nclass NodeKeywords(MutableMapping[str, Any]):\n def __init__(self, node: \"Node\") -> None:\n self.node = node\n self.parent = node.parent\n self._markers = {node.name: True}\n\n def __getitem__(self, key: str) -> Any:\n try:\n return self._markers[key]\n except KeyError:\n if self.parent is None:\n raise\n return self.parent.keywords[key]\n\n def __setitem__(self, key: str, value: Any) -> None:\n self._markers[key] = value\n\n def __delitem__(self, key: str) -> None:\n raise ValueError(\"cannot delete key in keywords dict\")\n\n def __iter__(self) -> Iterator[str]:\n seen = self._seen()\n return iter(seen)\n\n def _seen(self) -> Set[str]:\n seen = set(self._markers)\n if self.parent is not None:\n seen.update(self.parent.keywords)\n return seen\n\n def __len__(self) -> int:\n return len(self._seen())\n\n def __repr__(self) -> str:\n return f\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node_Node.__init__.self._store.Store_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node_Node.__init__.self._store.Store_", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 99, "end_line": 175, "span_ids": ["Node"], "tokens": 532}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n \"\"\"Base class for Collector and Item, the components of the test\n collection tree.\n\n Collector subclasses have children; Items are leaf nodes.\n \"\"\"\n\n # Use __slots__ to make attribute access faster.\n # Note that __dict__ is still available.\n __slots__ = (\n \"name\",\n \"parent\",\n \"config\",\n \"session\",\n \"fspath\",\n \"_nodeid\",\n \"_store\",\n \"__dict__\",\n )\n\n def __init__(\n self,\n name: str,\n parent: \"Optional[Node]\" = None,\n config: Optional[Config] = None,\n session: \"Optional[Session]\" = None,\n fspath: Optional[py.path.local] = None,\n nodeid: Optional[str] = None,\n ) -> None:\n #: A unique name within the scope of the parent node.\n self.name = name\n\n #: The parent collector node.\n self.parent = parent\n\n #: The pytest config object.\n if config:\n self.config: Config = config\n else:\n if not parent:\n raise TypeError(\"config or parent must be provided\")\n self.config = parent.config\n\n #: The pytest session this node is part of.\n if session:\n self.session = session\n else:\n if not parent:\n raise TypeError(\"session or parent must be provided\")\n self.session = parent.session\n\n #: Filesystem path where this node was collected from (can be None).\n self.fspath = fspath or getattr(parent, \"fspath\", None)\n\n # The explicit annotation is to avoid publicly exposing NodeKeywords.\n #: Keywords/markers collected from all scopes.\n self.keywords: MutableMapping[str, Any] = NodeKeywords(self)\n\n #: The 
marker objects belonging to this node.\n self.own_markers: List[Mark] = []\n\n #: Allow adding of extra keywords to use for matching.\n self.extra_keyword_matches: Set[str] = set()\n\n if nodeid is not None:\n assert \"::()\" not in nodeid\n self._nodeid = nodeid\n else:\n if not self.parent:\n raise TypeError(\"nodeid or parent must be provided\")\n self._nodeid = self.parent.nodeid\n if self.name != \"()\":\n self._nodeid += \"::\" + self.name\n\n # A place where plugins can store information on the node for their\n # own use. Currently only intended for internal plugins.\n self._store = Store()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.from_parent_Node.__repr__.return._format_self___c": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.from_parent_Node.__repr__.return._format_self___c", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 177, "end_line": 201, "span_ids": ["Node.ihook", "Node.__repr__", "Node.from_parent"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n\n @classmethod\n def from_parent(cls, parent: \"Node\", **kw):\n \"\"\"Public constructor for Nodes.\n\n This indirection got introduced in order to enable removing\n the fragile logic from the node constructors.\n\n Subclasses can use ``super().from_parent(...)`` when overriding the\n construction.\n\n :param parent: The parent node of this Node.\n \"\"\"\n if \"config\" in kw:\n raise TypeError(\"config is not a valid argument for from_parent\")\n if \"session\" in kw:\n raise TypeError(\"session is not a valid argument for from_parent\")\n return cls._create(parent=parent, **kw)\n\n @property\n def ihook(self):\n \"\"\"fspath-sensitive hook proxy used to call pytest hooks.\"\"\"\n return self.session.gethookproxy(self.fspath)\n\n def __repr__(self) -> str:\n return \"<{} {}>\".format(self.__class__.__name__, getattr(self, \"name\", None))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_File_Item.runtest.raise_NotImplementedError": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_File_Item.runtest.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 540, "end_line": 571, "span_ids": ["Item.runtest", "File", "Item"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class File(FSCollector):\n \"\"\"Base class for collecting tests from a file.\n\n :ref:`non-python tests`.\n \"\"\"\n\n\nclass Item(Node):\n \"\"\"A basic test invocation item.\n\n Note that for a single function there might be multiple test invocation items.\n \"\"\"\n\n nextitem = None\n\n def __init__(\n self,\n name,\n parent=None,\n config: Optional[Config] = None,\n session: Optional[\"Session\"] = None,\n nodeid: Optional[str] = None,\n ) -> None:\n super().__init__(name, parent, config, session, nodeid=nodeid)\n self._report_sections: List[Tuple[str, str, str]] = []\n\n #: A list of tuples (name, value) that holds user defined properties\n #: for this test.\n self.user_properties: List[Tuple[str, object]] = []\n\n def runtest(self) -> None:\n raise NotImplementedError(\"runtest must be implemented by Item subclass\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_OutcomeException_OutcomeException.__str__.__repr__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_OutcomeException_OutcomeException.__str__.__repr__", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 25, "end_line": 45, "span_ids": ["OutcomeException", "OutcomeException:3", "OutcomeException.__repr__"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class OutcomeException(BaseException):\n \"\"\"OutcomeException and its subclass instances indicate and contain info\n about test and collection outcomes.\"\"\"\n\n def __init__(self, msg: Optional[str] = None, pytrace: bool = True) -> None:\n if msg is not None and not isinstance(msg, str):\n error_msg = ( # type: ignore[unreachable]\n \"{} expected string as 'msg' parameter, got '{}' instead.\\n\"\n \"Perhaps you meant to use a mark?\"\n )\n raise TypeError(error_msg.format(type(self).__name__, type(msg).__name__))\n BaseException.__init__(self, msg)\n self.msg = msg\n self.pytrace = pytrace\n\n def __repr__(self) -> str:\n if self.msg is not None:\n return self.msg\n return f\"<{self.__class__.__name__} instance>\"\n\n __str__ = __repr__", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_Failed_exit.raise_Exit_msg_returncod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_Failed_exit.raise_Exit_msg_returncod", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 71, "end_line": 120, "span_ids": ["exit", "_with_exception", "Exit", "impl:9", "_WithException", "Failed", "impl:10"], "tokens": 348}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Failed(OutcomeException):\n \"\"\"Raised from an explicit call to pytest.fail().\"\"\"\n\n __module__ = \"builtins\"\n\n\nclass Exit(Exception):\n \"\"\"Raised for immediate program exits (no tracebacks/summaries).\"\"\"\n\n def __init__(\n self, msg: str = \"unknown reason\", returncode: Optional[int] = None\n ) -> None:\n self.msg = msg\n self.returncode = returncode\n super().__init__(msg)\n\n\n# Elaborate hack to work around https://github.com/python/mypy/issues/2087.\n# Ideally would just be `exit.Exception = Exit` etc.\n\n_F = TypeVar(\"_F\", bound=Callable[..., object])\n_ET = TypeVar(\"_ET\", bound=Type[BaseException])\n\n\nclass _WithException(Protocol[_F, _ET]):\n Exception: _ET\n __call__: _F\n\n\ndef _with_exception(exception_type: _ET) -> Callable[[_F], _WithException[_F, _ET]]:\n def decorate(func: _F) -> _WithException[_F, _ET]:\n func_with_exception = cast(_WithException[_F, _ET], func)\n func_with_exception.Exception = exception_type\n return func_with_exception\n\n return decorate\n\n\n# Exposed helper methods.\n\n\n@_with_exception(Exit)\ndef exit(msg: str, returncode: Optional[int] = None) -> \"NoReturn\":\n \"\"\"Exit testing process.\n\n :param str msg: Message to display upon exit.\n :param int returncode: Return code to be used when exiting pytest.\n \"\"\"\n __tracebackhide__ = True\n raise Exit(msg, returncode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_skip_skip.raise_Skipped_msg_msg_al": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_skip_skip.raise_Skipped_msg_msg_al", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 123, "end_line": 144, "span_ids": ["skip"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@_with_exception(Skipped)\ndef skip(msg: str = \"\", *, allow_module_level: bool = False) -> \"NoReturn\":\n \"\"\"Skip an executing test with the given message.\n\n This function should be called only during testing (setup, call or teardown) or\n during collection by using the ``allow_module_level`` flag. This function can\n be called in doctests as well.\n\n :param bool allow_module_level:\n Allows this function to be called at module level, skipping the rest\n of the module. Defaults to False.\n\n .. 
note::\n It is better to use the :ref:`pytest.mark.skipif ref` marker when\n possible to declare a test to be skipped under certain conditions\n like mismatching platforms or dependencies.\n Similarly, use the ``# doctest: +SKIP`` directive (see `doctest.SKIP\n `_)\n to skip a doctest statically.\n \"\"\"\n __tracebackhide__ = True\n raise Skipped(msg=msg, allow_module_level=allow_module_level)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_xfail_xfail.raise_XFailed_reason_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_xfail_xfail.raise_XFailed_reason_", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 165, "end_line": 177, "span_ids": ["xfail"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@_with_exception(XFailed)\ndef xfail(reason: str = \"\") -> \"NoReturn\":\n \"\"\"Imperatively xfail an executing test or setup function with the given reason.\n\n This function should be called only during testing (setup, call or teardown).\n\n .. note::\n It is better to use the :ref:`pytest.mark.xfail ref` marker when\n possible to declare a test to be xfailed under certain conditions\n like known bugs or missing features.\n \"\"\"\n __tracebackhide__ = True\n raise XFailed(reason)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_importorskip_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_importorskip_", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 180, "end_line": 233, "span_ids": ["importorskip"], "tokens": 428}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def importorskip(\n modname: str, minversion: Optional[str] = None, reason: Optional[str] = None\n) -> Any:\n \"\"\"Import and return the requested module ``modname``, or skip the\n current test if the module cannot be imported.\n\n :param str modname:\n The name of the module to import.\n :param str minversion:\n If given, the imported module's ``__version__`` attribute must be at\n least this minimal version, otherwise the test is still skipped.\n :param str reason:\n If given, this reason is shown as the message when the module cannot\n be imported.\n\n :returns:\n The imported module. 
This should be assigned to its canonical name.\n\n Example::\n\n docutils = pytest.importorskip(\"docutils\")\n \"\"\"\n import warnings\n\n __tracebackhide__ = True\n compile(modname, \"\", \"eval\") # to catch syntaxerrors\n\n with warnings.catch_warnings():\n # Make sure to ignore ImportWarnings that might happen because\n # of existing directories with the same name we're trying to\n # import but without a __init__.py file.\n warnings.simplefilter(\"ignore\")\n try:\n __import__(modname)\n except ImportError as exc:\n if reason is None:\n reason = f\"could not import {modname!r}: {exc}\"\n raise Skipped(reason, allow_module_level=True) from None\n mod = sys.modules[modname]\n if minversion is None:\n return mod\n verattr = getattr(mod, \"__version__\", None)\n if minversion is not None:\n # Imported lazily to improve start-up time.\n from packaging.version import Version\n\n if verattr is None or Version(verattr) < Version(minversion):\n raise Skipped(\n \"module %r has __version__ %r, required is: %r\"\n % (modname, verattr, minversion),\n allow_module_level=True,\n )\n return mod", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_configure_pytest_configure.if_config_option_pastebin.if_tr_is_not_None_.tr._tw.write.tee_write": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_configure_pytest_configure.if_config_option_pastebin.if_tr_is_not_None_.tr._tw.write.tee_write", "embedding": null, "metadata": {"file_path": "src/_pytest/pastebin.py", "file_name": "pastebin.py", "file_type": "text/x-python", "category": "implementation", "start_line": 31, "end_line": 49, "span_ids": ["pytest_configure"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.hookimpl(trylast=True)\ndef pytest_configure(config: Config) -> None:\n if config.option.pastebin == \"all\":\n tr = config.pluginmanager.getplugin(\"terminalreporter\")\n # If no terminal reporter plugin is present, nothing we can do here;\n # this can happen when this function executes in a worker node\n # when using pytest-xdist, for example.\n if tr is not None:\n # pastebin file will be UTF-8 encoded binary file.\n config._store[pastebinfile_key] = tempfile.TemporaryFile(\"w+b\")\n oldwrite = tr._tw.write\n\n def tee_write(s, **kwargs):\n oldwrite(s, **kwargs)\n if isinstance(s, str):\n s = s.encode(\"utf-8\")\n config._store[pastebinfile_key].write(s)\n\n tr._tw.write = tee_write", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_unconfigure_pytest_unconfigure.if_pastebinfile_key_in_co.tr_write_line_pastebin_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py_pytest_unconfigure_pytest_unconfigure.if_pastebinfile_key_in_co.tr_write_line_pastebin_s", "embedding": null, "metadata": {"file_path": 
"src/_pytest/pastebin.py", "file_name": "pastebin.py", "file_type": "text/x-python", "category": "implementation", "start_line": 52, "end_line": 66, "span_ids": ["pytest_unconfigure"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_unconfigure(config: Config) -> None:\n if pastebinfile_key in config._store:\n pastebinfile = config._store[pastebinfile_key]\n # Get terminal contents and delete file.\n pastebinfile.seek(0)\n sessionlog = pastebinfile.read()\n pastebinfile.close()\n del config._store[pastebinfile_key]\n # Undo our patching in the terminal reporter.\n tr = config.pluginmanager.getplugin(\"terminalreporter\")\n del tr._tw.__dict__[\"write\"]\n # Write summary.\n tr.write_sep(\"=\", \"Sending information to Paste Service\")\n pastebinurl = create_new_paste(sessionlog)\n tr.write_line(\"pastebin session-log: %s\\n\" % pastebinurl)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_make_numbered_dir_make_numbered_dir.for_i_in_range_10_.else_.raise_OSError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_make_numbered_dir_make_numbered_dir.for_i_in_range_10_.else_.raise_OSError_", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 215, "end_line": 233, "span_ids": ["make_numbered_dir"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def make_numbered_dir(root: Path, prefix: str) -> Path:\n \"\"\"Create a directory with an increased number as suffix for the given prefix.\"\"\"\n for i in range(10):\n # try up to 10 times to create the folder\n max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1)\n new_number = max_existing + 1\n new_path = root.joinpath(f\"{prefix}{new_number}\")\n try:\n new_path.mkdir()\n except Exception:\n pass\n else:\n _force_symlink(root, prefix + \"current\", new_path)\n return new_path\n else:\n raise OSError(\n \"could not create numbered dir with prefix \"\n \"{prefix} in {root} after 10 tries\".format(prefix=prefix, root=root)\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_create_cleanup_lock_register_cleanup_lock_removal.return.register_cleanup_on_exit_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_create_cleanup_lock_register_cleanup_lock_removal.return.register_cleanup_on_exit_", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": 
"text/x-python", "category": "implementation", "start_line": 236, "end_line": 267, "span_ids": ["create_cleanup_lock", "register_cleanup_lock_removal"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_cleanup_lock(p: Path) -> Path:\n \"\"\"Create a lock to prevent premature folder cleanup.\"\"\"\n lock_path = get_lock_path(p)\n try:\n fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)\n except FileExistsError as e:\n raise OSError(f\"cannot create lockfile in {p}\") from e\n else:\n pid = os.getpid()\n spid = str(pid).encode()\n os.write(fd, spid)\n os.close(fd)\n if not lock_path.is_file():\n raise OSError(\"lock path got renamed after successful creation\")\n return lock_path\n\n\ndef register_cleanup_lock_removal(lock_path: Path, register=atexit.register):\n \"\"\"Register a cleanup function for removing a lock, by default on atexit.\"\"\"\n pid = os.getpid()\n\n def cleanup_on_exit(lock_path: Path = lock_path, original_pid: int = pid) -> None:\n current_pid = os.getpid()\n if current_pid != original_pid:\n # fork\n return\n try:\n lock_path.unlink()\n except OSError:\n pass\n\n return register(cleanup_on_exit)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_maybe_delete_a_numbered_dir_maybe_delete_a_numbered_dir.try_.finally_.if_lock_path_is_not_None_.try_.except_OSError_.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_maybe_delete_a_numbered_dir_maybe_delete_a_numbered_dir.try_.finally_.if_lock_path_is_not_None_.try_.except_OSError_.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 270, "end_line": 295, "span_ids": ["maybe_delete_a_numbered_dir"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def maybe_delete_a_numbered_dir(path: Path) -> None:\n \"\"\"Remove a numbered directory if its lock can be obtained and it does\n not seem to be in use.\"\"\"\n path = ensure_extended_length_path(path)\n lock_path = None\n try:\n lock_path = create_cleanup_lock(path)\n parent = path.parent\n\n garbage = parent.joinpath(f\"garbage-{uuid.uuid4()}\")\n path.rename(garbage)\n rm_rf(garbage)\n except OSError:\n # known races:\n # * other process did a cleanup at the same time\n # * deletable folder was found\n # * process cwd (Windows)\n return\n finally:\n # If we created the lock, ensure we remove it even if we failed\n # to properly remove the numbered dir.\n if lock_path is not None:\n try:\n lock_path.unlink()\n except OSError:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult_RunResult.__repr__.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult_RunResult.__repr__.return._", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 508, "end_line": 542, "span_ids": ["RunResult", "RunResult.__repr__"], "tokens": 279}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RunResult:\n \"\"\"The result of running a command.\"\"\"\n\n def __init__(\n self,\n ret: Union[int, ExitCode],\n outlines: List[str],\n errlines: List[str],\n duration: float,\n ) -> None:\n try:\n self.ret: Union[int, ExitCode] = ExitCode(ret)\n \"\"\"The return value.\"\"\"\n except ValueError:\n self.ret = ret\n self.outlines = outlines\n \"\"\"List of lines captured from stdout.\"\"\"\n self.errlines = errlines\n \"\"\"List of lines captured from stderr.\"\"\"\n self.stdout = LineMatcher(outlines)\n \"\"\":class:`LineMatcher` of stdout.\n\n Use e.g. :func:`str(stdout) ` to reconstruct stdout, or the commonly used\n :func:`stdout.fnmatch_lines() ` method.\n \"\"\"\n self.stderr = LineMatcher(errlines)\n \"\"\":class:`LineMatcher` of stderr.\"\"\"\n self.duration = duration\n \"\"\"Duration in seconds.\"\"\"\n\n def __repr__(self) -> str:\n return (\n \"\"\n % (self.ret, len(self.stdout.lines), len(self.stderr.lines), self.duration)\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineComp_LineComp.assert_contains_lines.LineMatcher_lines1_fnmat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineComp_LineComp.assert_contains_lines.LineMatcher_lines1_fnmat", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1304, "end_line": 1319, "span_ids": ["LineComp.assert_contains_lines", "LineComp"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineComp:\n def __init__(self) -> None:\n self.stringio = StringIO()\n \"\"\":class:`python:io.StringIO()` instance used for input.\"\"\"\n\n def assert_contains_lines(self, lines2: Sequence[str]) -> None:\n \"\"\"Assert that ``lines2`` are contained (linearly) in :attr:`stringio`'s value.\n\n Lines are matched using :func:`LineMatcher.fnmatch_lines`.\n \"\"\"\n __tracebackhide__ = True\n val = self.stringio.getvalue()\n self.stringio.truncate(0)\n self.stringio.seek(0)\n lines1 = val.split(\"\\n\")\n LineMatcher(lines1).fnmatch_lines(lines2)", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._match_lines_random_LineMatcher._match_lines_random.for_line_in_lines2_.for_x_in_self_lines_.else_.self__fail_msg_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._match_lines_random_LineMatcher._match_lines_random.for_line_in_lines2_.for_x_in_self_lines_.else_.self__fail_msg_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1355, "end_line": 1368, "span_ids": ["LineMatcher._match_lines_random"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n\n def _match_lines_random(\n self, lines2: Sequence[str], match_func: Callable[[str, str], bool]\n ) -> None:\n __tracebackhide__ = True\n lines2 = self._getlines(lines2)\n for line in lines2:\n for x in self.lines:\n if line == x or match_func(x, line):\n self._log(\"matched: \", repr(line))\n break\n else:\n msg = \"line %r not found in output\" % line\n self._log(msg)\n self._fail(msg)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.get_lines_after_LineMatcher._log_text.return._n_join_self__log_outpu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.get_lines_after_LineMatcher._log_text.return._n_join_self__log_outpu", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1370, "end_line": 1385, "span_ids": ["LineMatcher._log_text", "LineMatcher._log", "LineMatcher.get_lines_after"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n\n def get_lines_after(self, fnline: str) -> Sequence[str]:\n \"\"\"Return all lines following the given line in the text.\n\n The given line can contain glob wildcards.\n \"\"\"\n for i, line in enumerate(self.lines):\n if fnline == line or fnmatch(line, fnline):\n return self.lines[i + 1 :]\n raise ValueError(\"line %r not found in output\" % fnline)\n\n def _log(self, *args) -> None:\n self._log_output.append(\" \".join(str(x) for x in args))\n\n @property\n def _log_text(self) -> str:\n return \"\\n\".join(self._log_output)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.fnmatch_lines_LineMatcher.fnmatch_lines.self__match_lines_lines2_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.fnmatch_lines_LineMatcher.fnmatch_lines.self__match_lines_lines2_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1808, "end_line": 1821, "span_ids": ["LineMatcher.fnmatch_lines"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n\n def fnmatch_lines(\n self, lines2: Sequence[str], *, consecutive: bool = False\n ) -> None:\n \"\"\"Check lines exist in the output (using :func:`python:fnmatch.fnmatch`).\n\n The argument is a list of lines which have to match and can use glob\n wildcards. If they do not match a pytest.fail() is called. The\n matches and non-matches are also shown as part of the error message.\n\n :param lines2: String patterns to match.\n :param consecutive: Match lines consecutively?\n \"\"\"\n __tracebackhide__ = True\n self._match_lines(lines2, fnmatch, \"fnmatch\", consecutive=consecutive)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.re_match_lines_LineMatcher.re_match_lines.self__match_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.re_match_lines_LineMatcher.re_match_lines.self__match_lines_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1402, "end_line": 1421, "span_ids": ["LineMatcher.re_match_lines"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n\n def re_match_lines(\n self, lines2: Sequence[str], *, consecutive: bool = False\n ) -> None:\n \"\"\"Check lines exist in the output (using :func:`python:re.match`).\n\n The argument is a list of lines which have to match using ``re.match``.\n If they do not match a pytest.fail() is called.\n\n The matches and non-matches are also shown as part of the error message.\n\n :param lines2: string patterns to match.\n :param consecutive: match lines consecutively?\n \"\"\"\n __tracebackhide__ = True\n self._match_lines(\n lines2,\n lambda name, pat: bool(re.match(pat, name)),\n \"re.match\",\n consecutive=consecutive,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._match_lines_LineMatcher._match_lines.self._log_output._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher._match_lines_LineMatcher._match_lines.self._log_output._", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1844, "end_line": 1909, "span_ids": ["LineMatcher._match_lines"], "tokens": 545}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n\n def _match_lines(\n self,\n lines2: Sequence[str],\n match_func: Callable[[str, str], bool],\n match_nickname: str,\n *,\n consecutive: bool = False,\n ) -> None:\n \"\"\"Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``.\n\n :param Sequence[str] lines2:\n List of string patterns to match. The actual format depends on\n ``match_func``.\n :param match_func:\n A callable ``match_func(line, pattern)`` where line is the\n captured line from stdout/stderr and pattern is the matching\n pattern.\n :param str match_nickname:\n The nickname for the match function that will be logged to stdout\n when a match occurs.\n :param consecutive:\n Match lines consecutively?\n \"\"\"\n if not isinstance(lines2, collections.abc.Sequence):\n raise TypeError(\"invalid type for lines2: {}\".format(type(lines2).__name__))\n lines2 = self._getlines(lines2)\n lines1 = self.lines[:]\n extralines = []\n __tracebackhide__ = True\n wnick = len(match_nickname) + 1\n started = False\n for line in lines2:\n nomatchprinted = False\n while lines1:\n nextline = lines1.pop(0)\n if line == nextline:\n self._log(\"exact match:\", repr(line))\n started = True\n break\n elif match_func(nextline, line):\n self._log(\"%s:\" % match_nickname, repr(line))\n self._log(\n \"{:>{width}}\".format(\"with:\", width=wnick), repr(nextline)\n )\n started = True\n break\n else:\n if consecutive and started:\n msg = f\"no consecutive match: {line!r}\"\n self._log(msg)\n self._log(\n \"{:>{width}}\".format(\"with:\", width=wnick), repr(nextline)\n )\n self._fail(msg)\n if not nomatchprinted:\n self._log(\n \"{:>{width}}\".format(\"nomatch:\", width=wnick), repr(line)\n )\n nomatchprinted = True\n self._log(\"{:>{width}}\".format(\"and:\", width=wnick), repr(nextline))\n extralines.append(nextline)\n else:\n msg = f\"remains unmatched: {line!r}\"\n self._log(msg)\n self._fail(msg)\n self._log_output = []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.no_fnmatch_line_LineMatcher.no_re_match_line.self__no_match_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LineMatcher.no_fnmatch_line_LineMatcher.no_re_match_line.self__no_match_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1911, "end_line": 1927, "span_ids": 
["LineMatcher.no_fnmatch_line", "LineMatcher.no_re_match_line"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LineMatcher:\n\n def no_fnmatch_line(self, pat: str) -> None:\n \"\"\"Ensure captured lines do not match the given pattern, using ``fnmatch.fnmatch``.\n\n :param str pat: The pattern to match lines.\n \"\"\"\n __tracebackhide__ = True\n self._no_match_line(pat, fnmatch, \"fnmatch\")\n\n def no_re_match_line(self, pat: str) -> None:\n \"\"\"Ensure captured lines do not match the given pattern, using ``re.match``.\n\n :param str pat: The regular expression to match lines.\n \"\"\"\n __tracebackhide__ = True\n self._no_match_line(\n pat, lambda name, pat: bool(re.match(pat, name)), \"re.match\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_async_warn_and_skip_async_warn_and_skip.skip_msg_async_def_funct": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_async_warn_and_skip_async_warn_and_skip.skip_msg_async_def_funct", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 164, "end_line": 175, "span_ids": ["async_warn_and_skip"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def async_warn_and_skip(nodeid: str) -> None:\n msg = \"async def functions are not natively supported and have been skipped.\\n\"\n msg += (\n \"You need to install a suitable plugin for your async framework, for example:\\n\"\n )\n msg += \" - anyio\\n\"\n msg += \" - pytest-asyncio\\n\"\n msg += \" - pytest-tornasync\\n\"\n msg += \" - pytest-trio\\n\"\n msg += \" - pytest-twisted\"\n warnings.warn(PytestUnhandledCoroutineWarning(msg.format(nodeid)))\n skip(msg=\"async def function and no async plugin installed (see warnings)\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_pyfunc_call_pytest_pyfunc_call.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_pyfunc_call_pytest_pyfunc_call.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 170, "end_line": 180, "span_ids": ["pytest_pyfunc_call"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(trylast=True)\ndef pytest_pyfunc_call(pyfuncitem: \"Function\") -> Optional[object]:\n testfunction = pyfuncitem.obj\n if is_async_function(testfunction):\n async_warn_and_skip(pyfuncitem.nodeid)\n funcargs = pyfuncitem.funcargs\n testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}\n result = testfunction(**testargs)\n if hasattr(result, \"__await__\") or hasattr(result, \"__aiter__\"):\n async_warn_and_skip(pyfuncitem.nodeid)\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector._genfunctions_PyCollector._genfunctions.if_not_metafunc__calls_.else_.for_callspec_in_metafunc_.yield_Function_from_paren": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector._genfunctions_PyCollector._genfunctions.if_not_metafunc__calls_.else_.for_callspec_in_metafunc_.yield_Function_from_paren", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 450, "end_line": 498, "span_ids": ["PyCollector._genfunctions"], "tokens": 405}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PyCollector(PyobjMixin, nodes.Collector):\n\n def _genfunctions(self, name: str, funcobj) -> Iterator[\"Function\"]:\n modulecol = self.getparent(Module)\n assert modulecol is not None\n module = modulecol.obj\n clscol = self.getparent(Class)\n cls = clscol and clscol.obj or None\n fm = self.session._fixturemanager\n\n definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj)\n fixtureinfo = definition._fixtureinfo\n\n metafunc = Metafunc(\n definition=definition,\n fixtureinfo=fixtureinfo,\n config=self.config,\n cls=cls,\n module=module,\n _ispytest=True,\n )\n methods = []\n if hasattr(module, \"pytest_generate_tests\"):\n methods.append(module.pytest_generate_tests)\n if cls is not None and hasattr(cls, \"pytest_generate_tests\"):\n methods.append(cls().pytest_generate_tests)\n\n self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc))\n\n if not metafunc._calls:\n yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo)\n else:\n # Add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs.\n fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)\n\n # Add_funcarg_pseudo_fixture_def may have shadowed some fixtures\n # with direct parametrization, so make sure we update what the\n # function really needs.\n fixtureinfo.prune_dependency_tree()\n\n for callspec in metafunc._calls:\n subname = f\"{name}[{callspec.id}]\"\n yield Function.from_parent(\n self,\n name=subname,\n callspec=callspec,\n callobj=funcobj,\n fixtureinfo=fixtureinfo,\n keywords={callspec.id: True},\n originalname=name,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class_Class.collect.return._Instance_from_parent_sel": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class_Class.collect.return._Instance_from_parent_sel", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 762, "end_line": 797, "span_ids": ["Class", "Class.collect", "Class.from_parent"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Class(PyCollector):\n \"\"\"Collector for test methods.\"\"\"\n\n @classmethod\n def from_parent(cls, parent, *, name, obj=None):\n \"\"\"The public constructor.\"\"\"\n return super().from_parent(name=name, parent=parent)\n\n def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:\n if not safe_getattr(self.obj, \"__test__\", True):\n return []\n if hasinit(self.obj):\n assert self.parent is not None\n self.warn(\n PytestCollectionWarning(\n \"cannot collect test class %r because it has a \"\n \"__init__ constructor (from: %s)\"\n % (self.obj.__name__, self.parent.nodeid)\n )\n )\n return []\n elif hasnew(self.obj):\n assert self.parent is not None\n self.warn(\n PytestCollectionWarning(\n \"cannot collect test class %r because it has a \"\n \"__new__ constructor (from: %s)\"\n % (self.obj.__name__, self.parent.nodeid)\n )\n )\n return []\n\n self._inject_setup_class_fixture()\n self._inject_setup_method_fixture()\n\n return [Instance.from_parent(self, name=\"()\")]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_CallSpec2_CallSpec2.id.return._join_map_str_self__i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_CallSpec2_CallSpec2.id.return._join_map_str_self__i", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 894, "end_line": 924, "span_ids": ["CallSpec2.getparam", "CallSpec2", "CallSpec2.id", "CallSpec2.copy"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass CallSpec2:\n def __init__(self, metafunc: \"Metafunc\") -> None:\n self.metafunc = metafunc\n self.funcargs: Dict[str, object] = {}\n self._idlist: List[str] = []\n self.params: Dict[str, object] = {}\n # Used for sorting parametrized resources.\n self._arg2scopenum: Dict[str, int] = {}\n self.marks: List[Mark] = []\n self.indices: Dict[str, int] = {}\n\n def copy(self) -> \"CallSpec2\":\n cs = CallSpec2(self.metafunc)\n cs.funcargs.update(self.funcargs)\n cs.params.update(self.params)\n cs.marks.extend(self.marks)\n cs.indices.update(self.indices)\n 
cs._arg2scopenum.update(self._arg2scopenum)\n cs._idlist = list(self._idlist)\n return cs\n\n def getparam(self, name: str) -> object:\n try:\n return self.params[name]\n except KeyError as e:\n raise ValueError(name) from e\n\n @property\n def id(self) -> str:\n return \"-\".join(map(str, self._idlist))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._resolve_arg_ids_Metafunc._resolve_arg_ids.return.idmaker_argnames_paramet": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._resolve_arg_ids_Metafunc._resolve_arg_ids.return.idmaker_argnames_paramet", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1122, "end_line": 1153, "span_ids": ["Metafunc._resolve_arg_ids"], "tokens": 297}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Metafunc:\n\n def _resolve_arg_ids(\n self,\n argnames: Sequence[str],\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ],\n parameters: Sequence[ParameterSet],\n nodeid: str,\n ) -> List[str]:\n \"\"\"Resolve the actual ids for the given argnames, based on the ``ids`` parameter given\n to ``parametrize``.\n\n :param List[str] argnames: List of argument names passed to ``parametrize()``.\n :param ids: The ids parameter of the parametrized call (see docs).\n :param List[ParameterSet] parameters: The list of parameter values, same size as ``argnames``.\n :param str str: The nodeid of the item that generated this parametrized call.\n :rtype: List[str]\n :returns: The list of ids for each argname given.\n \"\"\"\n if ids is None:\n idfn = None\n ids_ = None\n elif callable(ids):\n idfn = ids\n ids_ = None\n else:\n idfn = None\n ids_ = self._validate_ids(ids, parameters, self.function.__name__)\n return idmaker(argnames, parameters, idfn, ids_, self.config, nodeid=nodeid)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._validate_ids_Metafunc._validate_ids.return.new_ids": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc._validate_ids_Metafunc._validate_ids.return.new_ids", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1155, "end_line": 1190, "span_ids": ["Metafunc._validate_ids"], "tokens": 338}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"@final\nclass Metafunc:\n\n def _validate_ids(\n self,\n ids: Iterable[Union[None, str, float, int, bool]],\n parameters: Sequence[ParameterSet],\n func_name: str,\n ) -> List[Union[None, str]]:\n try:\n num_ids = len(ids) # type: ignore[arg-type]\n except TypeError:\n try:\n iter(ids)\n except TypeError as e:\n raise TypeError(\"ids must be a callable or an iterable\") from e\n num_ids = len(parameters)\n\n # num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849\n if num_ids != len(parameters) and num_ids != 0:\n msg = \"In {}: {} parameter sets specified, with different number of ids: {}\"\n fail(msg.format(func_name, len(parameters), num_ids), pytrace=False)\n\n new_ids = []\n for idx, id_value in enumerate(itertools.islice(ids, num_ids)):\n if id_value is None or isinstance(id_value, str):\n new_ids.append(id_value)\n elif isinstance(id_value, (float, int, bool)):\n new_ids.append(str(id_value))\n else:\n msg = ( # type: ignore[unreachable]\n \"In {}: ids must be list of string/float/int/bool, \"\n \"found: {} (type: {!r}) at index {}\"\n )\n fail(\n msg.format(func_name, saferepr(id_value), type(id_value), idx),\n pytrace=False,\n )\n return new_ids", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__find_parametrized_scope__find_parametrized_scope.return._function_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__find_parametrized_scope__find_parametrized_scope.return._function_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1265, "end_line": 1297, "span_ids": ["_find_parametrized_scope"], "tokens": 275}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _find_parametrized_scope(\n argnames: Sequence[str],\n arg2fixturedefs: Mapping[str, Sequence[fixtures.FixtureDef[object]]],\n indirect: Union[bool, Sequence[str]],\n) -> \"fixtures._Scope\":\n \"\"\"Find the most appropriate scope for a parametrized call based on its arguments.\n\n When there's at least one direct argument, always use \"function\" scope.\n\n When a test function is parametrized and all its arguments are indirect\n (e.g. 
fixtures), return the most narrow scope based on the fixtures used.\n\n Related to issue #1832, based on code posted by @Kingdread.\n \"\"\"\n if isinstance(indirect, Sequence):\n all_arguments_are_fixtures = len(indirect) == len(argnames)\n else:\n all_arguments_are_fixtures = bool(indirect)\n\n if all_arguments_are_fixtures:\n fixturedefs = arg2fixturedefs or {}\n used_scopes = [\n fixturedef[0].scope\n for name, fixturedef in fixturedefs.items()\n if name in argnames\n ]\n if used_scopes:\n # Takes the most narrow scope from used fixtures.\n for scope in reversed(fixtures.scopes):\n if scope in used_scopes:\n return scope\n\n return \"function\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__ascii_escaped_by_config__ascii_escaped_by_config._type_ignore": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__ascii_escaped_by_config__ascii_escaped_by_config._type_ignore", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1178, "end_line": 1188, "span_ids": ["_ascii_escaped_by_config"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _ascii_escaped_by_config(val: Union[str, bytes], config: Optional[Config]) -> str:\n if config is None:\n escape_option = False\n else:\n escape_option = config.getini(\n \"disable_test_id_escaping_and_forfeit_all_rights_to_community_support\"\n )\n # TODO: If escaping is turned off and the user passes bytes,\n # will return a bytes. 
For now we ignore this but the\n # code *probably* doesn't handle this case.\n return val if escape_option else ascii_escaped(val) # type: ignore", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__idvalset__idvalset.if_id_is_None_.else_.return._ascii_escaped_by_config_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__idvalset__idvalset.if_id_is_None_.else_.return._ascii_escaped_by_config_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1356, "end_line": 1375, "span_ids": ["_idvalset"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _idvalset(\n idx: int,\n parameterset: ParameterSet,\n argnames: Iterable[str],\n idfn: Optional[Callable[[Any], Optional[object]]],\n ids: Optional[List[Union[None, str]]],\n nodeid: Optional[str],\n config: Optional[Config],\n) -> str:\n if parameterset.id is not None:\n return parameterset.id\n id = None if ids is None or idx >= len(ids) else ids[idx]\n if id is None:\n this_id = [\n _idval(val, argname, idx, idfn, nodeid=nodeid, config=config)\n for val, argname in zip(parameterset.values, argnames)\n ]\n return \"-\".join(this_id)\n else:\n return _ascii_escaped_by_config(id, config)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_idmaker_idmaker.return.resolved_ids": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_idmaker_idmaker.return.resolved_ids", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1378, "end_line": 1409, "span_ids": ["idmaker"], "tokens": 278}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def idmaker(\n argnames: Iterable[str],\n parametersets: Iterable[ParameterSet],\n idfn: Optional[Callable[[Any], Optional[object]]] = None,\n ids: Optional[List[Union[None, str]]] = None,\n config: Optional[Config] = None,\n nodeid: Optional[str] = None,\n) -> List[str]:\n resolved_ids = [\n _idvalset(\n valindex, parameterset, argnames, idfn, ids, config=config, nodeid=nodeid\n )\n for valindex, parameterset in enumerate(parametersets)\n ]\n\n # All IDs must be unique!\n unique_ids = set(resolved_ids)\n if len(unique_ids) != len(resolved_ids):\n\n # Record the number of occurrences of each test ID.\n test_id_counts = Counter(resolved_ids)\n\n # Map the test ID to its next suffix.\n test_id_suffixes: Dict[str, int] = defaultdict(int)\n\n # Suffix 
non-unique IDs to make them unique.\n for index, test_id in enumerate(resolved_ids):\n if test_id_counts[test_id] > 1:\n resolved_ids[index] = \"{}{}\".format(test_id, test_id_suffixes[test_id])\n test_id_suffixes[test_id] += 1\n\n return resolved_ids", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_show_fixtures_per_test__show_fixtures_per_test.write_fixture.if_fixture_doc_.else_.tw_line_no_docstring": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_show_fixtures_per_test__show_fixtures_per_test.write_fixture.if_fixture_doc_.else_.tw_line_no_docstring", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1412, "end_line": 1444, "span_ids": ["_show_fixtures_per_test", "show_fixtures_per_test"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def show_fixtures_per_test(config):\n from _pytest.main import wrap_session\n\n return wrap_session(config, _show_fixtures_per_test)\n\n\ndef _show_fixtures_per_test(config: Config, session: Session) -> None:\n import _pytest.config\n\n session.perform_collect()\n curdir = Path.cwd()\n tw = _pytest.config.create_terminal_writer(config)\n verbose = config.getvalue(\"verbose\")\n\n def get_best_relpath(func) -> str:\n loc = getlocation(func, str(curdir))\n return bestrelpath(curdir, Path(loc))\n\n def write_fixture(fixture_def: fixtures.FixtureDef[object]) -> None:\n argname = fixture_def.argname\n if verbose <= 0 and argname.startswith(\"_\"):\n return\n if verbose > 0:\n bestrel = get_best_relpath(fixture_def.func)\n funcargspec = f\"{argname} -- {bestrel}\"\n else:\n funcargspec = argname\n tw.line(funcargspec, green=True)\n fixture_doc = inspect.getdoc(fixture_def.func)\n if fixture_doc:\n write_docstring(tw, fixture_doc)\n else:\n tw.line(\" no docstring available\", red=True)\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json__report_to_json.serialize_repr_crash.if_reprcrash_is_not_None_.else_.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json__report_to_json.serialize_repr_crash.if_reprcrash_is_not_None_.else_.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 437, "end_line": 467, "span_ids": ["_report_to_json"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _report_to_json(report: BaseReport) -> Dict[str, Any]:\n \"\"\"Return the contents of this report as a dict of builtin entries,\n suitable for serialization.\n\n This was originally the serialize_report() function from xdist (ca03269).\n \"\"\"\n\n def serialize_repr_entry(\n entry: Union[ReprEntry, ReprEntryNative]\n ) -> Dict[str, Any]:\n data = attr.asdict(entry)\n for key, value in data.items():\n if hasattr(value, \"__dict__\"):\n data[key] = attr.asdict(value)\n entry_data = {\"type\": type(entry).__name__, \"data\": data}\n return entry_data\n\n def serialize_repr_traceback(reprtraceback: ReprTraceback) -> Dict[str, Any]:\n result = attr.asdict(reprtraceback)\n result[\"reprentries\"] = [\n serialize_repr_entry(x) for x in reprtraceback.reprentries\n ]\n return result\n\n def serialize_repr_crash(\n reprcrash: Optional[ReprFileLocation],\n ) -> Optional[Dict[str, Any]]:\n if reprcrash is not None:\n return attr.asdict(reprcrash)\n else:\n return None\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_report_teststatus_call_and_report.return.report": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_report_teststatus_call_and_report.return.report", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 202, "end_line": 228, "span_ids": ["pytest_report_teststatus", "call_and_report"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]:\n if report.when in (\"setup\", \"teardown\"):\n if report.failed:\n # category, shortletter, verbose-word\n return \"error\", \"E\", \"ERROR\"\n elif report.skipped:\n return \"skipped\", \"s\", \"SKIPPED\"\n else:\n return \"\", \"\", \"\"\n return None\n\n\n#\n# Implementation\n\n\ndef call_and_report(\n item: Item, when: \"Literal['setup', 'call', 'teardown']\", log: bool = True, **kwds\n) -> TestReport:\n call = call_runtest_hook(item, when, **kwds)\n hook = item.ihook\n report: TestReport = hook.pytest_runtest_makereport(item=item, call=call)\n if log:\n hook.pytest_runtest_logreport(report=report)\n if check_interactive_exception(call, report):\n hook.pytest_exception_interact(node=item, call=call, report=report)\n return report", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/store.py_from_typing_import_Any_StoreKey.__slots__._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/store.py_from_typing_import_Any_StoreKey.__slots__._", "embedding": null, "metadata": {"file_path": "src/_pytest/store.py", "file_name": "store.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 24, "span_ids": ["StoreKey", "impl", "imports"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Any\nfrom typing import cast\nfrom typing import Dict\nfrom typing import Generic\nfrom typing import TypeVar\nfrom typing import Union\n\n\n__all__ = [\"Store\", \"StoreKey\"]\n\n\nT = TypeVar(\"T\")\nD = TypeVar(\"D\")\n\n\nclass StoreKey(Generic[T]):\n \"\"\"StoreKey is an object used as a key to a Store.\n\n A StoreKey is associated with the type T of the value of the key.\n\n A StoreKey is unique and cannot conflict with another key.\n \"\"\"\n\n __slots__ = ()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/store.py_Store_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/store.py_Store_", "embedding": null, "metadata": {"file_path": "src/_pytest/store.py", "file_name": "store.py", "file_type": "text/x-python", "category": "implementation", "start_line": 27, "end_line": 126, "span_ids": ["Store.__contains__", "Store.__setitem__", "Store.get", "Store", "Store.__delitem__", "Store.__getitem__", "Store.setdefault"], "tokens": 732}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Store:\n \"\"\"Store is a type-safe heterogenous mutable mapping that\n allows keys and value types to be defined separately from\n where it (the Store) is created.\n\n Usually you will be given an object which has a ``Store``:\n\n .. code-block:: python\n\n store: Store = some_object.store\n\n If a module wants to store data in this Store, it creates StoreKeys\n for its keys (at the module level):\n\n .. code-block:: python\n\n some_str_key = StoreKey[str]()\n some_bool_key = StoreKey[bool]()\n\n To store information:\n\n .. code-block:: python\n\n # Value type must match the key.\n store[some_str_key] = \"value\"\n store[some_bool_key] = True\n\n To retrieve the information:\n\n .. code-block:: python\n\n # The static type of some_str is str.\n some_str = store[some_str_key]\n # The static type of some_bool is bool.\n some_bool = store[some_bool_key]\n\n Why use this?\n -------------\n\n Problem: module Internal defines an object. Module External, which\n module Internal doesn't know about, receives the object and wants to\n attach information to it, to be retrieved later given the object.\n\n Bad solution 1: Module External assigns private attributes directly on\n the object. This doesn't work well because the type checker doesn't\n know about these attributes and it complains about undefined attributes.\n\n Bad solution 2: module Internal adds a ``Dict[str, Any]`` attribute to\n the object. Module External stores its data in private keys of this dict.\n This doesn't work well because retrieved values are untyped.\n\n Good solution: module Internal adds a ``Store`` to the object. Module\n External mints StoreKeys for its own keys. 
Module External stores and\n retrieves its data using these keys.\n \"\"\"\n\n __slots__ = (\"_store\",)\n\n def __init__(self) -> None:\n self._store: Dict[StoreKey[Any], object] = {}\n\n def __setitem__(self, key: StoreKey[T], value: T) -> None:\n \"\"\"Set a value for key.\"\"\"\n self._store[key] = value\n\n def __getitem__(self, key: StoreKey[T]) -> T:\n \"\"\"Get the value for key.\n\n Raises ``KeyError`` if the key wasn't set before.\n \"\"\"\n return cast(T, self._store[key])\n\n def get(self, key: StoreKey[T], default: D) -> Union[T, D]:\n \"\"\"Get the value for key, or return default if the key wasn't set\n before.\"\"\"\n try:\n return self[key]\n except KeyError:\n return default\n\n def setdefault(self, key: StoreKey[T], default: T) -> T:\n \"\"\"Return the value of key if already set, otherwise set the value\n of key to default and return default.\"\"\"\n try:\n return self[key]\n except KeyError:\n self[key] = default\n return default\n\n def __delitem__(self, key: StoreKey[T]) -> None:\n \"\"\"Delete the value for key.\n\n Raises ``KeyError`` if the key wasn't set before.\n \"\"\"\n del self._store[key]\n\n def __contains__(self, key: StoreKey[T]) -> bool:\n \"\"\"Return whether key was set.\"\"\"\n return key in self._store", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._is_last_item_TerminalReporter.pytest_runtest_logfinish.if_self_verbosity_0_an.if_self__is_last_item_.else_.if_past_edge_.self__tw_write_msg_n_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._is_last_item_TerminalReporter.pytest_runtest_logfinish.if_self_verbosity_0_an.if_self__is_last_item_.else_.if_past_edge_.self__tw_write_msg_n_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 570, "end_line": 594, "span_ids": ["TerminalReporter.pytest_runtest_logfinish", "TerminalReporter._is_last_item"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n @property\n def _is_last_item(self) -> bool:\n assert self._session is not None\n return len(self._progress_nodeids_reported) == self._session.testscollected\n\n def pytest_runtest_logfinish(self, nodeid: str) -> None:\n assert self._session\n if self.verbosity <= 0 and self._show_progress_info:\n if self._show_progress_info == \"count\":\n num_tests = self._session.testscollected\n progress_length = len(\" [{}/{}]\".format(str(num_tests), str(num_tests)))\n else:\n progress_length = len(\" [100%]\")\n\n self._progress_nodeids_reported.add(nodeid)\n\n if self._is_last_item:\n self._write_progress_information_filling_space()\n else:\n main_color, _ = self._get_main_color()\n w = self._width_of_current_line\n past_edge = w + progress_length + 1 >= self._screen_width\n if past_edge:\n msg = self._get_progress_information_message()\n self._tw.write(msg + \"\\n\", **{main_color: True})", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._printcollecteditems_TerminalReporter._printcollecteditems.for_item_in_items_.for_col_in_needed_collect.if_self_config_option_ver.if_doc_.for_line_in_doc_splitline.self__tw_line_form": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._printcollecteditems_TerminalReporter._printcollecteditems.for_item_in_items_.for_col_in_needed_collect.if_self_config_option_ver.if_doc_.for_line_in_doc_splitline.self__tw_line_form", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 762, "end_line": 794, "span_ids": ["TerminalReporter._printcollecteditems"], "tokens": 329}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _printcollecteditems(self, items: Sequence[Item]) -> None:\n # To print out items and their parent collectors\n # we take care to leave out Instances aka ()\n # because later versions are going to get rid of them anyway.\n if self.config.option.verbose < 0:\n if self.config.option.verbose < -1:\n counts = Counter(item.nodeid.split(\"::\", 1)[0] for item in items)\n for name, count in sorted(counts.items()):\n self._tw.line(\"%s: %d\" % (name, count))\n else:\n for item in items:\n self._tw.line(item.nodeid)\n return\n stack: List[Node] = []\n indent = \"\"\n for item in items:\n needed_collectors = item.listchain()[1:] # strip root node\n while stack:\n if stack == needed_collectors[: len(stack)]:\n break\n stack.pop()\n for col in needed_collectors[len(stack) :]:\n stack.append(col)\n if col.name == \"()\": # Skip Instances.\n continue\n indent = (len(stack) - 1) * \" \"\n self._tw.line(f\"{indent}{col}\")\n if self.config.option.verbose >= 1:\n obj = getattr(col, \"obj\", None)\n doc = inspect.getdoc(obj) if obj else None\n if doc:\n for line in doc.splitlines():\n self._tw.line(\"{}{}\".format(indent + \" \", line))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_failures_TerminalReporter.summary_failures.if_self_config_option_tbs.if_self_config_option_tbs.else_.for_rep_in_reports_.self__handle_teardown_sec": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_failures_TerminalReporter.summary_failures.if_self_config_option_tbs.if_self_config_option_tbs.else_.for_rep_in_reports_.self__handle_teardown_sec", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 995, "end_line": 1010, "span_ids": ["TerminalReporter.summary_failures"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def summary_failures(self) -> None:\n if self.config.option.tbstyle != \"no\":\n reports: List[BaseReport] = self.getreports(\"failed\")\n if not reports:\n return\n self.write_sep(\"=\", \"FAILURES\")\n if self.config.option.tbstyle == \"line\":\n for rep in reports:\n line = self._getcrashline(rep)\n self.write_line(line)\n else:\n for rep in reports:\n msg = self._getfailureheadline(rep)\n self.write_sep(\"_\", msg, red=True, bold=True)\n self._outrep_summary(rep)\n self._handle_teardown_sections(rep.nodeid)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary.REPORTCHAR_ACTIONS_TerminalReporter.short_test_summary.if_lines_.for_line_in_lines_.self_write_line_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary.REPORTCHAR_ACTIONS_TerminalReporter.short_test_summary.if_lines_.for_line_in_lines_.self_write_line_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1126, "end_line": 1144, "span_ids": ["TerminalReporter.short_test_summary"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def short_test_summary(self) -> None:\n # ... other code\n\n REPORTCHAR_ACTIONS: Mapping[str, Callable[[List[str]], None]] = {\n \"x\": show_xfailed,\n \"X\": show_xpassed,\n \"f\": partial(show_simple, \"failed\"),\n \"s\": show_skipped,\n \"p\": partial(show_simple, \"passed\"),\n \"E\": partial(show_simple, \"error\"),\n }\n\n lines: List[str] = []\n for char in self.reportchars:\n action = REPORTCHAR_ACTIONS.get(char)\n if action: # skipping e.g. 
\"P\" (passed with output) here.\n action(lines)\n\n if lines:\n self.write_sep(\"=\", \"short test summary info\")\n for line in lines:\n self.write_line(line)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_main_color_TerminalReporter._set_main_color.self._main_color.self__determine_main_colo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_main_color_TerminalReporter._set_main_color.self._main_color.self__determine_main_colo", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1146, "end_line": 1172, "span_ids": ["TerminalReporter._get_main_color", "TerminalReporter._determine_main_color", "TerminalReporter._set_main_color"], "tokens": 291}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _get_main_color(self) -> Tuple[str, List[str]]:\n if self._main_color is None or self._known_types is None or self._is_last_item:\n self._set_main_color()\n assert self._main_color\n assert self._known_types\n return self._main_color, self._known_types\n\n def _determine_main_color(self, unknown_type_seen: bool) -> str:\n stats = self.stats\n if \"failed\" in stats or \"error\" in stats:\n main_color = \"red\"\n elif \"warnings\" in stats or \"xpassed\" in stats or unknown_type_seen:\n main_color = \"yellow\"\n elif \"passed\" in stats or not self._is_last_item:\n main_color = \"green\"\n else:\n main_color = \"yellow\"\n return main_color\n\n def _set_main_color(self) -> None:\n unknown_types: List[str] = []\n for found_type in self.stats.keys():\n if found_type: # setup/teardown reports have an empty key, ignore them\n if found_type not in KNOWN_TYPES and found_type not in unknown_types:\n unknown_types.append(found_type)\n self._known_types = list(KNOWN_TYPES) + unknown_types\n self._main_color = self._determine_main_color(bool(unknown_types))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory.mktemp_TempPathFactory.mktemp.return.p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory.mktemp_TempPathFactory.mktemp.return.p", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 78, "end_line": 100, "span_ids": ["TempPathFactory.mktemp"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": 
{}, "text": "@final\n@attr.s(init=False)\nclass TempPathFactory:\n\n def mktemp(self, basename: str, numbered: bool = True) -> Path:\n \"\"\"Create a new temporary directory managed by the factory.\n\n :param basename:\n Directory base name, must be a relative path.\n\n :param numbered:\n If ``True``, ensure the directory is unique by adding a numbered\n suffix greater than any existing one: ``basename=\"foo-\"`` and ``numbered=True``\n means that this function will create directories named ``\"foo-0\"``,\n ``\"foo-1\"``, ``\"foo-2\"`` and so on.\n\n :returns:\n The path to the new directory.\n \"\"\"\n basename = self._ensure_relative_to_basetemp(basename)\n if not numbered:\n p = self.getbasetemp().joinpath(basename)\n p.mkdir()\n else:\n p = make_numbered_dir(root=self.getbasetemp(), prefix=basename)\n self._trace(\"mktemp\", p)\n return p", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_UnitTestCase_UnitTestCase.collect.if_not_foundsomething_.if_runtest_is_not_None_.if_ut_is_None_or_runtest_.yield_TestCaseFunction_fr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_UnitTestCase_UnitTestCase.collect.if_not_foundsomething_.if_runtest_is_not_None_.if_ut_is_None_or_runtest_.yield_TestCaseFunction_fr", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 61, "end_line": 95, "span_ids": ["UnitTestCase", "UnitTestCase.collect"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UnitTestCase(Class):\n # Marker for fixturemanger.getfixtureinfo()\n # to declare that our children do not support funcargs.\n nofuncargs = True\n\n def collect(self) -> Iterable[Union[Item, Collector]]:\n from unittest import TestLoader\n\n cls = self.obj\n if not getattr(cls, \"__test__\", True):\n return\n\n skipped = _is_skipped(cls)\n if not skipped:\n self._inject_setup_teardown_fixtures(cls)\n self._inject_setup_class_fixture()\n\n self.session._fixturemanager.parsefactories(self, unittest=True)\n loader = TestLoader()\n foundsomething = False\n for name in loader.getTestCaseNames(self.obj):\n x = getattr(self.obj, name)\n if not getattr(x, \"__test__\", True):\n continue\n funcobj = getimfunc(x)\n yield TestCaseFunction.from_parent(self, name=name, callobj=funcobj)\n foundsomething = True\n\n if not foundsomething:\n runtest = getattr(self.obj, \"runTest\", None)\n if runtest is not None:\n ut = sys.modules.get(\"twisted.trial.unittest\", None)\n # Type ignored because `ut` is an opaque module.\n if ut is None or runtest != ut.TestCase.runTest: # type: ignore\n yield TestCaseFunction.from_parent(self, name=\"runTest\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction_TestCaseFunction.startTest.pass": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction_TestCaseFunction.startTest.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 184, "end_line": 206, "span_ids": ["TestCaseFunction.startTest", "TestCaseFunction", "TestCaseFunction.teardown", "TestCaseFunction.setup"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaseFunction(Function):\n nofuncargs = True\n _excinfo: Optional[List[_pytest._code.ExceptionInfo[BaseException]]] = None\n _testcase: Optional[\"unittest.TestCase\"] = None\n\n def setup(self) -> None:\n # A bound method to be called during teardown() if set (see 'runtest()').\n self._explicit_tearDown: Optional[Callable[[], None]] = None\n assert self.parent is not None\n self._testcase = self.parent.obj(self.name) # type: ignore[attr-defined]\n self._obj = getattr(self._testcase, self.name)\n if hasattr(self, \"_request\"):\n self._request._fillfixtures()\n\n def teardown(self) -> None:\n if self._explicit_tearDown is not None:\n self._explicit_tearDown()\n self._explicit_tearDown = None\n self._testcase = None\n self._obj = None\n\n def startTest(self, testcase: \"unittest.TestCase\") -> None:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction._addexcinfo_TestCaseFunction._addexcinfo.self___dict___setdefault_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction._addexcinfo_TestCaseFunction._addexcinfo.self___dict___setdefault_", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 208, "end_line": 239, "span_ids": ["TestCaseFunction._addexcinfo"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaseFunction(Function):\n\n def _addexcinfo(self, rawexcinfo: \"_SysExcInfoType\") -> None:\n # Unwrap potential exception info (see twisted trial support below).\n rawexcinfo = getattr(rawexcinfo, \"_rawexcinfo\", rawexcinfo)\n try:\n excinfo = _pytest._code.ExceptionInfo(rawexcinfo) # type: ignore[arg-type]\n # Invoke the attributes to trigger storing the traceback\n # trial causes some issue there.\n excinfo.value\n excinfo.traceback\n except TypeError:\n try:\n try:\n values = traceback.format_exception(*rawexcinfo)\n values.insert(\n 0,\n \"NOTE: Incompatible Exception Representation, \"\n \"displaying natively:\\n\\n\",\n )\n fail(\"\".join(values), pytrace=False)\n except (fail.Exception, KeyboardInterrupt):\n raise\n except BaseException:\n fail(\n \"ERROR: Unknown Incompatible 
Exception \"\n \"representation:\\n%r\" % (rawexcinfo,),\n pytrace=False,\n )\n except KeyboardInterrupt:\n raise\n except fail.Exception:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n self.__dict__.setdefault(\"_excinfo\", []).append(excinfo)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__main__.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__main__.py__", "embedding": null, "metadata": {"file_path": "src/pytest/__main__.py", "file_name": "__main__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 6, "span_ids": ["impl", "docstring", "imports"], "tokens": 26}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"The pytest entry point.\"\"\"\nimport pytest\n\nif __name__ == \"__main__\":\n raise SystemExit(pytest.console_main())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/collect.py_sys_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/collect.py_sys_", "embedding": null, "metadata": {"file_path": "src/pytest/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 40, "span_ids": ["impl", "FakeCollectModule", "FakeCollectModule.__getattr__", "imports", "impl:3", "FakeCollectModule.__dir__"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport warnings\nfrom types import ModuleType\nfrom typing import Any\nfrom typing import List\n\nimport pytest\nfrom _pytest.deprecated import PYTEST_COLLECT_MODULE\n\nCOLLECT_FAKEMODULE_ATTRIBUTES = [\n \"Collector\",\n \"Module\",\n \"Function\",\n \"Instance\",\n \"Session\",\n \"Item\",\n \"Class\",\n \"File\",\n \"_fillfuncargs\",\n]\n\n\nclass FakeCollectModule(ModuleType):\n def __init__(self) -> None:\n super().__init__(\"pytest.collect\")\n self.__all__ = list(COLLECT_FAKEMODULE_ATTRIBUTES)\n self.__pytest = pytest\n\n def __dir__(self) -> List[str]:\n return dir(super()) + self.__all__\n\n def __getattr__(self, name: str) -> Any:\n if name not in self.__all__:\n raise AttributeError(name)\n warnings.warn(PYTEST_COLLECT_MODULE.format(name=name), stacklevel=2)\n return getattr(pytest, name)\n\n\nsys.modules[\"pytest.collect\"] = FakeCollectModule()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_better_reporting_on_conftest_load_failure_TestGeneralUsage.test_better_reporting_on_conftest_load_failure.assert_result_stderr_line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_better_reporting_on_conftest_load_failure_TestGeneralUsage.test_better_reporting_on_conftest_load_failure.assert_result_stderr_line", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 196, "end_line": 225, "span_ids": ["TestGeneralUsage.test_better_reporting_on_conftest_load_failure"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n @pytest.mark.filterwarnings(\"default\")\n def test_better_reporting_on_conftest_load_failure(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Show a user-friendly traceback on conftest import failures (#486, #3332)\"\"\"\n pytester.makepyfile(\"\")\n conftest = pytester.makeconftest(\n \"\"\"\n def foo():\n import qwerty\n foo()\n \"\"\"\n )\n result = pytester.runpytest(\"--help\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *--version*\n *warning*conftest.py*\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.stdout.lines == []\n assert result.stderr.lines == [\n f\"ImportError while loading conftest '{conftest}'.\",\n \"conftest.py:3: in \",\n \" foo()\",\n \"conftest.py:2: in foo\",\n \" import qwerty\",\n \"E ModuleNotFoundError: No module named 'qwerty'\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurationsWithFixture_TestDurationsWithFixture.test_setup_function.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurationsWithFixture_TestDurationsWithFixture.test_setup_function.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 958, "end_line": 982, "span_ids": ["TestDurationsWithFixture.test_setup_function", "TestDurationsWithFixture"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDurationsWithFixture:\n source = \"\"\"\n import pytest\n from _pytest import timing\n\n @pytest.fixture\n def setup_fixt():\n timing.sleep(2)\n\n def test_1(setup_fixt):\n timing.sleep(5)\n \"\"\"\n\n def test_setup_function(self, pytester: Pytester, mock_timing) -> None:\n pytester.makepyfile(self.source)\n result = pytester.runpytest_inprocess(\"--durations=10\")\n assert result.ret == 0\n\n result.stdout.fnmatch_lines_random(\n 
\"\"\"\n *durations*\n 5.00s call *test_1*\n 2.00s setup *test_1*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_pdb_can_be_rewritten_test_pdb_can_be_rewritten.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_pdb_can_be_rewritten_test_pdb_can_be_rewritten.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1237, "end_line": 1271, "span_ids": ["test_pdb_can_be_rewritten"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pdb_can_be_rewritten(pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"conftest.py\": \"\"\"\n import pytest\n pytest.register_assert_rewrite(\"pdb\")\n \"\"\",\n \"__init__.py\": \"\",\n \"pdb.py\": \"\"\"\n def check():\n assert 1 == 2\n \"\"\",\n \"test_pdb.py\": \"\"\"\n def test():\n import pdb\n assert pdb.check()\n \"\"\",\n }\n )\n # Disable debugging plugin itself to avoid:\n # > INTERNALERROR> AttributeError: module 'pdb' has no attribute 'set_trace'\n result = pytester.runpytest_subprocess(\"-p\", \"no:debugging\", \"-vv\")\n result.stdout.fnmatch_lines(\n [\n \" def check():\",\n \"> assert 1 == 2\",\n \"E assert 1 == 2\",\n \"E +1\",\n \"E -2\",\n \"\",\n \"pdb.py:2: AssertionError\",\n \"*= 1 failed in *\",\n ]\n )\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_TestReprFuncArgs_TestReprFuncArgs.test_not_raise_exception_with_mixed_encoding.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_TestReprFuncArgs_TestReprFuncArgs.test_not_raise_exception_with_mixed_encoding.assert_", "embedding": null, "metadata": {"file_path": "testing/code/test_code.py", "file_name": "test_code.py", "file_type": "text/x-python", "category": "test", "start_line": 173, "end_line": 183, "span_ids": ["TestReprFuncArgs", "TestReprFuncArgs.test_not_raise_exception_with_mixed_encoding"], "tokens": 105}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReprFuncArgs:\n def test_not_raise_exception_with_mixed_encoding(self, tw_mock) -> None:\n args = [(\"unicode_string\", \"S\u00e3o Paulo\"), (\"utf8_string\", b\"S\\xc3\\xa3o Paulo\")]\n\n r = ReprFuncArgs(args)\n r.toterminal(tw_mock)\n\n assert (\n tw_mock.lines[0]\n == r\"unicode_string = S\u00e3o Paulo, utf8_string = b'S\\xc3\\xa3o Paulo'\"\n )", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_ExceptionChainRepr_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_test_ExceptionChainRepr_", "embedding": null, "metadata": {"file_path": "testing/code/test_code.py", "file_name": "test_code.py", "file_type": "text/x-python", "category": "test", "start_line": 186, "end_line": 201, "span_ids": ["test_ExceptionChainRepr"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_ExceptionChainRepr():\n \"\"\"Test ExceptionChainRepr, especially with regard to being hashable.\"\"\"\n try:\n raise ValueError()\n except ValueError:\n excinfo1 = ExceptionInfo.from_current()\n excinfo2 = ExceptionInfo.from_current()\n\n repr1 = excinfo1.getrepr()\n repr2 = excinfo2.getrepr()\n assert repr1 != repr2\n\n assert isinstance(repr1, ExceptionChainRepr)\n assert hash(repr1) != hash(repr2)\n assert repr1 is not excinfo1.getrepr()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_recursion_index_TestTraceback_f_g_h.test_traceback_only_specific_recursion_errors.assert_RuntimeError_hel": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_recursion_index_TestTraceback_f_g_h.test_traceback_only_specific_recursion_errors.assert_RuntimeError_hel", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 205, "end_line": 225, "span_ids": ["TestTraceback_f_g_h.test_traceback_only_specific_recursion_errors", "TestTraceback_f_g_h.test_traceback_recursion_index"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceback_f_g_h:\n\n def test_traceback_recursion_index(self):\n def f(n):\n if n < 10:\n n += 1\n f(n)\n\n excinfo = pytest.raises(RuntimeError, f, 8)\n traceback = excinfo.traceback\n recindex = traceback.recursionindex()\n assert recindex == 3\n\n def test_traceback_only_specific_recursion_errors(self, monkeypatch):\n def f(n):\n if n == 0:\n raise RuntimeError(\"hello\")\n f(n - 1)\n\n excinfo = pytest.raises(RuntimeError, f, 25)\n monkeypatch.delattr(excinfo.traceback.__class__, \"recursionindex\")\n repr = excinfo.getrepr()\n assert \"RuntimeError: hello\" in str(repr.reprcrash)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_no_recursion_index_TestTraceback_f_g_h.test_traceback_messy_recursion.assert_excinfo_traceback_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_no_recursion_index_TestTraceback_f_g_h.test_traceback_messy_recursion.assert_excinfo_traceback_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 238, "end_line": 277, "span_ids": ["TestTraceback_f_g_h.test_traceback_no_recursion_index", "TestTraceback_f_g_h.test_traceback_messy_recursion"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceback_f_g_h:\n\n def test_traceback_no_recursion_index(self) -> None:\n def do_stuff() -> None:\n raise RuntimeError\n\n def reraise_me() -> None:\n import sys\n\n exc, val, tb = sys.exc_info()\n assert val is not None\n raise val.with_traceback(tb)\n\n def f(n: int) -> None:\n try:\n do_stuff()\n except BaseException:\n reraise_me()\n\n excinfo = pytest.raises(RuntimeError, f, 8)\n assert excinfo is not None\n traceback = excinfo.traceback\n recindex = traceback.recursionindex()\n assert recindex is None\n\n def test_traceback_messy_recursion(self):\n # XXX: simplified locally testable version\n decorator = pytest.importorskip(\"decorator\").decorator\n\n def log(f, *k, **kw):\n print(f\"{k} {kw}\")\n f(*k, **kw)\n\n log = decorator(log)\n\n def fail():\n raise ValueError(\"\")\n\n fail = log(log(fail))\n\n excinfo = pytest.raises(ValueError, fail)\n assert excinfo.traceback.recursionindex() is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_failing_fullsource_TestFormattedExcinfo.test_repr_source_failing_fullsource.assert_repr_chain_0_0_r": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_failing_fullsource_TestFormattedExcinfo.test_repr_source_failing_fullsource.assert_repr_chain_0_0_r", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 509, "end_line": 522, "span_ids": ["TestFormattedExcinfo.test_repr_source_failing_fullsource"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_source_failing_fullsource(self, monkeypatch) -> None:\n pr = FormattedExcinfo()\n\n try:\n 1 / 0\n except ZeroDivisionError:\n excinfo = ExceptionInfo.from_current()\n\n with monkeypatch.context() as m:\n 
m.setattr(_pytest._code.Code, \"fullsource\", property(lambda self: None))\n repr = pr.repr_excinfo(excinfo)\n\n assert repr.reprtraceback.reprentries[0].lines[0] == \"> ???\"\n assert repr.chain[0][0].reprentries[0].lines[0] == \"> ???\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_with_invalid_cwd_TestFormattedExcinfo.test_repr_traceback_with_invalid_cwd.assert_raised_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_with_invalid_cwd_TestFormattedExcinfo.test_repr_traceback_with_invalid_cwd.assert_raised_3", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 793, "end_line": 842, "span_ids": ["TestFormattedExcinfo.test_repr_traceback_with_invalid_cwd"], "tokens": 335}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_traceback_with_invalid_cwd(self, importasmod, monkeypatch) -> None:\n mod = importasmod(\n \"\"\"\n def f(x):\n raise ValueError(x)\n def entry():\n f(0)\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n\n p = FormattedExcinfo(abspath=False)\n\n raised = 0\n\n orig_path_cwd = Path.cwd\n\n def raiseos():\n nonlocal raised\n upframe = sys._getframe().f_back\n assert upframe is not None\n if upframe.f_code.co_name == \"_makepath\":\n # Only raise with expected calls, but not via e.g. 
inspect for\n # py38-windows.\n raised += 1\n raise OSError(2, \"custom_oserror\")\n return orig_path_cwd()\n\n monkeypatch.setattr(Path, \"cwd\", raiseos)\n assert p._makepath(Path(__file__)) == __file__\n assert raised == 1\n repr_tb = p.repr_traceback(excinfo)\n\n matcher = LineMatcher(str(repr_tb).splitlines())\n matcher.fnmatch_lines(\n [\n \"def entry():\",\n \"> f(0)\",\n \"\",\n f\"{mod.__file__}:5: \",\n \"_ _ *\",\n \"\",\n \" def f(x):\",\n \"> raise ValueError(x)\",\n \"E ValueError: 0\",\n \"\",\n f\"{mod.__file__}:3: ValueError\",\n ]\n )\n assert raised == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_findsource_test_findsource.assert_src_lineno_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_findsource_test_findsource.assert_src_lineno_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 330, "end_line": 349, "span_ids": ["test_findsource"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_findsource(monkeypatch) -> None:\n from _pytest._code.source import findsource\n\n filename = \"\"\n lines = [\"if 1:\\n\", \" def x():\\n\", \" pass\\n\"]\n co = compile(\"\".join(lines), filename, \"exec\")\n\n # Type ignored because linecache.cache is private.\n monkeypatch.setitem(linecache.cache, filename, (1, None, lines, filename)) # type: ignore[attr-defined]\n\n src, lineno = findsource(co)\n assert src is not None\n assert \"if 1:\" in str(src)\n\n d: Dict[str, Any] = {}\n eval(co, d)\n src, lineno = findsource(d[\"x\"])\n assert src is not None\n assert \"if 1:\" in str(src)\n assert src[lineno] == \" def x():\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_re_pytest_collection_modifyitems.yield": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_re_pytest_collection_modifyitems.yield", "embedding": null, "metadata": {"file_path": "testing/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 64, "span_ids": ["impl", "pytest_collection_modifyitems", "imports"], "tokens": 384}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import re\nimport sys\nfrom typing import List\n\nimport pytest\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\nif sys.gettrace():\n\n @pytest.fixture(autouse=True)\n def restore_tracing():\n \"\"\"Restore 
tracing function (when run with Coverage.py).\n\n https://bugs.python.org/issue37011\n \"\"\"\n orig_trace = sys.gettrace()\n yield\n if sys.gettrace() != orig_trace:\n sys.settrace(orig_trace)\n\n\n@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_collection_modifyitems(items):\n \"\"\"Prefer faster tests.\n\n Use a hookwrapper to do this in the beginning, so e.g. --ff still works\n correctly.\n \"\"\"\n fast_items = []\n slow_items = []\n slowest_items = []\n neutral_items = []\n\n spawn_names = {\"spawn_pytest\", \"spawn\"}\n\n for item in items:\n try:\n fixtures = item.fixturenames\n except AttributeError:\n # doctest at least\n # (https://github.com/pytest-dev/pytest/issues/5070)\n neutral_items.append(item)\n else:\n if \"pytester\" in fixtures:\n co_names = item.function.__code__.co_names\n if spawn_names.intersection(co_names):\n item.add_marker(pytest.mark.uses_pexpect)\n slowest_items.append(item)\n elif \"runpytest_subprocess\" in co_names:\n slowest_items.append(item)\n else:\n slow_items.append(item)\n item.add_marker(pytest.mark.slow)\n else:\n marker = item.get_closest_marker(\"slow\")\n if marker:\n slowest_items.append(item)\n else:\n fast_items.append(item)\n\n items[:] = fast_items + neutral_items + slow_items + slowest_items\n\n yield", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_asynctest.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_asynctest.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/unittest/test_unittest_asynctest.py", "file_name": "test_unittest_asynctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 24, "span_ids": ["impl", "docstring", "Test", "Test.tearDown", "Test.test_ok", "imports", "Test.test_teardowns", "Test.test_error"], "tokens": 103}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Issue #7110\"\"\"\nimport asyncio\nfrom typing import List\n\nimport asynctest\n\n\nteardowns: List[None] = []\n\n\nclass Test(asynctest.TestCase):\n async def tearDown(self):\n teardowns.append(None)\n\n async def test_error(self):\n await asyncio.sleep(0)\n self.fail(\"failing on purpose\")\n\n async def test_ok(self):\n await asyncio.sleep(0)\n\n def test_teardowns(self):\n assert len(teardowns) == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py", "file_name": "test_1.py", "file_type": "text/x-python", "category": "test", "start_line": 
1, "end_line": 22, "span_ids": ["func", "test_bar", "imports", "test_foo_1", "test_foo"], "tokens": 70}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import warnings\n\nimport pytest\n\n\ndef func(msg):\n warnings.warn(UserWarning(msg))\n\n\n@pytest.mark.parametrize(\"i\", range(20))\ndef test_foo(i):\n func(\"foo\")\n\n\ndef test_foo_1():\n func(\"foo\")\n\n\n@pytest.mark.parametrize(\"i\", range(20))\ndef test_bar(i):\n func(\"bar\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py", "file_name": "test_2.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 6, "span_ids": ["imports", "test_2"], "tokens": 17}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from test_1 import func\n\n\ndef test_2():\n func(\"foo\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_skip_duplicates_by_default_test_skip_duplicates_by_default.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_skip_duplicates_by_default_test_skip_duplicates_by_default.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1325, "end_line": 1342, "span_ids": ["test_skip_duplicates_by_default"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_skip_duplicates_by_default(pytester: Pytester) -> None:\n \"\"\"Test for issue https://github.com/pytest-dev/pytest/issues/1609 (#1609)\n\n Ignore duplicate directories.\n \"\"\"\n a = pytester.mkdir(\"a\")\n fh = a.joinpath(\"test_a.py\")\n fh.write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n def test_real():\n pass\n \"\"\"\n )\n )\n result = pytester.runpytest(str(a), str(a))\n result.stdout.fnmatch_lines([\"*collected 1 item*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_parameters_without_eq_semantics_TestFixtureUsages.test_parameters_without_eq_semantics.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureUsages.test_parameters_without_eq_semantics_TestFixtureUsages.test_parameters_without_eq_semantics.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1269, "end_line": 1299, "span_ids": ["TestFixtureUsages.test_parameters_without_eq_semantics"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureUsages:\n\n @pytest.mark.parametrize(\"scope\", [\"function\", \"session\"])\n def test_parameters_without_eq_semantics(self, scope, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class NoEq1: # fails on `a == b` statement\n def __eq__(self, _):\n raise RuntimeError\n\n class NoEq2: # fails on `if a == b:` statement\n def __eq__(self, _):\n class NoBool:\n def __bool__(self):\n raise RuntimeError\n return NoBool()\n\n import pytest\n @pytest.fixture(params=[NoEq1(), NoEq2()], scope={scope!r})\n def no_eq(request):\n return request.param\n\n def test1(no_eq):\n pass\n\n def test2(no_eq):\n pass\n \"\"\".format(\n scope=scope\n )\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*4 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_non_relative_path_TestParameterizedSubRequest.test_non_relative_path.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestParameterizedSubRequest.test_non_relative_path_TestParameterizedSubRequest.test_non_relative_path.None_6", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3831, "end_line": 3890, "span_ids": ["TestParameterizedSubRequest.test_non_relative_path"], "tokens": 402}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParameterizedSubRequest:\n\n def test_non_relative_path(self, pytester: Pytester) -> None:\n tests_dir = pytester.mkdir(\"tests\")\n fixdir = pytester.mkdir(\"fixtures\")\n fixfile = fixdir.joinpath(\"fix.py\")\n fixfile.write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.fixture(params=[0, 1, 2])\n def fix_with_param(request):\n return request.param\n \"\"\"\n )\n )\n\n testfile = tests_dir.joinpath(\"test_foos.py\")\n 
testfile.write_text(\n textwrap.dedent(\n \"\"\"\\\n from fix import fix_with_param\n\n def test_foo(request):\n request.getfixturevalue('fix_with_param')\n \"\"\"\n )\n )\n\n os.chdir(tests_dir)\n pytester.syspathinsert(fixdir)\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"The requested fixture has no parameter defined for test:\",\n \" test_foos.py::test_foo\",\n \"\",\n \"Requested fixture 'fix_with_param' defined in:\",\n f\"{fixfile}:4\",\n \"Requested here:\",\n \"test_foos.py:4\",\n \"*1 failed*\",\n ]\n )\n\n # With non-overlapping rootdir, passing tests_dir.\n rootdir = pytester.mkdir(\"rootdir\")\n os.chdir(rootdir)\n result = pytester.runpytest(\"--rootdir\", rootdir, tests_dir)\n result.stdout.fnmatch_lines(\n [\n \"The requested fixture has no parameter defined for test:\",\n \" test_foos.py::test_foo\",\n \"\",\n \"Requested fixture 'fix_with_param' defined in:\",\n f\"{fixfile}:4\",\n \"Requested here:\",\n f\"{testfile}:4\",\n \"*1 failed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_indirect_fixture_does_not_break_scope_test_indirect_fixture_does_not_break_scope.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_indirect_fixture_does_not_break_scope_test_indirect_fixture_does_not_break_scope.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4336, "end_line": 4384, "span_ids": ["test_indirect_fixture_does_not_break_scope"], "tokens": 331}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_indirect_fixture_does_not_break_scope(pytester: Pytester) -> None:\n \"\"\"Ensure that fixture scope is respected when using indirect fixtures (#570)\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n instantiated = []\n\n @pytest.fixture(scope=\"session\")\n def fixture_1(request):\n instantiated.append((\"fixture_1\", request.param))\n\n\n @pytest.fixture(scope=\"session\")\n def fixture_2(request):\n instantiated.append((\"fixture_2\", request.param))\n\n\n scenarios = [\n (\"A\", \"a1\"),\n (\"A\", \"a2\"),\n (\"B\", \"b1\"),\n (\"B\", \"b2\"),\n (\"C\", \"c1\"),\n (\"C\", \"c2\"),\n ]\n\n @pytest.mark.parametrize(\n \"fixture_1,fixture_2\", scenarios, indirect=[\"fixture_1\", \"fixture_2\"]\n )\n def test_create_fixtures(fixture_1, fixture_2):\n pass\n\n\n def test_check_fixture_instantiations():\n assert instantiated == [\n ('fixture_1', 'A'),\n ('fixture_2', 'a1'),\n ('fixture_2', 'a2'),\n ('fixture_1', 'B'),\n ('fixture_2', 'b1'),\n ('fixture_2', 'b2'),\n ('fixture_1', 'C'),\n ('fixture_2', 'c1'),\n ('fixture_2', 'c2'),\n ]\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=7)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": 
"1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_yield_fixture_with_no_value_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_yield_fixture_with_no_value_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4442, "end_line": 4460, "span_ids": ["test_yield_fixture_with_no_value"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_yield_fixture_with_no_value(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(name='custom')\n def empty_yield():\n if False:\n yield\n\n def test_fixt(custom):\n pass\n \"\"\"\n )\n expected = \"E ValueError: custom did not yield a value\"\n result = pytester.runpytest()\n result.assert_outcomes(errors=1)\n result.stdout.fnmatch_lines([expected])\n assert result.ret == ExitCode.TESTS_FAILED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_error_TestMetafunc.test_parametrize_error.with_pytest_raises_TypeEr._type_ignore_arg_type_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_error_TestMetafunc.test_parametrize_error.with_pytest_raises_TypeEr._type_ignore_arg_type_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 67, "end_line": 80, "span_ids": ["TestMetafunc.test_parametrize_error"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_error(self) -> None:\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x\", [1, 2])\n pytest.raises(ValueError, lambda: metafunc.parametrize(\"x\", [5, 6]))\n pytest.raises(ValueError, lambda: metafunc.parametrize(\"x\", [5, 6]))\n metafunc.parametrize(\"y\", [1, 2])\n pytest.raises(ValueError, lambda: metafunc.parametrize(\"y\", [5, 6]))\n pytest.raises(ValueError, lambda: metafunc.parametrize(\"y\", [5, 6]))\n\n with pytest.raises(TypeError, match=\"^ids must be a callable or an iterable$\"):\n metafunc.parametrize(\"y\", [5, 6], ids=42) # type: ignore[arg-type]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_error_iterator_TestMetafunc.test_parametrize_error_iterator.with_pytest_raises_._type_ignore_arg_type_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_error_iterator_TestMetafunc.test_parametrize_error_iterator.with_pytest_raises_._type_ignore_arg_type_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 82, "end_line": 110, "span_ids": ["TestMetafunc.test_parametrize_error_iterator.Exc.__repr__", "TestMetafunc.test_parametrize_error_iterator.Exc", "TestMetafunc.test_parametrize_error_iterator"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_error_iterator(self) -> None:\n def func(x):\n raise NotImplementedError()\n\n class Exc(Exception):\n def __repr__(self):\n return \"Exc(from_gen)\"\n\n def gen() -> Iterator[Union[int, None, Exc]]:\n yield 0\n yield None\n yield Exc()\n\n metafunc = self.Metafunc(func)\n # When the input is an iterator, only len(args) are taken,\n # so the bad Exc isn't reached.\n metafunc.parametrize(\"x\", [1, 2], ids=gen()) # type: ignore[arg-type]\n assert [(x.funcargs, x.id) for x in metafunc._calls] == [\n ({\"x\": 1}, \"0\"),\n ({\"x\": 2}, \"2\"),\n ]\n with pytest.raises(\n fail.Exception,\n match=(\n r\"In func: ids must be list of string/float/int/bool, found:\"\n r\" Exc\\(from_gen\\) \\(type: \\) at index 2\"\n ),\n ):\n metafunc.parametrize(\"x\", [1, 2, 3], ids=gen()) # type: ignore[arg-type]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_bad_scope_TestMetafunc.test_parametrize_request_name.with_pytest_raises_.metafunc_parametrize_req": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_bad_scope_TestMetafunc.test_parametrize_request_name.with_pytest_raises_.metafunc_parametrize_req", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 115, "end_line": 137, "span_ids": ["TestMetafunc.test_parametrize_request_name", "TestMetafunc.test_parametrize_bad_scope"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_bad_scope(self) -> None:\n def func(x):\n pass\n\n metafunc = self.Metafunc(func)\n with pytest.raises(\n fail.Exception,\n match=r\"parametrize\\(\\) call in func got an unexpected scope value 'doggy'\",\n ):\n metafunc.parametrize(\"x\", [1], scope=\"doggy\") # type: ignore[arg-type]\n\n def test_parametrize_request_name(self, pytester: Pytester) -> None:\n \"\"\"Show proper error when 'request' is used as a parameter name in parametrize (#6183)\"\"\"\n\n def func(request):\n 
raise NotImplementedError()\n\n metafunc = self.Metafunc(func)\n with pytest.raises(\n fail.Exception,\n match=r\"'request' is a reserved name and cannot be used in @pytest.mark.parametrize\",\n ):\n metafunc.parametrize(\"request\", [1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_and_id_TestMetafunc.test_parametrize_with_wrong_number_of_ids.None_1.metafunc_parametrize_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_and_id_TestMetafunc.test_parametrize_with_wrong_number_of_ids.None_1.metafunc_parametrize_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 180, "end_line": 214, "span_ids": ["TestMetafunc.test_parametrize_and_id", "TestMetafunc.test_parametrize_and_id_unicode", "TestMetafunc.test_parametrize_with_wrong_number_of_ids"], "tokens": 298}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_and_id(self) -> None:\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n\n metafunc.parametrize(\"x\", [1, 2], ids=[\"basic\", \"advanced\"])\n metafunc.parametrize(\"y\", [\"abc\", \"def\"])\n ids = [x.id for x in metafunc._calls]\n assert ids == [\"basic-abc\", \"basic-def\", \"advanced-abc\", \"advanced-def\"]\n\n def test_parametrize_and_id_unicode(self) -> None:\n \"\"\"Allow unicode strings for \"ids\" parameter in Python 2 (##1905)\"\"\"\n\n def func(x):\n pass\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x\", [1, 2], ids=[\"basic\", \"advanced\"])\n ids = [x.id for x in metafunc._calls]\n assert ids == [\"basic\", \"advanced\"]\n\n def test_parametrize_with_wrong_number_of_ids(self) -> None:\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n\n with pytest.raises(fail.Exception):\n metafunc.parametrize(\"x\", [1, 2], ids=[\"basic\"])\n\n with pytest.raises(fail.Exception):\n metafunc.parametrize(\n (\"x\", \"y\"), [(\"abc\", \"def\"), (\"ghi\", \"jkl\")], ids=[\"one\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_iterator_without_mark_TestMetafunc.test_parametrize_ids_iterator_without_mark.assert_ids_4_6_4_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_iterator_without_mark_TestMetafunc.test_parametrize_ids_iterator_without_mark.assert_ids_4_6_4_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 216, "end_line": 232, "span_ids": ["TestMetafunc.test_parametrize_ids_iterator_without_mark"], "tokens": 197}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_ids_iterator_without_mark(self) -> None:\n def func(x, y):\n pass\n\n it = itertools.count()\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x\", [1, 2], ids=it)\n metafunc.parametrize(\"y\", [3, 4], ids=it)\n ids = [x.id for x in metafunc._calls]\n assert ids == [\"0-2\", \"0-3\", \"1-2\", \"1-3\"]\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x\", [1, 2], ids=it)\n metafunc.parametrize(\"y\", [3, 4], ids=it)\n ids = [x.id for x in metafunc._calls]\n assert ids == [\"4-6\", \"4-7\", \"5-6\", \"5-7\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_empty_list_TestMetafunc.test_idval_hypothesis.escaped_encode_ascii_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_empty_list_TestMetafunc.test_idval_hypothesis.escaped_encode_ascii_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 239, "end_line": 283, "span_ids": ["TestMetafunc.test_parametrize_empty_list", "TestMetafunc.test_parametrize_with_userobjects.A", "TestMetafunc.test_parametrize_with_userobjects.A:2", "TestMetafunc.test_parametrize_empty_list.MockConfig.getini", "TestMetafunc.test_idval_hypothesis", "TestMetafunc.test_parametrize_with_userobjects", "TestMetafunc.test_parametrize_empty_list.MockConfig"], "tokens": 339}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_empty_list(self) -> None:\n \"\"\"#510\"\"\"\n\n def func(y):\n pass\n\n class MockConfig:\n def getini(self, name):\n return \"\"\n\n @property\n def hook(self):\n return self\n\n def pytest_make_parametrize_id(self, **kw):\n pass\n\n metafunc = self.Metafunc(func, MockConfig())\n metafunc.parametrize(\"y\", [])\n assert \"skip\" == metafunc._calls[0].marks[0].name\n\n def test_parametrize_with_userobjects(self) -> None:\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n\n class A:\n pass\n\n metafunc.parametrize(\"x\", [A(), A()])\n metafunc.parametrize(\"y\", list(\"ab\"))\n assert metafunc._calls[0].id == \"x0-a\"\n assert metafunc._calls[1].id == \"x0-b\"\n assert metafunc._calls[2].id == \"x1-a\"\n assert metafunc._calls[3].id == \"x1-b\"\n\n @hypothesis.given(strategies.text() | strategies.binary())\n @hypothesis.settings(\n deadline=400.0\n ) # very close to std deadline and CI boxes are not reliable in CPU power\n def test_idval_hypothesis(self, value) -> None:\n escaped = _idval(value, \"a\", 6, None, nodeid=None, config=None)\n assert isinstance(escaped, str)\n escaped.encode(\"ascii\")", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_unicode_idval_with_config_TestMetafunc.test_unicode_idval_with_config.for_val_config_expected.assert_actual_expected": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_unicode_idval_with_config_TestMetafunc.test_unicode_idval_with_config.for_val_config_expected.assert_actual_expected", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 308, "end_line": 335, "span_ids": ["TestMetafunc.test_unicode_idval_with_config.MockConfig", "TestMetafunc.test_unicode_idval_with_config", "TestMetafunc.test_unicode_idval_with_config.MockConfig.__init__"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_unicode_idval_with_config(self) -> None:\n \"\"\"Unit test for expected behavior to obtain ids with\n disable_test_id_escaping_and_forfeit_all_rights_to_community_support\n option (#5294).\"\"\"\n\n class MockConfig:\n def __init__(self, config):\n self.config = config\n\n @property\n def hook(self):\n return self\n\n def pytest_make_parametrize_id(self, **kw):\n pass\n\n def getini(self, name):\n return self.config[name]\n\n option = \"disable_test_id_escaping_and_forfeit_all_rights_to_community_support\"\n\n values: List[Tuple[str, Any, str]] = [\n (\"a\u00e7\u00e3o\", MockConfig({option: True}), \"a\u00e7\u00e3o\"),\n (\"a\u00e7\u00e3o\", MockConfig({option: False}), \"a\\\\xe7\\\\xe3o\"),\n ]\n for val, config, expected in values:\n actual = _idval(val, \"a\", 6, None, nodeid=None, config=config)\n assert actual == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_returns_non_string_TestMetafunc.test_parametrize_ids_returns_non_string.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_ids_returns_non_string_TestMetafunc.test_parametrize_ids_returns_non_string.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 584, "end_line": 610, "span_ids": ["TestMetafunc.test_parametrize_ids_returns_non_string"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_ids_returns_non_string(self, pytester: 
Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\\\n import pytest\n\n def ids(d):\n return d\n\n @pytest.mark.parametrize(\"arg\", ({1: 2}, {3, 4}), ids=ids)\n def test(arg):\n assert arg\n\n @pytest.mark.parametrize(\"arg\", (1, 2.0, True), ids=ids)\n def test_int(arg):\n assert arg\n \"\"\"\n )\n result = pytester.runpytest(\"-vv\", \"-s\")\n result.stdout.fnmatch_lines(\n [\n \"test_parametrize_ids_returns_non_string.py::test[arg0] PASSED\",\n \"test_parametrize_ids_returns_non_string.py::test[arg1] PASSED\",\n \"test_parametrize_ids_returns_non_string.py::test_int[1] PASSED\",\n \"test_parametrize_ids_returns_non_string.py::test_int[2.0] PASSED\",\n \"test_parametrize_ids_returns_non_string.py::test_int[True] PASSED\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_ids_TestMetafunc.test_idmaker_with_ids_unique_names.assert_result_a0_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_with_ids_TestMetafunc.test_idmaker_with_ids_unique_names.assert_result_a0_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 601, "end_line": 619, "span_ids": ["TestMetafunc.test_idmaker_with_ids", "TestMetafunc.test_idmaker_with_ids_unique_names", "TestMetafunc.test_idmaker_with_paramset_id"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_with_ids(self) -> None:\n result = idmaker(\n (\"a\", \"b\"), [pytest.param(1, 2), pytest.param(3, 4)], ids=[\"a\", None]\n )\n assert result == [\"a\", \"3-4\"]\n\n def test_idmaker_with_paramset_id(self) -> None:\n result = idmaker(\n (\"a\", \"b\"),\n [pytest.param(1, 2, id=\"me\"), pytest.param(3, 4, id=\"you\")],\n ids=[\"a\", None],\n )\n assert result == [\"me\", \"you\"]\n\n def test_idmaker_with_ids_unique_names(self) -> None:\n result = idmaker(\n (\"a\"), map(pytest.param, [1, 2, 3, 4, 5]), ids=[\"a\", \"a\", \"b\", \"c\", \"b\"]\n )\n assert result == [\"a0\", \"a1\", \"b0\", \"c\", \"b1\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_TestMetafunc.test_parametrize_indirect_wrong_type.with_pytest_raises_._type_ignore_arg_type_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_parametrize_indirect_list_TestMetafunc.test_parametrize_indirect_wrong_type.with_pytest_raises_._type_ignore_arg_type_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 636, "end_line": 678, "span_ids": ["TestMetafunc.test_parametrize_indirect_wrong_type", 
"TestMetafunc.test_parametrize_indirect_list", "TestMetafunc.test_parametrize_indirect_list_empty", "TestMetafunc.test_parametrize_indirect_list_all"], "tokens": 366}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_parametrize_indirect_list(self) -> None:\n \"\"\"#714\"\"\"\n\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x, y\", [(\"a\", \"b\")], indirect=[\"x\"])\n assert metafunc._calls[0].funcargs == dict(y=\"b\")\n assert metafunc._calls[0].params == dict(x=\"a\")\n\n def test_parametrize_indirect_list_all(self) -> None:\n \"\"\"#714\"\"\"\n\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x, y\", [(\"a\", \"b\")], indirect=[\"x\", \"y\"])\n assert metafunc._calls[0].funcargs == {}\n assert metafunc._calls[0].params == dict(x=\"a\", y=\"b\")\n\n def test_parametrize_indirect_list_empty(self) -> None:\n \"\"\"#714\"\"\"\n\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n metafunc.parametrize(\"x, y\", [(\"a\", \"b\")], indirect=[])\n assert metafunc._calls[0].funcargs == dict(x=\"a\", y=\"b\")\n assert metafunc._calls[0].params == {}\n\n def test_parametrize_indirect_wrong_type(self) -> None:\n def func(x, y):\n pass\n\n metafunc = self.Metafunc(func)\n with pytest.raises(\n fail.Exception,\n match=\"In func: expected Sequence or boolean for indirect, got dict\",\n ):\n metafunc.parametrize(\"x, y\", [(\"a\", \"b\")], indirect={}) # type: ignore[arg-type]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_generate_same_function_names_issue403_TestMetafuncFunctional.test_generate_same_function_names_issue403.reprec_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_generate_same_function_names_issue403_TestMetafuncFunctional.test_generate_same_function_names_issue403.reprec_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1388, "end_line": 1404, "span_ids": ["TestMetafuncFunctional.test_generate_same_function_names_issue403"], "tokens": 110}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_generate_same_function_names_issue403(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def make_tests():\n @pytest.mark.parametrize(\"x\", range(2))\n def test_foo(x):\n pass\n return test_foo\n\n test_x = make_tests()\n test_y = make_tests()\n \"\"\"\n )\n reprec = pytester.runpytest()\n reprec.assert_outcomes(passed=4)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_misspelling_TestMetafuncFunctional.test_parametrize_misspelling.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafuncFunctional.test_parametrize_misspelling_TestMetafuncFunctional.test_parametrize_misspelling.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1406, "end_line": 1430, "span_ids": ["TestMetafuncFunctional.test_parametrize_misspelling"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafuncFunctional:\n\n def test_parametrize_misspelling(self, pytester: Pytester) -> None:\n \"\"\"#463\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrise(\"x\", range(2))\n def test_foo(x):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--collectonly\")\n result.stdout.fnmatch_lines(\n [\n \"collected 0 items / 1 error\",\n \"\",\n \"*= ERRORS =*\",\n \"*_ ERROR collecting test_parametrize_misspelling.py _*\",\n \"test_parametrize_misspelling.py:3: in \",\n ' @pytest.mark.parametrise(\"x\", range(2))',\n \"E Failed: Unknown 'parametrise' mark, did you mean 'parametrize'?\",\n \"*! 
Interrupted: 1 error during collection !*\",\n \"*= no tests collected, 1 error in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_simple_xfail_single_argname_TestMarkersWithParametrization.test_simple_xfail_single_argname.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_simple_xfail_single_argname_TestMarkersWithParametrization.test_simple_xfail_single_argname.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1656, "end_line": 1670, "span_ids": ["TestMarkersWithParametrization.test_simple_xfail_single_argname"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_simple_xfail_single_argname(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.parametrize(\"n\", [\n 2,\n pytest.param(3, marks=pytest.mark.xfail),\n 4,\n ])\n def test_isEven(n):\n assert n % 2 == 0\n \"\"\"\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2, skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_arg_TestMarkersWithParametrization.test_xfail_with_arg.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_xfail_with_arg_TestMarkersWithParametrization.test_xfail_with_arg.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1672, "end_line": 1686, "span_ids": ["TestMarkersWithParametrization.test_xfail_with_arg"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_xfail_with_arg(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n @pytest.mark.parametrize((\"n\", \"expected\"), [\n (1, 2),\n pytest.param(1, 3, marks=pytest.mark.xfail(\"True\")),\n (2, 3),\n ])\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\"\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2, skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_called_in_generate_tests_TestMarkersWithParametrization.test_parametrize_called_in_generate_tests.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_called_in_generate_tests_TestMarkersWithParametrization.test_parametrize_called_in_generate_tests.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1742, "end_line": 1763, "span_ids": ["TestMarkersWithParametrization.test_parametrize_called_in_generate_tests"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_parametrize_called_in_generate_tests(self, pytester: Pytester) -> None:\n s = \"\"\"\n import pytest\n\n\n def pytest_generate_tests(metafunc):\n passingTestData = [(1, 2),\n (2, 3)]\n failingTestData = [(1, 3),\n (2, 2)]\n\n testData = passingTestData + [pytest.param(*d, marks=pytest.mark.xfail)\n for d in failingTestData]\n metafunc.parametrize((\"n\", \"expected\"), testData)\n\n\n def test_increment(n, expected):\n assert n + 1 == expected\n \"\"\"\n pytester.makepyfile(s)\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2, skipped=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_ID_generation_string_int_works_TestMarkersWithParametrization.test_parametrize_ID_generation_string_int_works.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_ID_generation_string_int_works_TestMarkersWithParametrization.test_parametrize_ID_generation_string_int_works.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1765, "end_line": 1783, "span_ids": ["TestMarkersWithParametrization.test_parametrize_ID_generation_string_int_works"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_parametrize_ID_generation_string_int_works(\n self, pytester: Pytester\n ) -> None:\n \"\"\"#290\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def myfixture():\n return 'example'\n 
@pytest.mark.parametrize(\n 'limit', (0, '0'))\n def test_limit(limit, myfixture):\n return\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_pytest_make_parametrize_id_with_argname_TestMarkersWithParametrization.test_parametrize_positional_args.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_pytest_make_parametrize_id_with_argname_TestMarkersWithParametrization.test_parametrize_positional_args.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1829, "end_line": 1865, "span_ids": ["TestMarkersWithParametrization.test_parametrize_positional_args", "TestMarkersWithParametrization.test_pytest_make_parametrize_id_with_argname"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_pytest_make_parametrize_id_with_argname(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_make_parametrize_id(config, val, argname):\n return str(val * 2 if argname == 'x' else val * 10)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize(\"x\", range(2))\n def test_func_a(x):\n pass\n\n @pytest.mark.parametrize(\"y\", [1])\n def test_func_b(y):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n [\"*test_func_a*0*PASS*\", \"*test_func_a*2*PASS*\", \"*test_func_b*10*PASS*\"]\n )\n\n def test_parametrize_positional_args(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize(\"a\", [1], False)\n def test_foo(a):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_re_TestRaises.test_raises_repr_inflight.with_pytest_raises_E_as_.raise_E_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_re_TestRaises.test_raises_repr_inflight.with_pytest_raises_E_as_.raise_E_", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 52, "span_ids": ["TestRaises.test_raises_function", "TestRaises", "TestRaises.test_raises_callable_no_exception", "TestRaises.test_raises", "TestRaises.test_raises_repr_inflight", "TestRaises.test_raises_callable_no_exception.A.__call__", "imports", "TestRaises.test_raises_falsey_type_error", "TestRaises.test_check_callable", 
"TestRaises.test_raises_repr_inflight.E:2", "TestRaises.test_raises_callable_no_exception.A", "TestRaises.test_raises_repr_inflight.E"], "tokens": 341}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import re\nimport sys\n\nimport pytest\nfrom _pytest.outcomes import Failed\nfrom _pytest.pytester import Pytester\n\n\nclass TestRaises:\n def test_check_callable(self) -> None:\n with pytest.raises(TypeError, match=r\".* must be callable\"):\n pytest.raises(RuntimeError, \"int('qwe')\") # type: ignore[call-overload]\n\n def test_raises(self):\n excinfo = pytest.raises(ValueError, int, \"qwe\")\n assert \"invalid literal\" in str(excinfo.value)\n\n def test_raises_function(self):\n excinfo = pytest.raises(ValueError, int, \"hello\")\n assert \"invalid literal\" in str(excinfo.value)\n\n def test_raises_callable_no_exception(self) -> None:\n class A:\n def __call__(self):\n pass\n\n try:\n pytest.raises(ValueError, A())\n except pytest.fail.Exception:\n pass\n\n def test_raises_falsey_type_error(self) -> None:\n with pytest.raises(TypeError):\n with pytest.raises(AssertionError, match=0): # type: ignore[call-overload]\n raise AssertionError(\"ohai\")\n\n def test_raises_repr_inflight(self):\n \"\"\"Ensure repr() on an exception info inside a pytest.raises with block works (#4386)\"\"\"\n\n class E(Exception):\n pass\n\n with pytest.raises(E) as excinfo:\n # this test prints the inflight uninitialized object\n # using repr and str as well as pprint to demonstrate\n # it works\n print(str(excinfo))\n print(repr(excinfo))\n import pprint\n\n pprint.pprint(excinfo)\n raise E()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_match_TestRaises.test_raises_match.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_match_TestRaises.test_raises_match.None_3", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 184, "end_line": 213, "span_ids": ["TestRaises.test_raises_match"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_raises_match(self) -> None:\n msg = r\"with base \\d+\"\n with pytest.raises(ValueError, match=msg):\n int(\"asdf\")\n\n msg = \"with base 10\"\n with pytest.raises(ValueError, match=msg):\n int(\"asdf\")\n\n msg = \"with base 16\"\n expr = \"Regex pattern {!r} does not match \\\"invalid literal for int() with base 10: 'asdf'\\\".\".format(\n msg\n )\n with pytest.raises(AssertionError, match=re.escape(expr)):\n with pytest.raises(ValueError, match=msg):\n int(\"asdf\", base=10)\n\n # \"match\" without context manager.\n pytest.raises(ValueError, int, 
\"asdf\").match(\"invalid literal\")\n with pytest.raises(AssertionError) as excinfo:\n pytest.raises(ValueError, int, \"asdf\").match(msg)\n assert str(excinfo.value) == expr\n\n pytest.raises(TypeError, int, match=\"invalid\")\n\n def tfunc(match):\n raise ValueError(f\"match={match}\")\n\n pytest.raises(ValueError, tfunc, match=\"asdf\").match(\"match=asdf\")\n pytest.raises(ValueError, tfunc, match=\"\").match(\"match=\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation_TestImportHookInstallation.test_conftest_assertion_rewrite.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation_TestImportHookInstallation.test_conftest_assertion_rewrite.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 29, "end_line": 61, "span_ids": ["TestImportHookInstallation", "TestImportHookInstallation.test_conftest_assertion_rewrite"], "tokens": 285}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportHookInstallation:\n @pytest.mark.parametrize(\"initial_conftest\", [True, False])\n @pytest.mark.parametrize(\"mode\", [\"plain\", \"rewrite\"])\n def test_conftest_assertion_rewrite(\n self, pytester: Pytester, initial_conftest, mode\n ) -> None:\n \"\"\"Test that conftest files are using assertion rewrite on import (#1619).\"\"\"\n pytester.mkdir(\"foo\")\n pytester.mkdir(\"foo/tests\")\n conftest_path = \"conftest.py\" if initial_conftest else \"foo/conftest.py\"\n contents = {\n conftest_path: \"\"\"\n import pytest\n @pytest.fixture\n def check_first():\n def check(values, value):\n assert values.pop(0) == value\n return check\n \"\"\",\n \"foo/tests/test_foo.py\": \"\"\"\n def test(check_first):\n check_first([10, 30], 30)\n \"\"\",\n }\n pytester.makepyfile(**contents)\n result = pytester.runpytest_subprocess(\"--assert=%s\" % mode)\n if mode == \"plain\":\n expected = \"E AssertionError\"\n elif mode == \"rewrite\":\n expected = \"*assert 10 == 30*\"\n else:\n assert 0\n result.stdout.fnmatch_lines([expected])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_rewrite_assertions_pytester_plugin_TestImportHookInstallation.test_rewrite_assertions_pytester_plugin.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestImportHookInstallation.test_rewrite_assertions_pytester_plugin_TestImportHookInstallation.test_rewrite_assertions_pytester_plugin.None_2", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 63, "end_line": 
94, "span_ids": ["TestImportHookInstallation.test_rewrite_assertions_pytester_plugin"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportHookInstallation:\n\n def test_rewrite_assertions_pytester_plugin(self, pytester: Pytester) -> None:\n \"\"\"\n Assertions in the pytester plugin must also benefit from assertion\n rewriting (#1920).\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n pytest_plugins = ['pytester']\n def test_dummy_failure(pytester): # how meta!\n pytester.makepyfile('def test(): assert 0')\n r = pytester.inline_run()\n r.assertoutcome(passed=1)\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines(\n [\n \"> r.assertoutcome(passed=1)\",\n \"E AssertionError: ([[][]], [[][]], [[][]])*\",\n \"E assert {'failed': 1,... 'skipped': 0} == {'failed': 0,... 'skipped': 0}\",\n \"E Omitting 1 identical items, use -vv to show\",\n \"E Differing items:\",\n \"E Use -v to get the full diff\",\n ]\n )\n # XXX: unstable output.\n result.stdout.fnmatch_lines_random(\n [\n \"E {'failed': 1} != {'failed': 0}\",\n \"E {'passed': 0} != {'passed': 1}\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestBinReprIntegration_callequal.return.callop_left_right_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestBinReprIntegration_callequal.return.callop_left_right_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 298, "end_line": 330, "span_ids": ["TestBinReprIntegration", "TestBinReprIntegration.test_pytest_assertrepr_compare_called", "callop", "callequal"], "tokens": 249}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestBinReprIntegration:\n def test_pytest_assertrepr_compare_called(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n values = []\n def pytest_assertrepr_compare(op, left, right):\n values.append((op, left, right))\n\n @pytest.fixture\n def list(request):\n return values\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n assert 0 == 1\n def test_check(list):\n assert list == [(\"==\", 0, 1)]\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines([\"*test_hello*FAIL*\", \"*test_check*PASS*\"])\n\n\ndef callop(op: str, left: Any, right: Any, verbose: int = 0) -> Optional[List[str]]:\n config = mock_config(verbose=verbose)\n return plugin.pytest_assertrepr_compare(config, op, left, right)\n\n\ndef callequal(left: Any, right: Any, verbose: int = 0) -> Optional[List[str]]:\n return callop(\"==\", left, right, verbose)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_dont_wrap_strings_TestAssert_reprcompare.test_list_dont_wrap_strings.assert_diff_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_dont_wrap_strings_TestAssert_reprcompare.test_list_dont_wrap_strings.assert_diff_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 508, "end_line": 529, "span_ids": ["TestAssert_reprcompare.test_list_dont_wrap_strings"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_list_dont_wrap_strings(self) -> None:\n long_a = \"a\" * 10\n l1 = [\"a\"] + [long_a for _ in range(0, 7)]\n l2 = [\"should not get wrapped\"]\n diff = callequal(l1, l2, verbose=True)\n assert diff == [\n \"['a', 'aaaaaa...aaaaaaa', ...] == ['should not get wrapped']\",\n \"At index 0 diff: 'a' != 'should not get wrapped'\",\n \"Left contains 7 more items, first extra item: 'aaaaaaaaaa'\",\n \"Full diff:\",\n \" [\",\n \"- 'should not get wrapped',\",\n \"+ 'a',\",\n \"+ 'aaaaaaaaaa',\",\n \"+ 'aaaaaaaaaa',\",\n \"+ 'aaaaaaaaaa',\",\n \"+ 'aaaaaaaaaa',\",\n \"+ 'aaaaaaaaaa',\",\n \"+ 'aaaaaaaaaa',\",\n \"+ 'aaaaaaaaaa',\",\n \" ]\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_bad_repr_TestAssert_reprcompare.test_list_bad_repr.assert_expl_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_bad_repr_TestAssert_reprcompare.test_list_bad_repr.assert_expl_1_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 700, "end_line": 718, "span_ids": ["TestAssert_reprcompare.test_list_bad_repr", "TestAssert_reprcompare.test_list_bad_repr.A.__repr__", "TestAssert_reprcompare.test_list_bad_repr.A"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_list_bad_repr(self) -> None:\n class A:\n def __repr__(self):\n raise ValueError(42)\n\n expl = callequal([], [A()])\n assert expl is not None\n assert \"ValueError\" in \"\".join(expl)\n expl = callequal({}, {\"1\": A()}, verbose=2)\n assert expl is not None\n assert expl[0].startswith(\"{} == <[ValueError\")\n assert \"raised in repr\" in expl[0]\n assert expl[1:] == [\n \"(pytest_assertion plugin: 
representation of details failed:\"\n \" {}:{}: ValueError: 42.\".format(\n __file__, A.__repr__.__code__.co_firstlineno + 1\n ),\n \" Probably an object has a faulty __repr__.)\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_one_repr_empty_TestAssert_reprcompare.test_mojibake.assert_msg": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_one_repr_empty_TestAssert_reprcompare.test_mojibake.assert_msg", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 728, "end_line": 775, "span_ids": ["TestAssert_reprcompare.test_unicode", "TestAssert_reprcompare.test_one_repr_empty", "TestAssert_reprcompare.test_format_nonascii_explanation", "TestAssert_reprcompare.test_nonascii_text.A.__repr__", "TestAssert_reprcompare.test_mojibake", "TestAssert_reprcompare.test_repr_no_exc", "TestAssert_reprcompare.test_one_repr_empty.A", "TestAssert_reprcompare.test_nonascii_text", "TestAssert_reprcompare.test_nonascii_text.A", "TestAssert_reprcompare.test_one_repr_empty.A.__repr__"], "tokens": 325}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_one_repr_empty(self) -> None:\n \"\"\"The faulty empty string repr did trigger an unbound local error in _diff_text.\"\"\"\n\n class A(str):\n def __repr__(self):\n return \"\"\n\n expl = callequal(A(), \"\")\n assert not expl\n\n def test_repr_no_exc(self) -> None:\n expl = callequal(\"foo\", \"bar\")\n assert expl is not None\n assert \"raised in repr()\" not in \" \".join(expl)\n\n def test_unicode(self) -> None:\n assert callequal(\"\u00a3\u20ac\", \"\u00a3\") == [\n \"'\u00a3\u20ac' == '\u00a3'\",\n \"- \u00a3\",\n \"+ \u00a3\u20ac\",\n ]\n\n def test_nonascii_text(self) -> None:\n \"\"\"\n :issue: 877\n non ascii python2 str caused a UnicodeDecodeError\n \"\"\"\n\n class A(str):\n def __repr__(self):\n return \"\\xff\"\n\n expl = callequal(A(), \"1\")\n assert expl == [\"\u00ff == '1'\", \"- 1\"]\n\n def test_format_nonascii_explanation(self) -> None:\n assert util.format_explanation(\"\u03bb\")\n\n def test_mojibake(self) -> None:\n # issue 429\n left = b\"e\"\n right = b\"\\xc3\\xa9\"\n expl = callequal(left, right)\n assert expl is not None\n for line in expl:\n assert isinstance(line, str)\n msg = \"\\n\".join(expl)\n assert msg", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_python25_compile_issue257_test_reprcompare_whitespaces.assert_callequal_r_n_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_python25_compile_issue257_test_reprcompare_whitespaces.assert_callequal_r_n_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", 
"file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1219, "end_line": 1263, "span_ids": ["test_python25_compile_issue257", "test_reprcompare_whitespaces", "test_rewritten", "test_reprcompare_notin"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_python25_compile_issue257(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_rewritten():\n assert 1 == 2\n # some comment\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n \"\"\"\n *E*assert 1 == 2*\n *1 failed*\n \"\"\"\n )\n\n\ndef test_rewritten(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_rewritten():\n assert \"@py_builtins\" in globals()\n \"\"\"\n )\n assert pytester.runpytest().ret == 0\n\n\ndef test_reprcompare_notin() -> None:\n assert callop(\"not in\", \"foo\", \"aaafoobbb\") == [\n \"'foo' not in 'aaafoobbb'\",\n \"'foo' is contained here:\",\n \" aaafoobbb\",\n \"? +++\",\n ]\n\n\ndef test_reprcompare_whitespaces() -> None:\n assert callequal(\"\\r\\n\", \"\\n\") == [\n r\"'\\r\\n' == '\\n'\",\n r\"Strings contain only whitespace, escaping them using repr()\",\n r\"- '\\n'\",\n r\"+ '\\r\\n'\",\n r\"? ++\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_failedfirst_order_TestLastFailed.test_failedfirst_order.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_failedfirst_order_TestLastFailed.test_failedfirst_order.None_2", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 311, "end_line": 328, "span_ids": ["TestLastFailed.test_failedfirst_order"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_failedfirst_order(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_a=\"def test_always_passes(): pass\",\n test_b=\"def test_always_fails(): assert 0\",\n )\n result = pytester.runpytest()\n # Test order will be collection order; alphabetical\n result.stdout.fnmatch_lines([\"test_a.py*\", \"test_b.py*\"])\n result = pytester.runpytest(\"--ff\")\n # Test order will be failing tests first\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: rerun previous 1 failure first\",\n \"test_b.py*\",\n \"test_a.py*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_usecase_splice_TestLastFailed.test_lastfailed_usecase_splice.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_usecase_splice_TestLastFailed.test_lastfailed_usecase_splice.None_4", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 374, "end_line": 387, "span_ids": ["TestLastFailed.test_lastfailed_usecase_splice"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_usecase_splice(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.setattr(\"sys.dont_write_bytecode\", True)\n pytester.makepyfile(\n \"def test_1(): assert 0\", test_something=\"def test_2(): assert 0\"\n )\n p2 = pytester.path.joinpath(\"test_something.py\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 failed*\"])\n result = pytester.runpytest(\"--lf\", p2)\n result.stdout.fnmatch_lines([\"*1 failed*\"])\n result = pytester.runpytest(\"--lf\")\n result.stdout.fnmatch_lines([\"*2 failed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureManager_TestCaptureManager.test_init_capturing.try_.finally_.capouter_stop_capturing_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureManager_TestCaptureManager.test_init_capturing.try_.finally_.capouter_stop_capturing_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 58, "end_line": 97, "span_ids": ["TestCaptureManager.test_init_capturing", "TestCaptureManager", "TestCaptureManager.test_capturing_basic_api"], "tokens": 323}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureManager:\n @pytest.mark.parametrize(\"method\", [\"no\", \"sys\", \"fd\"])\n def test_capturing_basic_api(self, method) -> None:\n capouter = StdCaptureFD()\n old = sys.stdout, sys.stderr, sys.stdin\n try:\n capman = CaptureManager(method)\n capman.start_global_capturing()\n capman.suspend_global_capture()\n outerr = capman.read_global_capture()\n assert outerr == (\"\", \"\")\n capman.suspend_global_capture()\n outerr = capman.read_global_capture()\n assert outerr == (\"\", \"\")\n print(\"hello\")\n capman.suspend_global_capture()\n out, err = capman.read_global_capture()\n if method == \"no\":\n assert old == (sys.stdout, sys.stderr, sys.stdin)\n else:\n assert not out\n capman.resume_global_capture()\n print(\"hello\")\n 
capman.suspend_global_capture()\n out, err = capman.read_global_capture()\n if method != \"no\":\n assert out == \"hello\\n\"\n capman.stop_global_capturing()\n finally:\n capouter.stop_capturing()\n\n def test_init_capturing(self):\n capouter = StdCaptureFD()\n try:\n capman = CaptureManager(\"fd\")\n capman.start_global_capturing()\n pytest.raises(AssertionError, capman.start_global_capturing)\n capman.stop_global_capturing()\n finally:\n capouter.stop_capturing()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capturing_unicode_test_capturing_bytes_in_utf8_encoding.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capturing_unicode_test_capturing_bytes_in_utf8_encoding.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 100, "end_line": 126, "span_ids": ["test_capturing_bytes_in_utf8_encoding", "test_capturing_unicode"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"method\", [\"fd\", \"sys\"])\ndef test_capturing_unicode(pytester: Pytester, method: str) -> None:\n obj = \"'b\\u00f6y'\"\n pytester.makepyfile(\n \"\"\"\\\n # taken from issue 227 from nosetests\n def test_unicode():\n import sys\n print(sys.stdout)\n print(%s)\n \"\"\"\n % obj\n )\n result = pytester.runpytest(\"--capture=%s\" % method)\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n\n@pytest.mark.parametrize(\"method\", [\"fd\", \"sys\"])\ndef test_capturing_bytes_in_utf8_encoding(pytester: Pytester, method: str) -> None:\n pytester.makepyfile(\n \"\"\"\\\n def test_unicode():\n print('b\\\\u00f6y')\n \"\"\"\n )\n result = pytester.runpytest(\"--capture=%s\" % method)\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_collect_capturing_test_collect_capturing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_collect_capturing_test_collect_capturing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 129, "end_line": 147, "span_ids": ["test_collect_capturing"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_capturing(pytester: Pytester) -> None:\n p = 
pytester.makepyfile(\n \"\"\"\n import sys\n\n print(\"collect %s failure\" % 13)\n sys.stderr.write(\"collect %s_stderr failure\" % 13)\n import xyz42123\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"*Captured stdout*\",\n \"collect 13 failure\",\n \"*Captured stderr*\",\n \"collect 13_stderr failure\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capsysbinary_TestCaptureFixture.test_capsysbinary.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capsysbinary_TestCaptureFixture.test_capsysbinary.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 549, "end_line": 583, "span_ids": ["TestCaptureFixture.test_capsysbinary"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n def test_capsysbinary(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n r\"\"\"\n def test_hello(capsysbinary):\n import sys\n\n sys.stdout.buffer.write(b'hello')\n\n # Some likely un-decodable bytes.\n sys.stdout.buffer.write(b'\\xfe\\x98\\x20')\n\n sys.stdout.buffer.flush()\n\n # Ensure writing in text mode still works and is captured.\n # https://github.com/pytest-dev/pytest/issues/6871\n print(\"world\", flush=True)\n\n out, err = capsysbinary.readouterr()\n assert out == b'hello\\xfe\\x98\\x20world\\n'\n assert err == b''\n\n print(\"stdout after\")\n print(\"stderr after\", file=sys.stderr)\n \"\"\"\n )\n result = pytester.runpytest(str(p1), \"-rA\")\n result.stdout.fnmatch_lines(\n [\n \"*- Captured stdout call -*\",\n \"stdout after\",\n \"*- Captured stderr call -*\",\n \"stderr after\",\n \"*= 1 passed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_conftest_runtest_setup_test_capture_badoutput_issue412.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_conftest_runtest_setup_test_capture_badoutput_issue412.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 760, "end_line": 792, "span_ids": ["test_capture_conftest_runtest_setup", "test_capture_badoutput_issue412"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def test_capture_conftest_runtest_setup(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_runtest_setup():\n print(\"hello19\")\n \"\"\"\n )\n pytester.makepyfile(\"def test_func(): pass\")\n result = pytester.runpytest()\n assert result.ret == 0\n result.stdout.no_fnmatch_line(\"*hello19*\")\n\n\ndef test_capture_badoutput_issue412(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import os\n\n def test_func():\n omg = bytearray([1,129,1])\n os.write(1, omg)\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"--capture=fd\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *def test_func*\n *assert 0*\n *Captured*\n *1 failed*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_early_option_parsing_test_capture_binary_output.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_early_option_parsing_test_capture_binary_output.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 795, "end_line": 826, "span_ids": ["test_capture_binary_output", "test_capture_early_option_parsing"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_capture_early_option_parsing(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_runtest_setup():\n print(\"hello19\")\n \"\"\"\n )\n pytester.makepyfile(\"def test_func(): pass\")\n result = pytester.runpytest(\"-vs\")\n assert result.ret == 0\n assert \"hello19\" in result.stdout.str()\n\n\ndef test_capture_binary_output(pytester: Pytester) -> None:\n pytester.makepyfile(\n r\"\"\"\n import pytest\n\n def test_a():\n import sys\n import subprocess\n subprocess.call([sys.executable, __file__])\n\n def test_foo():\n import os;os.write(1, b'\\xc3')\n\n if __name__ == '__main__':\n test_foo()\n \"\"\"\n )\n result = pytester.runpytest(\"--assert=plain\")\n result.assert_outcomes(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture_TestFDCapture.test_stdin.assert_x_b_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture_TestFDCapture.test_stdin.assert_x_b_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 956, "end_line": 999, "span_ids": ["TestFDCapture.test_simple", "TestFDCapture.test_simple_many_check_open_files", "TestFDCapture.test_simple_fail_second_start", "TestFDCapture.test_simple_many", "TestFDCapture.test_stdin", "TestFDCapture", "TestFDCapture.test_stderr"], "tokens": 334}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFDCapture:\n def test_simple(self, tmpfile: BinaryIO) -> None:\n fd = tmpfile.fileno()\n cap = capture.FDCapture(fd)\n data = b\"hello\"\n os.write(fd, data)\n pytest.raises(AssertionError, cap.snap)\n cap.done()\n cap = capture.FDCapture(fd)\n cap.start()\n os.write(fd, data)\n s = cap.snap()\n cap.done()\n assert s == \"hello\"\n\n def test_simple_many(self, tmpfile: BinaryIO) -> None:\n for i in range(10):\n self.test_simple(tmpfile)\n\n def test_simple_many_check_open_files(self, pytester: Pytester) -> None:\n with lsof_check():\n with pytester.makepyfile(\"\").open(\"wb+\") as tmpfile:\n self.test_simple_many(tmpfile)\n\n def test_simple_fail_second_start(self, tmpfile: BinaryIO) -> None:\n fd = tmpfile.fileno()\n cap = capture.FDCapture(fd)\n cap.done()\n pytest.raises(AssertionError, cap.start)\n\n def test_stderr(self) -> None:\n cap = capture.FDCapture(2)\n cap.start()\n print(\"hello\", file=sys.stderr)\n s = cap.snap()\n cap.done()\n assert s == \"hello\\n\"\n\n def test_stdin(self) -> None:\n cap = capture.FDCapture(0)\n cap.start()\n x = os.read(0, 100).strip()\n cap.done()\n assert x == b\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture.test_writeorg_TestFDCapture.test_writeorg.with_open_tmpfile_name_.assert_stmp_data2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestFDCapture.test_writeorg_TestFDCapture.test_writeorg.with_open_tmpfile_name_.assert_stmp_data2", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1001, "end_line": 1013, "span_ids": ["TestFDCapture.test_writeorg"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFDCapture:\n\n def test_writeorg(self, tmpfile: BinaryIO) -> None:\n data1, data2 = b\"foo\", b\"bar\"\n cap = capture.FDCapture(tmpfile.fileno())\n cap.start()\n tmpfile.write(data1)\n tmpfile.flush()\n cap.writeorg(data2.decode(\"ascii\"))\n scap = cap.snap()\n cap.done()\n assert scap == data1.decode(\"ascii\")\n with open(tmpfile.name, \"rb\") as stmp_file:\n stmp = stmp_file.read()\n assert stmp == data2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture.test_capturing_error_recursive_TestStdCapture.test_stdin_nulled_by_default.with_self_getcapture_.pytest_raises_OSError_sy": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCapture.test_capturing_error_recursive_TestStdCapture.test_stdin_nulled_by_default.with_self_getcapture_.pytest_raises_OSError_sy", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1140, "end_line": 1178, "span_ids": ["TestStdCapture.test_just_out_capture", "TestStdCapture.test_capturing_error_recursive", "TestStdCapture.test_just_err_capture", "TestStdCapture.test_stdin_restored", "TestStdCapture.test_stdin_nulled_by_default"], "tokens": 331}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStdCapture:\n\n def test_capturing_error_recursive(self) -> None:\n with self.getcapture() as cap1:\n print(\"cap1\")\n with self.getcapture() as cap2:\n print(\"cap2\")\n out2, err2 = cap2.readouterr()\n out1, err1 = cap1.readouterr()\n assert out1 == \"cap1\\n\"\n assert out2 == \"cap2\\n\"\n\n def test_just_out_capture(self) -> None:\n with self.getcapture(out=True, err=False) as cap:\n sys.stdout.write(\"hello\")\n sys.stderr.write(\"world\")\n out, err = cap.readouterr()\n assert out == \"hello\"\n assert not err\n\n def test_just_err_capture(self) -> None:\n with self.getcapture(out=False, err=True) as cap:\n sys.stdout.write(\"hello\")\n sys.stderr.write(\"world\")\n out, err = cap.readouterr()\n assert err == \"world\"\n assert not out\n\n def test_stdin_restored(self) -> None:\n old = sys.stdin\n with self.getcapture(in_=True):\n newstdin = sys.stdin\n assert newstdin != sys.stdin\n assert sys.stdin is old\n\n def test_stdin_nulled_by_default(self) -> None:\n print(\"XXX this test may well hang instead of crashing\")\n print(\"XXX which indicates an error in the underlying capturing\")\n print(\"XXX mechanisms\")\n with self.getcapture():\n pytest.raises(OSError, sys.stdin.read)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestTeeStdCapture_TestTeeStdCapture.test_capturing_error_recursive.assert_out2_cap2_n_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestTeeStdCapture_TestTeeStdCapture.test_capturing_error_recursive.assert_out2_cap2_n_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1181, "end_line": 1195, "span_ids": ["TestTeeStdCapture", "TestTeeStdCapture.test_capturing_error_recursive"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTeeStdCapture(TestStdCapture):\n captureclass = staticmethod(TeeStdCapture)\n\n def test_capturing_error_recursive(self) -> None:\n r\"\"\"For TeeStdCapture since we passthrough 
stderr/stdout, cap1\n should get all output, while cap2 should only get \"cap2\\n\".\"\"\"\n\n with self.getcapture() as cap1:\n print(\"cap1\")\n with self.getcapture() as cap2:\n print(\"cap2\")\n out2, err2 = cap2.readouterr()\n out1, err1 = cap1.readouterr()\n assert out1 == \"cap1\\ncap2\\n\"\n assert out2 == \"cap2\\n\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestNodekeywords.test_keyword_matching_is_case_insensitive_by_default_TestNodekeywords.test_keyword_matching_is_case_insensitive_by_default.for_expression_in_speci.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestNodekeywords.test_keyword_matching_is_case_insensitive_by_default_TestNodekeywords.test_keyword_matching_is_case_insensitive_by_default.for_expression_in_speci.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 816, "end_line": 853, "span_ids": ["TestNodekeywords.test_keyword_matching_is_case_insensitive_by_default"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNodekeywords:\n\n def test_keyword_matching_is_case_insensitive_by_default(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Check that selection via -k EXPRESSION is case-insensitive.\n\n Since markers are also added to the node keywords, they too can\n be matched without having to think about case sensitivity.\n\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def test_sPeCiFiCToPiC_1():\n assert True\n\n class TestSpecificTopic_2:\n def test(self):\n assert True\n\n @pytest.mark.sPeCiFiCToPic_3\n def test():\n assert True\n\n @pytest.mark.sPeCiFiCToPic_4\n class Test:\n def test(self):\n assert True\n\n def test_failing_5():\n assert False, \"This should not match\"\n\n \"\"\"\n )\n num_matching_tests = 4\n for expression in (\"specifictopic\", \"SPECIFICTOPIC\", \"SpecificTopic\"):\n reprec = pytester.inline_run(\"-k \" + expression)\n reprec.assertoutcome(passed=num_matching_tests, failed=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_with_maxfail_bigger_than_n_errors_test_exit_on_collection_with_maxfail_bigger_than_n_errors.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_exit_on_collection_with_maxfail_bigger_than_n_errors_test_exit_on_collection_with_maxfail_bigger_than_n_errors.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 919, "end_line": 940, "span_ids": 
["test_exit_on_collection_with_maxfail_bigger_than_n_errors"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_exit_on_collection_with_maxfail_bigger_than_n_errors(\n pytester: Pytester,\n) -> None:\n \"\"\"\n Verify the test run aborts due to collection errors even if maxfail count of\n errors was not reached.\n \"\"\"\n pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)\n\n res = pytester.runpytest(\"--maxfail=4\")\n assert res.ret == 2\n res.stdout.fnmatch_lines(\n [\n \"collected 2 items / 2 errors\",\n \"*ERROR collecting test_02_import_error.py*\",\n \"*No module named *asdfa*\",\n \"*ERROR collecting test_03_import_error.py*\",\n \"*No module named *asdfa*\",\n \"*! Interrupted: 2 errors during collection !*\",\n \"*= 2 errors in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collector_respects_tbstyle_test_collector_respects_tbstyle.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collector_respects_tbstyle_test_collector_respects_tbstyle.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1304, "end_line": 1318, "span_ids": ["test_collector_respects_tbstyle"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collector_respects_tbstyle(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"assert 0\")\n result = pytester.runpytest(p1, \"--tb=native\")\n assert result.ret == ExitCode.INTERRUPTED\n result.stdout.fnmatch_lines(\n [\n \"*_ ERROR collecting test_collector_respects_tbstyle.py _*\",\n \"Traceback (most recent call last):\",\n ' File \"*/test_collector_respects_tbstyle.py\", line 1, in ',\n \" assert 0\",\n \"AssertionError: assert 0\",\n \"*! 
Interrupted: 1 error during collection !*\",\n \"*= 1 error in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_does_not_eagerly_collect_packages_test_does_not_put_src_on_path.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_does_not_eagerly_collect_packages_test_does_not_put_src_on_path.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1321, "end_line": 1339, "span_ids": ["test_does_not_put_src_on_path", "test_does_not_eagerly_collect_packages"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_does_not_eagerly_collect_packages(pytester: Pytester) -> None:\n pytester.makepyfile(\"def test(): pass\")\n pydir = pytester.mkpydir(\"foopkg\")\n pydir.joinpath(\"__init__.py\").write_text(\"assert False\")\n result = pytester.runpytest()\n assert result.ret == ExitCode.OK\n\n\ndef test_does_not_put_src_on_path(pytester: Pytester) -> None:\n # `src` is not on sys.path so it should not be importable\n ensure_file(pytester.path / \"src/nope/__init__.py\")\n pytester.makepyfile(\n \"import pytest\\n\"\n \"def test():\\n\"\n \" with pytest.raises(ImportError):\\n\"\n \" import nope\\n\"\n )\n result = pytester.runpytest()\n assert result.ret == ExitCode.OK", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_real_func_loop_limit_test_real_func_loop_limit.with_pytest_raises_.get_real_func_evil_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_real_func_loop_limit_test_real_func_loop_limit.with_pytest_raises_.get_real_func_evil_", "embedding": null, "metadata": {"file_path": "testing/test_compat.py", "file_name": "test_compat.py", "file_type": "text/x-python", "category": "test", "start_line": 33, "end_line": 56, "span_ids": ["test_real_func_loop_limit.Evil", "test_real_func_loop_limit", "test_real_func_loop_limit.Evil.__init__"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_real_func_loop_limit() -> None:\n class Evil:\n def __init__(self):\n self.left = 1000\n\n def __repr__(self):\n return f\"\"\n\n def __getattr__(self, attr):\n if not self.left:\n raise RuntimeError(\"it's over\") # pragma: no cover\n self.left -= 1\n return self\n\n evil = Evil()\n\n with pytest.raises(\n ValueError,\n match=(\n \"could not find real function of \\n\"\n \"stopped at \"\n ),\n ):\n 
get_real_func(evil)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigFromdictargs_TestConfigFromdictargs.test_invocation_params_args.assert_config_option_capt": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigFromdictargs_TestConfigFromdictargs.test_invocation_params_args.assert_config_option_capt", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 819, "end_line": 841, "span_ids": ["TestConfigFromdictargs.test_invocation_params_args", "TestConfigFromdictargs.test_basic_behavior", "TestConfigFromdictargs"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigFromdictargs:\n def test_basic_behavior(self, _sys_snapshot) -> None:\n option_dict = {\"verbose\": 444, \"foo\": \"bar\", \"capture\": \"no\"}\n args = [\"a\", \"b\"]\n\n config = Config.fromdictargs(option_dict, args)\n with pytest.raises(AssertionError):\n config.parse([\"should refuse to parse again\"])\n assert config.option.verbose == 444\n assert config.option.foo == \"bar\"\n assert config.option.capture == \"no\"\n assert config.args == args\n\n def test_invocation_params_args(self, _sys_snapshot) -> None:\n \"\"\"Show that fromdictargs can handle args in their \"orig\" format\"\"\"\n option_dict: Dict[str, object] = {}\n args = [\"-vvvv\", \"-s\", \"a\", \"b\"]\n\n config = Config.fromdictargs(option_dict, args)\n assert config.args == [\"a\", \"b\"]\n assert config.invocation_params.args == tuple(args)\n assert config.option.verbose == 4\n assert config.option.capture == \"no\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_disable_plugin_autoload_test_disable_plugin_autoload.if_should_load_.else_.assert_PseudoPlugin_attrs": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_disable_plugin_autoload_test_disable_plugin_autoload.if_should_load_.else_.assert_PseudoPlugin_attrs", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1040, "end_line": 1084, "span_ids": ["test_disable_plugin_autoload", "test_disable_plugin_autoload.Distribution", "test_disable_plugin_autoload.DummyEntryPoint:2", "test_disable_plugin_autoload.PseudoPlugin", "test_disable_plugin_autoload.DummyEntryPoint", "test_disable_plugin_autoload.Distribution:2", "test_disable_plugin_autoload.PseudoPlugin:2"], "tokens": 321}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"parse_args,should_load\", [((\"-p\", \"mytestplugin\"), True), ((), False)]\n)\ndef test_disable_plugin_autoload(\n pytester: Pytester,\n monkeypatch: MonkeyPatch,\n parse_args: Union[Tuple[str, str], Tuple[()]],\n should_load: bool,\n) -> None:\n class DummyEntryPoint:\n project_name = name = \"mytestplugin\"\n group = \"pytest11\"\n version = \"1.0\"\n\n def load(self):\n return sys.modules[self.name]\n\n class Distribution:\n metadata = {\"name\": \"foo\"}\n entry_points = (DummyEntryPoint(),)\n files = ()\n\n class PseudoPlugin:\n x = 42\n\n attrs_used = []\n\n def __getattr__(self, name):\n assert name == \"__loader__\"\n self.attrs_used.append(name)\n return object()\n\n def distributions():\n return (Distribution(),)\n\n monkeypatch.setenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", \"1\")\n monkeypatch.setattr(importlib_metadata, \"distributions\", distributions)\n monkeypatch.setitem(sys.modules, \"mytestplugin\", PseudoPlugin()) # type: ignore[misc]\n config = pytester.parseconfig(*parse_args)\n has_loaded = config.pluginmanager.get_plugin(\"mytestplugin\") is not None\n assert has_loaded == should_load\n if should_load:\n assert PseudoPlugin.attrs_used == [\"__loader__\"]\n else:\n assert PseudoPlugin.attrs_used == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_plugin_loading_order_test_plugin_loading_order.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_plugin_loading_order_test_plugin_loading_order.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1087, "end_line": 1110, "span_ids": ["test_plugin_loading_order"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_plugin_loading_order(pytester: Pytester) -> None:\n \"\"\"Test order of plugin loading with `-p`.\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n def test_terminal_plugin(request):\n import myplugin\n assert myplugin.terminal_plugin == [False, True]\n \"\"\",\n **{\n \"myplugin\": \"\"\"\n terminal_plugin = []\n\n def pytest_configure(config):\n terminal_plugin.append(bool(config.pluginmanager.get_plugin(\"terminalreporter\")))\n\n def pytest_sessionstart(session):\n config = session.config\n terminal_plugin.append(bool(config.pluginmanager.get_plugin(\"terminalreporter\")))\n \"\"\"\n },\n )\n pytester.syspathinsert()\n result = pytester.runpytest(\"-p\", \"myplugin\", str(p1))\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_addopts_before_initini_TestOverrideIniArgs.test_addopts_from_env_not_concatenated.assert_": {"__data__": 
{"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_addopts_before_initini_TestOverrideIniArgs.test_addopts_from_env_not_concatenated.assert_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1584, "end_line": 1604, "span_ids": ["TestOverrideIniArgs.test_addopts_from_env_not_concatenated", "TestOverrideIniArgs.test_addopts_before_initini"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOverrideIniArgs:\n\n def test_addopts_before_initini(\n self, monkeypatch: MonkeyPatch, _config_for_test, _sys_snapshot\n ) -> None:\n cache_dir = \".custom_cache\"\n monkeypatch.setenv(\"PYTEST_ADDOPTS\", \"-o cache_dir=%s\" % cache_dir)\n config = _config_for_test\n config._preparse([], addopts=True)\n assert config._override_ini == [\"cache_dir=%s\" % cache_dir]\n\n def test_addopts_from_env_not_concatenated(\n self, monkeypatch: MonkeyPatch, _config_for_test\n ) -> None:\n \"\"\"PYTEST_ADDOPTS should not take values from normal args (#4265).\"\"\"\n monkeypatch.setenv(\"PYTEST_ADDOPTS\", \"-o\")\n config = _config_for_test\n with pytest.raises(UsageError) as excinfo:\n config._preparse([\"cache_dir=ignored\"], addopts=True)\n assert (\n \"error: argument -o/--override-ini: expected one argument (via PYTEST_ADDOPTS)\"\n in excinfo.value.args[0]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_os_custom_pdb_calls.return.called": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_os_custom_pdb_calls.return.called", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 57, "span_ids": ["custom_pdb_calls._CustomPdb", "impl", "impl:2", "docstring", "runpdb_and_get_report", "docstring:2", "imports", "impl:3", "custom_pdb_calls._CustomPdb:2", "custom_pdb_calls", "pdb_env"], "tokens": 379}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport sys\nfrom typing import List\n\nimport _pytest._code\nimport pytest\nfrom _pytest.debugging import _validate_usepdb_cls\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\ntry:\n # Type ignored for Python <= 3.6.\n breakpoint # type: ignore\nexcept NameError:\n SUPPORTS_BREAKPOINT_BUILTIN = False\nelse:\n SUPPORTS_BREAKPOINT_BUILTIN = True\n\n\n_ENVIRON_PYTHONBREAKPOINT = os.environ.get(\"PYTHONBREAKPOINT\", \"\")\n\n\n@pytest.fixture(autouse=True)\ndef pdb_env(request):\n if \"pytester\" in request.fixturenames:\n # Disable pdb++ with inner tests.\n pytester = 
request.getfixturevalue(\"pytester\")\n pytester._monkeypatch.setenv(\"PDBPP_HIJACK_PDB\", \"0\")\n\n\ndef runpdb_and_get_report(pytester: Pytester, source: str):\n p = pytester.makepyfile(source)\n result = pytester.runpytest_inprocess(\"--pdb\", p)\n reports = result.reprec.getreports(\"pytest_runtest_logreport\") # type: ignore[attr-defined]\n assert len(reports) == 3, reports # setup/call/teardown\n return reports[1]\n\n\n@pytest.fixture\ndef custom_pdb_calls() -> List[str]:\n called = []\n\n # install dummy debugger class and track which methods were called on it\n class _CustomPdb:\n quitting = False\n\n def __init__(self, *args, **kwargs):\n called.append(\"init\")\n\n def reset(self):\n called.append(\"reset\")\n\n def interaction(self, *args):\n called.append(\"interaction\")\n\n _pytest._CustomPdb = _CustomPdb # type: ignore\n return called", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB_TestPDB.flush.assert_not_child_isalive_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB_TestPDB.flush.assert_not_child_isalive_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 84, "end_line": 164, "span_ids": ["TestPDB.flush", "TestPDB", "TestPDB.test_pdb_on_xfail", "TestPDB.test_pdb_on_skip", "TestPDB.test_pdb_on_BdbQuit", "TestPDB.pdblist", "TestPDB.test_pdb_on_fail", "TestPDB.test_pdb_on_KeyboardInterrupt"], "tokens": 515}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n @pytest.fixture\n def pdblist(self, request):\n monkeypatch = request.getfixturevalue(\"monkeypatch\")\n pdblist = []\n\n def mypdb(*args):\n pdblist.append(args)\n\n plugin = request.config.pluginmanager.getplugin(\"debugging\")\n monkeypatch.setattr(plugin, \"post_mortem\", mypdb)\n return pdblist\n\n def test_pdb_on_fail(self, pytester: Pytester, pdblist) -> None:\n rep = runpdb_and_get_report(\n pytester,\n \"\"\"\n def test_func():\n assert 0\n \"\"\",\n )\n assert rep.failed\n assert len(pdblist) == 1\n tb = _pytest._code.Traceback(pdblist[0][0])\n assert tb[-1].name == \"test_func\"\n\n def test_pdb_on_xfail(self, pytester: Pytester, pdblist) -> None:\n rep = runpdb_and_get_report(\n pytester,\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test_func():\n assert 0\n \"\"\",\n )\n assert \"xfail\" in rep.keywords\n assert not pdblist\n\n def test_pdb_on_skip(self, pytester, pdblist) -> None:\n rep = runpdb_and_get_report(\n pytester,\n \"\"\"\n import pytest\n def test_func():\n pytest.skip(\"hello\")\n \"\"\",\n )\n assert rep.skipped\n assert len(pdblist) == 0\n\n def test_pdb_on_BdbQuit(self, pytester, pdblist) -> None:\n rep = runpdb_and_get_report(\n pytester,\n \"\"\"\n import bdb\n def test_func():\n raise bdb.BdbQuit\n \"\"\",\n )\n assert rep.failed\n assert len(pdblist) == 0\n\n def test_pdb_on_KeyboardInterrupt(self, pytester, pdblist) -> None:\n rep = runpdb_and_get_report(\n pytester,\n \"\"\"\n def 
test_func():\n raise KeyboardInterrupt\n \"\"\",\n )\n assert rep.failed\n assert len(pdblist) == 1\n\n @staticmethod\n def flush(child):\n if child.isalive():\n # Read if the test has not (e.g. test_pdb_unittest_skip).\n child.read()\n child.wait()\n assert not child.isalive()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_unittest_postmortem_TestPDB.test_pdb_unittest_postmortem.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_unittest_postmortem_TestPDB.test_pdb_unittest_postmortem.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 166, "end_line": 184, "span_ids": ["TestPDB.test_pdb_unittest_postmortem"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_unittest_postmortem(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import unittest\n class Blub(unittest.TestCase):\n def tearDown(self):\n self.filename = None\n def test_false(self):\n self.filename = 'debug' + '.me'\n assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(f\"--pdb {p1}\")\n child.expect(\"Pdb\")\n child.sendline(\"p self.filename\")\n child.sendeof()\n rest = child.read().decode(\"utf8\")\n assert \"debug.me\" in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_unittest_skip_TestPDB.test_pdb_unittest_skip.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_unittest_skip_TestPDB.test_pdb_unittest_skip.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 186, "end_line": 201, "span_ids": ["TestPDB.test_pdb_unittest_skip"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_unittest_skip(self, pytester: Pytester) -> None:\n \"\"\"Test for issue #2137\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n import unittest\n @unittest.skipIf(True, 'Skipping also with pdb active')\n class MyTestCase(unittest.TestCase):\n def test_one(self):\n assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(f\"-rs --pdb {p1}\")\n child.expect(\"Skipping also with pdb active\")\n child.expect_exact(\"= 1 skipped in\")\n child.sendeof()\n 
self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_stdout_and_stderr_TestPDB.test_pdb_print_captured_stdout_and_stderr.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_stdout_and_stderr_TestPDB.test_pdb_print_captured_stdout_and_stderr.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 203, "end_line": 230, "span_ids": ["TestPDB.test_pdb_print_captured_stdout_and_stderr"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_print_captured_stdout_and_stderr(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_1():\n import sys\n sys.stderr.write(\"get\\\\x20rekt\")\n print(\"get\\\\x20rekt\")\n assert False\n\n def test_not_called_due_to_quit():\n pass\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--pdb %s\" % p1)\n child.expect(\"captured stdout\")\n child.expect(\"get rekt\")\n child.expect(\"captured stderr\")\n child.expect(\"get rekt\")\n child.expect(\"traceback\")\n child.expect(\"def test_1\")\n child.expect(\"Pdb\")\n child.sendeof()\n rest = child.read().decode(\"utf8\")\n assert \"Exit: Quitting debugger\" in rest\n assert \"= 1 failed in\" in rest\n assert \"def test_1\" not in rest\n assert \"get rekt\" not in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_dont_print_empty_captured_stdout_and_stderr_TestPDB.test_pdb_dont_print_empty_captured_stdout_and_stderr.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_dont_print_empty_captured_stdout_and_stderr_TestPDB.test_pdb_dont_print_empty_captured_stdout_and_stderr.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 232, "end_line": 247, "span_ids": ["TestPDB.test_pdb_dont_print_empty_captured_stdout_and_stderr"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_dont_print_empty_captured_stdout_and_stderr(\n self, pytester: Pytester\n ) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_1():\n assert False\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--pdb %s\" % p1)\n 
child.expect(\"Pdb\")\n output = child.before.decode(\"utf8\")\n child.sendeof()\n assert \"captured stdout\" not in output\n assert \"captured stderr\" not in output\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_logs_TestPDB.test_pdb_print_captured_logs.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_logs_TestPDB.test_pdb_print_captured_logs.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 249, "end_line": 267, "span_ids": ["TestPDB.test_pdb_print_captured_logs"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n @pytest.mark.parametrize(\"showcapture\", [\"all\", \"no\", \"log\"])\n def test_pdb_print_captured_logs(self, pytester, showcapture: str) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_1():\n import logging\n logging.warn(\"get \" + \"rekt\")\n assert False\n \"\"\"\n )\n child = pytester.spawn_pytest(f\"--show-capture={showcapture} --pdb {p1}\")\n if showcapture in (\"all\", \"log\"):\n child.expect(\"captured log\")\n child.expect(\"get rekt\")\n child.expect(\"Pdb\")\n child.sendeof()\n rest = child.read().decode(\"utf8\")\n assert \"1 failed\" in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_logs_nologging_TestPDB.test_pdb_print_captured_logs_nologging.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_print_captured_logs_nologging_TestPDB.test_pdb_print_captured_logs_nologging.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 269, "end_line": 286, "span_ids": ["TestPDB.test_pdb_print_captured_logs_nologging"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_print_captured_logs_nologging(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_1():\n import logging\n logging.warn(\"get \" + \"rekt\")\n assert False\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--show-capture=all --pdb -p no:logging %s\" % p1)\n child.expect(\"get rekt\")\n output = child.before.decode(\"utf8\")\n assert \"captured log\" 
not in output\n child.expect(\"Pdb\")\n child.sendeof()\n rest = child.read().decode(\"utf8\")\n assert \"1 failed\" in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_exception_TestPDB.test_pdb_interaction_on_collection_issue181.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_exception_TestPDB.test_pdb_interaction_on_collection_issue181.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 288, "end_line": 320, "span_ids": ["TestPDB.test_pdb_interaction_on_collection_issue181", "TestPDB.test_pdb_interaction_exception"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_interaction_exception(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def globalfunc():\n pass\n def test_1():\n pytest.raises(ValueError, globalfunc)\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--pdb %s\" % p1)\n child.expect(\".*def test_1\")\n child.expect(\".*pytest.raises.*globalfunc\")\n child.expect(\"Pdb\")\n child.sendline(\"globalfunc\")\n child.expect(\".*function\")\n child.sendeof()\n child.expect(\"1 failed\")\n self.flush(child)\n\n def test_pdb_interaction_on_collection_issue181(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n xxx\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--pdb %s\" % p1)\n # child.expect(\".*import pytest.*\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n child.expect(\"1 error\")\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_on_internal_error_TestPDB.test_pdb_interaction_on_internal_error.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_on_internal_error_TestPDB.test_pdb_interaction_on_internal_error.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 322, "end_line": 346, "span_ids": ["TestPDB.test_pdb_interaction_on_internal_error"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_interaction_on_internal_error(self, pytester: Pytester) -> None:\n 
pytester.makeconftest(\n \"\"\"\n def pytest_runtest_protocol():\n 0/0\n \"\"\"\n )\n p1 = pytester.makepyfile(\"def test_func(): pass\")\n child = pytester.spawn_pytest(\"--pdb %s\" % p1)\n child.expect(\"Pdb\")\n\n # INTERNALERROR is only displayed once via terminal reporter.\n assert (\n len(\n [\n x\n for x in child.before.decode().splitlines()\n if x.startswith(\"INTERNALERROR> Traceback\")\n ]\n )\n == 1\n )\n\n child.sendeof()\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_capturing_simple_TestPDB.test_pdb_interaction_capturing_simple.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_capturing_simple_TestPDB.test_pdb_interaction_capturing_simple.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 364, "end_line": 386, "span_ids": ["TestPDB.test_pdb_interaction_capturing_simple"], "tokens": 186}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_interaction_capturing_simple(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_1():\n i = 0\n print(\"hello17\")\n pytest.set_trace()\n i == 1\n assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(r\"test_1\\(\\)\")\n child.expect(\"i == 1\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n rest = child.read().decode(\"utf-8\")\n assert \"AssertionError\" in rest\n assert \"1 failed\" in rest\n assert \"def test_1\" in rest\n assert \"hello17\" in rest # out is captured\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_set_trace_kwargs_TestPDB.test_pdb_set_trace_kwargs.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_set_trace_kwargs_TestPDB.test_pdb_set_trace_kwargs.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 388, "end_line": 409, "span_ids": ["TestPDB.test_pdb_set_trace_kwargs"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_set_trace_kwargs(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_1():\n i = 0\n 
print(\"hello17\")\n pytest.set_trace(header=\"== my_header ==\")\n x = 3\n assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"== my_header ==\")\n assert \"PDB set_trace\" not in child.before.decode()\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n rest = child.read().decode(\"utf-8\")\n assert \"1 failed\" in rest\n assert \"def test_1\" in rest\n assert \"hello17\" in rest # out is captured\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_with_caplog_on_pdb_invocation_TestPDB.test_pdb_with_caplog_on_pdb_invocation.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_with_caplog_on_pdb_invocation_TestPDB.test_pdb_with_caplog_on_pdb_invocation.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 446, "end_line": 462, "span_ids": ["TestPDB.test_pdb_with_caplog_on_pdb_invocation"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_with_caplog_on_pdb_invocation(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_1(capsys, caplog):\n import logging\n logging.getLogger(__name__).warning(\"some_warning\")\n assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--pdb %s\" % str(p1))\n child.send(\"caplog.record_tuples\\n\")\n child.expect_exact(\n \"[('test_pdb_with_caplog_on_pdb_invocation', 30, 'some_warning')]\"\n )\n child.sendeof()\n child.read()\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_set_trace_capturing_afterwards_TestPDB.test_set_trace_capturing_afterwards.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_set_trace_capturing_afterwards_TestPDB.test_set_trace_capturing_afterwards.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 464, "end_line": 483, "span_ids": ["TestPDB.test_set_trace_capturing_afterwards"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_set_trace_capturing_afterwards(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pdb\n def test_1():\n pdb.set_trace()\n def test_2():\n print(\"hello\")\n 
assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"test_1\")\n child.send(\"c\\n\")\n child.expect(\"test_2\")\n child.expect(\"Captured\")\n child.expect(\"hello\")\n child.sendeof()\n child.read()\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_doctest_TestPDB.test_pdb_interaction_doctest.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_doctest_TestPDB.test_pdb_interaction_doctest.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 485, "end_line": 509, "span_ids": ["TestPDB.test_pdb_interaction_doctest"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_interaction_doctest(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def function_1():\n '''\n >>> i = 0\n >>> assert i == 1\n '''\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--doctest-modules --pdb %s\" % p1)\n child.expect(\"Pdb\")\n\n assert \"UNEXPECTED EXCEPTION: AssertionError()\" in child.before.decode(\"utf8\")\n\n child.sendline(\"'i=%i.' % i\")\n child.expect(\"Pdb\")\n assert \"\\r\\n'i=0.'\\r\\n\" in child.before.decode(\"utf8\")\n\n child.sendeof()\n rest = child.read().decode(\"utf8\")\n assert \"! 
_pytest.outcomes.Exit: Quitting debugger !\" in rest\n assert \"BdbQuit\" not in rest\n assert \"1 failed\" in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_doctest_set_trace_quit_TestPDB.test_doctest_set_trace_quit.assert_UNEXPECTED_EXCEPT": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_doctest_set_trace_quit_TestPDB.test_doctest_set_trace_quit.assert_UNEXPECTED_EXCEPT", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 511, "end_line": 530, "span_ids": ["TestPDB.test_doctest_set_trace_quit"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_doctest_set_trace_quit(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def function_1():\n '''\n >>> __import__('pdb').set_trace()\n '''\n \"\"\"\n )\n # NOTE: does not use pytest.set_trace, but Python's patched pdb,\n # therefore \"-s\" is required.\n child = pytester.spawn_pytest(\"--doctest-modules --pdb -s %s\" % p1)\n child.expect(\"Pdb\")\n child.sendline(\"q\")\n rest = child.read().decode(\"utf8\")\n\n assert \"! _pytest.outcomes.Exit: Quitting debugger !\" in rest\n assert \"= no tests ran in\" in rest\n assert \"BdbQuit\" not in rest\n assert \"UNEXPECTED EXCEPTION\" not in rest", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_capturing_twice_TestPDB.test_pdb_interaction_capturing_twice.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_interaction_capturing_twice_TestPDB.test_pdb_interaction_capturing_twice.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 532, "end_line": 565, "span_ids": ["TestPDB.test_pdb_interaction_capturing_twice"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_interaction_capturing_twice(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_1():\n i = 0\n print(\"hello17\")\n pytest.set_trace()\n x = 3\n print(\"hello18\")\n pytest.set_trace()\n x = 4\n assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(r\"PDB set_trace \\(IO-capturing turned off\\)\")\n 
child.expect(\"test_1\")\n child.expect(\"x = 3\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n child.expect(r\"PDB continue \\(IO-capturing resumed\\)\")\n child.expect(r\"PDB set_trace \\(IO-capturing turned off\\)\")\n child.expect(\"x = 4\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n child.expect(\"_ test_1 _\")\n child.expect(\"def test_1\")\n rest = child.read().decode(\"utf8\")\n assert \"Captured stdout call\" in rest\n assert \"hello17\" in rest # out is captured\n assert \"hello18\" in rest # out is captured\n assert \"1 failed\" in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_with_injected_do_debug_TestPDB.test_pdb_with_injected_do_debug.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_with_injected_do_debug_TestPDB.test_pdb_with_injected_do_debug.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 567, "end_line": 640, "span_ids": ["TestPDB.test_pdb_with_injected_do_debug"], "tokens": 641}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_with_injected_do_debug(self, pytester: Pytester) -> None:\n \"\"\"Simulates pdbpp, which injects Pdb into do_debug, and uses\n self.__class__ in do_continue.\n \"\"\"\n p1 = pytester.makepyfile(\n mytest=\"\"\"\n import pdb\n import pytest\n\n count_continue = 0\n\n class CustomPdb(pdb.Pdb, object):\n def do_debug(self, arg):\n import sys\n import types\n\n do_debug_func = pdb.Pdb.do_debug\n\n newglobals = do_debug_func.__globals__.copy()\n newglobals['Pdb'] = self.__class__\n orig_do_debug = types.FunctionType(\n do_debug_func.__code__, newglobals,\n do_debug_func.__name__, do_debug_func.__defaults__,\n )\n return orig_do_debug(self, arg)\n do_debug.__doc__ = pdb.Pdb.do_debug.__doc__\n\n def do_continue(self, *args, **kwargs):\n global count_continue\n count_continue += 1\n return super(CustomPdb, self).do_continue(*args, **kwargs)\n\n def foo():\n print(\"print_from_foo\")\n\n def test_1():\n i = 0\n print(\"hello17\")\n pytest.set_trace()\n x = 3\n print(\"hello18\")\n\n assert count_continue == 2, \"unexpected_failure: %d != 2\" % count_continue\n pytest.fail(\"expected_failure\")\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--pdbcls=mytest:CustomPdb %s\" % str(p1))\n child.expect(r\"PDB set_trace \\(IO-capturing turned off\\)\")\n child.expect(r\"\\n\\(Pdb\")\n child.sendline(\"debug foo()\")\n child.expect(\"ENTERING RECURSIVE DEBUGGER\")\n child.expect(r\"\\n\\(\\(Pdb\")\n child.sendline(\"c\")\n child.expect(\"LEAVING RECURSIVE DEBUGGER\")\n assert b\"PDB continue\" not in child.before\n # No extra newline.\n assert child.before.endswith(b\"c\\r\\nprint_from_foo\\r\\n\")\n\n # set_debug should not raise outcomes. 
Exit, if used recursively.\n child.sendline(\"debug 42\")\n child.sendline(\"q\")\n child.expect(\"LEAVING RECURSIVE DEBUGGER\")\n assert b\"ENTERING RECURSIVE DEBUGGER\" in child.before\n assert b\"Quitting debugger\" not in child.before\n\n child.sendline(\"c\")\n child.expect(r\"PDB continue \\(IO-capturing resumed\\)\")\n rest = child.read().decode(\"utf8\")\n assert \"hello17\" in rest # out is captured\n assert \"hello18\" in rest # out is captured\n assert \"1 failed\" in rest\n assert \"Failed: expected_failure\" in rest\n assert \"AssertionError: unexpected_failure\" not in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_without_capture_TestPDB.test_pdb_without_capture.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_without_capture_TestPDB.test_pdb_without_capture.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 642, "end_line": 656, "span_ids": ["TestPDB.test_pdb_without_capture"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_without_capture(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_1():\n pytest.set_trace()\n \"\"\"\n )\n child = pytester.spawn_pytest(\"-s %s\" % p1)\n child.expect(r\">>> PDB set_trace >>>\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n child.expect(r\">>> PDB continue >>>\")\n child.expect(\"1 passed\")\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_continue_with_recursive_debug_TestPDB.test_pdb_continue_with_recursive_debug.assert_1_passed_in_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_continue_with_recursive_debug_TestPDB.test_pdb_continue_with_recursive_debug.assert_1_passed_in_in", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 658, "end_line": 749, "span_ids": ["TestPDB.test_pdb_continue_with_recursive_debug"], "tokens": 754}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n @pytest.mark.parametrize(\"capture_arg\", (\"\", \"-s\", \"-p no:capture\"))\n def test_pdb_continue_with_recursive_debug(\n self, capture_arg, pytester: Pytester\n ) -> 
None:\n \"\"\"Full coverage for do_debug without capturing.\n\n This is very similar to test_pdb_interaction_continue_recursive in general,\n but mocks out ``pdb.set_trace`` for providing more coverage.\n \"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n try:\n input = raw_input\n except NameError:\n pass\n\n def set_trace():\n __import__('pdb').set_trace()\n\n def test_1(monkeypatch):\n import _pytest.debugging\n\n class pytestPDBTest(_pytest.debugging.pytestPDB):\n @classmethod\n def set_trace(cls, *args, **kwargs):\n # Init PytestPdbWrapper to handle capturing.\n _pdb = cls._init_pdb(\"set_trace\", *args, **kwargs)\n\n # Mock out pdb.Pdb.do_continue.\n import pdb\n pdb.Pdb.do_continue = lambda self, arg: None\n\n print(\"===\" + \" SET_TRACE ===\")\n assert input() == \"debug set_trace()\"\n\n # Simulate PytestPdbWrapper.do_debug\n cls._recursive_debug += 1\n print(\"ENTERING RECURSIVE DEBUGGER\")\n print(\"===\" + \" SET_TRACE_2 ===\")\n\n assert input() == \"c\"\n _pdb.do_continue(\"\")\n print(\"===\" + \" SET_TRACE_3 ===\")\n\n # Simulate PytestPdbWrapper.do_debug\n print(\"LEAVING RECURSIVE DEBUGGER\")\n cls._recursive_debug -= 1\n\n print(\"===\" + \" SET_TRACE_4 ===\")\n assert input() == \"c\"\n _pdb.do_continue(\"\")\n\n def do_continue(self, arg):\n print(\"=== do_continue\")\n\n monkeypatch.setattr(_pytest.debugging, \"pytestPDB\", pytestPDBTest)\n\n import pdb\n monkeypatch.setattr(pdb, \"set_trace\", pytestPDBTest.set_trace)\n\n set_trace()\n \"\"\"\n )\n child = pytester.spawn_pytest(f\"--tb=short {p1} {capture_arg}\")\n child.expect(\"=== SET_TRACE ===\")\n before = child.before.decode(\"utf8\")\n if not capture_arg:\n assert \">>> PDB set_trace (IO-capturing turned off) >>>\" in before\n else:\n assert \">>> PDB set_trace >>>\" in before\n child.sendline(\"debug set_trace()\")\n child.expect(\"=== SET_TRACE_2 ===\")\n before = child.before.decode(\"utf8\")\n assert \"\\r\\nENTERING RECURSIVE DEBUGGER\\r\\n\" in before\n child.sendline(\"c\")\n child.expect(\"=== SET_TRACE_3 ===\")\n\n # No continue message with recursive debugging.\n before = child.before.decode(\"utf8\")\n assert \">>> PDB continue \" not in before\n\n child.sendline(\"c\")\n child.expect(\"=== SET_TRACE_4 ===\")\n before = child.before.decode(\"utf8\")\n assert \"\\r\\nLEAVING RECURSIVE DEBUGGER\\r\\n\" in before\n child.sendline(\"c\")\n rest = child.read().decode(\"utf8\")\n if not capture_arg:\n assert \"> PDB continue (IO-capturing resumed) >\" in rest\n else:\n assert \"> PDB continue >\" in rest\n assert \"= 1 passed in\" in rest", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_used_outside_test_TestPDB.test_pdb_collection_failure_is_shown.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_used_outside_test_TestPDB.test_pdb_collection_failure_is_shown.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 751, "end_line": 787, "span_ids": ["TestPDB.test_pdb_collection_failure_is_shown", "TestPDB.test_pdb_used_in_generate_tests", "TestPDB.test_pdb_used_outside_test"], "tokens": 289}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_used_outside_test(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n pytest.set_trace()\n x = 5\n \"\"\"\n )\n child = pytester.spawn(f\"{sys.executable} {p1}\")\n child.expect(\"x = 5\")\n child.expect(\"Pdb\")\n child.sendeof()\n self.flush(child)\n\n def test_pdb_used_in_generate_tests(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def pytest_generate_tests(metafunc):\n pytest.set_trace()\n x = 5\n def test_foo(a):\n pass\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"x = 5\")\n child.expect(\"Pdb\")\n child.sendeof()\n self.flush(child)\n\n def test_pdb_collection_failure_is_shown(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"xxx\")\n result = pytester.runpytest_subprocess(\"--pdb\", p1)\n result.stdout.fnmatch_lines(\n [\"E NameError: *xxx*\", \"*! *Exit: Quitting debugger !*\"] # due to EOF\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_enter_leave_pdb_hooks_are_called_TestPDB.test_enter_leave_pdb_hooks_are_called.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_enter_leave_pdb_hooks_are_called_TestPDB.test_enter_leave_pdb_hooks_are_called.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 789, "end_line": 843, "span_ids": ["TestPDB.test_enter_leave_pdb_hooks_are_called"], "tokens": 380}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n @pytest.mark.parametrize(\"post_mortem\", (False, True))\n def test_enter_leave_pdb_hooks_are_called(\n self, post_mortem, pytester: Pytester\n ) -> None:\n pytester.makeconftest(\n \"\"\"\n mypdb = None\n\n def pytest_configure(config):\n config.testing_verification = 'configured'\n\n def pytest_enter_pdb(config, pdb):\n assert config.testing_verification == 'configured'\n print('enter_pdb_hook')\n\n global mypdb\n mypdb = pdb\n mypdb.set_attribute = \"bar\"\n\n def pytest_leave_pdb(config, pdb):\n assert config.testing_verification == 'configured'\n print('leave_pdb_hook')\n\n global mypdb\n assert mypdb is pdb\n assert mypdb.set_attribute == \"bar\"\n \"\"\"\n )\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n\n def test_set_trace():\n pytest.set_trace()\n assert 0\n\n def test_post_mortem():\n assert 0\n \"\"\"\n )\n if post_mortem:\n child = pytester.spawn_pytest(str(p1) + \" --pdb -s -k test_post_mortem\")\n else:\n child = pytester.spawn_pytest(str(p1) + \" -k test_set_trace\")\n child.expect(\"enter_pdb_hook\")\n child.sendline(\"c\")\n if post_mortem:\n 
child.expect(r\"PDB continue\")\n else:\n child.expect(r\"PDB continue \\(IO-capturing resumed\\)\")\n child.expect(\"Captured stdout call\")\n rest = child.read().decode(\"utf8\")\n assert \"leave_pdb_hook\" in rest\n assert \"1 failed\" in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_with_set_trace_TestPDB.test_pdb_custom_cls_with_set_trace.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_with_set_trace_TestPDB.test_pdb_custom_cls_with_set_trace.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 879, "end_line": 910, "span_ids": ["TestPDB.test_pdb_custom_cls_with_set_trace"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_custom_cls_with_set_trace(\n self,\n pytester: Pytester,\n monkeypatch: MonkeyPatch,\n ) -> None:\n pytester.makepyfile(\n custom_pdb=\"\"\"\n class CustomPdb(object):\n def __init__(self, *args, **kwargs):\n skip = kwargs.pop(\"skip\")\n assert skip == [\"foo.*\"]\n print(\"__init__\")\n super(CustomPdb, self).__init__(*args, **kwargs)\n\n def set_trace(*args, **kwargs):\n print('custom set_trace>')\n \"\"\"\n )\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n\n def test_foo():\n pytest.set_trace(skip=['foo.*'])\n \"\"\"\n )\n monkeypatch.setenv(\"PYTHONPATH\", str(pytester.path))\n child = pytester.spawn_pytest(\"--pdbcls=custom_pdb:CustomPdb %s\" % str(p1))\n\n child.expect(\"__init__\")\n child.expect(\"custom set_trace>\")\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_pdb_custom_cls_TestDebuggingBreakpoints.test_pdb_custom_cls.assert_custom_debugger_ho": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_pdb_custom_cls_TestDebuggingBreakpoints.test_pdb_custom_cls.assert_custom_debugger_ho", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 955, "end_line": 969, "span_ids": ["TestDebuggingBreakpoints.test_pdb_custom_cls"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDebuggingBreakpoints:\n\n @pytest.mark.skipif(\n not SUPPORTS_BREAKPOINT_BUILTIN, reason=\"Requires 
breakpoint() builtin\"\n )\n def test_pdb_custom_cls(self, pytester: Pytester, custom_debugger_hook) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_nothing():\n breakpoint()\n \"\"\"\n )\n result = pytester.runpytest_inprocess(\n \"--pdb\", \"--pdbcls=_pytest:_CustomDebugger\", p1\n )\n result.stdout.fnmatch_lines([\"*CustomDebugger*\", \"*1 passed*\"])\n assert custom_debugger_hook == [\"init\", \"set_trace\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_environ_custom_class_TestDebuggingBreakpoints.test_environ_custom_class.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_environ_custom_class_TestDebuggingBreakpoints.test_environ_custom_class.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 971, "end_line": 1003, "span_ids": ["TestDebuggingBreakpoints.test_environ_custom_class"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDebuggingBreakpoints:\n\n @pytest.mark.parametrize(\"arg\", [\"--pdb\", \"\"])\n @pytest.mark.skipif(\n not SUPPORTS_BREAKPOINT_BUILTIN, reason=\"Requires breakpoint() builtin\"\n )\n def test_environ_custom_class(\n self, pytester: Pytester, custom_debugger_hook, arg: str\n ) -> None:\n pytester.makeconftest(\n \"\"\"\n import os\n import sys\n\n os.environ['PYTHONBREAKPOINT'] = '_pytest._CustomDebugger.set_trace'\n\n def pytest_configure(config):\n config._cleanup.append(check_restored)\n\n def check_restored():\n assert sys.breakpointhook == sys.__breakpointhook__\n\n def test_check():\n import _pytest\n assert sys.breakpointhook is _pytest._CustomDebugger.set_trace\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_nothing(): pass\n \"\"\"\n )\n args = (arg,) if arg else ()\n result = pytester.runpytest_subprocess(*args)\n result.stdout.fnmatch_lines([\"*1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_sys_breakpoint_interception_TestDebuggingBreakpoints.test_sys_breakpoint_interception.TestPDB_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_sys_breakpoint_interception_TestDebuggingBreakpoints.test_sys_breakpoint_interception.TestPDB_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1005, "end_line": 1026, "span_ids": ["TestDebuggingBreakpoints.test_sys_breakpoint_interception"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDebuggingBreakpoints:\n\n @pytest.mark.skipif(\n not SUPPORTS_BREAKPOINT_BUILTIN, reason=\"Requires breakpoint() builtin\"\n )\n @pytest.mark.skipif(\n not _ENVIRON_PYTHONBREAKPOINT == \"\",\n reason=\"Requires breakpoint() default value\",\n )\n def test_sys_breakpoint_interception(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_1():\n breakpoint()\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"test_1\")\n child.expect(\"Pdb\")\n child.sendline(\"quit\")\n rest = child.read().decode(\"utf8\")\n assert \"Quitting debugger\" in rest\n assert \"reading from stdin while output\" not in rest\n TestPDB.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_pdb_not_altered_TestDebuggingBreakpoints.test_pdb_not_altered.TestPDB_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints.test_pdb_not_altered_TestDebuggingBreakpoints.test_pdb_not_altered.TestPDB_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1028, "end_line": 1047, "span_ids": ["TestDebuggingBreakpoints.test_pdb_not_altered"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDebuggingBreakpoints:\n\n @pytest.mark.skipif(\n not SUPPORTS_BREAKPOINT_BUILTIN, reason=\"Requires breakpoint() builtin\"\n )\n def test_pdb_not_altered(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pdb\n def test_1():\n pdb.set_trace()\n assert 0\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"test_1\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n rest = child.read().decode(\"utf8\")\n assert \"1 failed\" in rest\n assert \"reading from stdin while output\" not in rest\n TestPDB.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestTraceOption_TestTraceOption.test_trace_sets_breakpoint.TestPDB_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestTraceOption_TestTraceOption.test_trace_sets_breakpoint.TestPDB_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1050, "end_line": 1080, "span_ids": ["TestTraceOption", "TestTraceOption.test_trace_sets_breakpoint"], "tokens": 230}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceOption:\n def test_trace_sets_breakpoint(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_1():\n assert True\n\n def test_2():\n pass\n\n def test_3():\n pass\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--trace \" + str(p1))\n child.expect(\"test_1\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n child.expect(\"test_2\")\n child.expect(\"Pdb\")\n child.sendline(\"c\")\n child.expect(\"test_3\")\n child.expect(\"Pdb\")\n child.sendline(\"q\")\n child.expect_exact(\"Exit: Quitting debugger\")\n rest = child.read().decode(\"utf8\")\n assert \"= 2 passed in\" in rest\n assert \"reading from stdin while output\" not in rest\n # Only printed once - not on stderr.\n assert \"Exit: Quitting debugger\" not in child.before.decode(\"utf8\")\n TestPDB.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestTraceOption.test_trace_with_parametrize_handles_shared_fixtureinfo_TestTraceOption.test_trace_with_parametrize_handles_shared_fixtureinfo.TestPDB_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestTraceOption.test_trace_with_parametrize_handles_shared_fixtureinfo_TestTraceOption.test_trace_with_parametrize_handles_shared_fixtureinfo.TestPDB_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1082, "end_line": 1127, "span_ids": ["TestTraceOption.test_trace_with_parametrize_handles_shared_fixtureinfo"], "tokens": 461}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceOption:\n\n def test_trace_with_parametrize_handles_shared_fixtureinfo(\n self, pytester: Pytester\n ) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('myparam', [1,2])\n def test_1(myparam, request):\n assert myparam in (1, 2)\n assert request.function.__name__ == \"test_1\"\n @pytest.mark.parametrize('func', [1,2])\n def test_func(func, request):\n assert func in (1, 2)\n assert request.function.__name__ == \"test_func\"\n @pytest.mark.parametrize('myparam', [1,2])\n def test_func_kw(myparam, request, func=\"func_kw\"):\n assert myparam in (1, 2)\n assert func == \"func_kw\"\n assert request.function.__name__ == \"test_func_kw\"\n \"\"\"\n )\n child = pytester.spawn_pytest(\"--trace \" + str(p1))\n for func, argname in [\n (\"test_1\", \"myparam\"),\n (\"test_func\", \"func\"),\n (\"test_func_kw\", \"myparam\"),\n ]:\n child.expect_exact(\"> PDB runcall (IO-capturing turned off) >\")\n child.expect_exact(func)\n child.expect_exact(\"Pdb\")\n child.sendline(\"args\")\n child.expect_exact(f\"{argname} = 
1\\r\\n\")\n child.expect_exact(\"Pdb\")\n child.sendline(\"c\")\n child.expect_exact(\"Pdb\")\n child.sendline(\"args\")\n child.expect_exact(f\"{argname} = 2\\r\\n\")\n child.expect_exact(\"Pdb\")\n child.sendline(\"c\")\n child.expect_exact(\"> PDB continue (IO-capturing resumed) >\")\n rest = child.read().decode(\"utf8\")\n assert \"= 6 passed in\" in rest\n assert \"reading from stdin while output\" not in rest\n # Only printed once - not on stderr.\n assert \"Exit: Quitting debugger\" not in child.before.decode(\"utf8\")\n TestPDB.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_trace_after_runpytest_test_trace_after_runpytest.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_trace_after_runpytest_test_trace_after_runpytest.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1130, "end_line": 1158, "span_ids": ["test_trace_after_runpytest"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_trace_after_runpytest(pytester: Pytester) -> None:\n \"\"\"Test that debugging's pytest_configure is re-entrant.\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n from _pytest.debugging import pytestPDB\n\n def test_outer(pytester) -> None:\n assert len(pytestPDB._saved) == 1\n\n pytester.makepyfile(\n \\\"\"\"\n from _pytest.debugging import pytestPDB\n\n def test_inner():\n assert len(pytestPDB._saved) == 2\n print()\n print(\"test_inner_\" + \"end\")\n \\\"\"\"\n )\n\n result = pytester.runpytest(\"-s\", \"-k\", \"test_inner\")\n assert result.ret == 0\n\n assert len(pytestPDB._saved) == 1\n \"\"\"\n )\n result = pytester.runpytest_subprocess(\"-s\", \"-p\", \"pytester\", str(p1))\n result.stdout.fnmatch_lines([\"test_inner_end\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_quit_with_swallowed_SystemExit_test_quit_with_swallowed_SystemExit.TestPDB_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_quit_with_swallowed_SystemExit_test_quit_with_swallowed_SystemExit.TestPDB_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1161, "end_line": 1186, "span_ids": ["test_quit_with_swallowed_SystemExit"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def test_quit_with_swallowed_SystemExit(pytester: Pytester) -> None:\n \"\"\"Test that debugging's pytest_configure is re-entrant.\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n def call_pdb_set_trace():\n __import__('pdb').set_trace()\n\n\n def test_1():\n try:\n call_pdb_set_trace()\n except SystemExit:\n pass\n\n\n def test_2():\n pass\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"Pdb\")\n child.sendline(\"q\")\n child.expect_exact(\"Exit: Quitting debugger\")\n rest = child.read().decode(\"utf8\")\n assert \"no tests ran\" in rest\n TestPDB.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdb_suspends_fixture_capturing_test_pdb_suspends_fixture_capturing.assert_PDB_continue_I": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdb_suspends_fixture_capturing_test_pdb_suspends_fixture_capturing.assert_PDB_continue_I", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1189, "end_line": 1235, "span_ids": ["test_pdb_suspends_fixture_capturing"], "tokens": 379}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"fixture\", (\"capfd\", \"capsys\"))\ndef test_pdb_suspends_fixture_capturing(pytester: Pytester, fixture: str) -> None:\n \"\"\"Using \"-s\" with pytest should suspend/resume fixture capturing.\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n def test_inner({fixture}):\n import sys\n\n print(\"out_inner_before\")\n sys.stderr.write(\"err_inner_before\\\\n\")\n\n __import__(\"pdb\").set_trace()\n\n print(\"out_inner_after\")\n sys.stderr.write(\"err_inner_after\\\\n\")\n\n out, err = {fixture}.readouterr()\n assert out ==\"out_inner_before\\\\nout_inner_after\\\\n\"\n assert err ==\"err_inner_before\\\\nerr_inner_after\\\\n\"\n \"\"\".format(\n fixture=fixture\n )\n )\n\n child = pytester.spawn_pytest(str(p1) + \" -s\")\n\n child.expect(\"Pdb\")\n before = child.before.decode(\"utf8\")\n assert (\n \"> PDB set_trace (IO-capturing turned off for fixture %s) >\" % (fixture)\n in before\n )\n\n # Test that capturing is really suspended.\n child.sendline(\"p 40 + 2\")\n child.expect(\"Pdb\")\n assert \"\\r\\n42\\r\\n\" in child.before.decode(\"utf8\")\n\n child.sendline(\"c\")\n rest = child.read().decode(\"utf8\")\n assert \"out_inner\" not in rest\n assert \"err_inner\" not in rest\n\n TestPDB.flush(child)\n assert child.exitstatus == 0\n assert \"= 1 passed in\" in rest\n assert \"> PDB continue (IO-capturing resumed for fixture %s) >\" % (fixture) in rest", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdbcls_via_local_module_test_pdbcls_via_local_module.None_2": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdbcls_via_local_module_test_pdbcls_via_local_module.None_2", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1238, "end_line": 1278, "span_ids": ["test_pdbcls_via_local_module"], "tokens": 345}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pdbcls_via_local_module(pytester: Pytester) -> None:\n \"\"\"It should be imported in pytest_configure or later only.\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n def test():\n print(\"before_set_trace\")\n __import__(\"pdb\").set_trace()\n \"\"\",\n mypdb=\"\"\"\n class Wrapped:\n class MyPdb:\n def set_trace(self, *args):\n print(\"set_trace_called\", args)\n\n def runcall(self, *args, **kwds):\n print(\"runcall_called\", args, kwds)\n \"\"\",\n )\n result = pytester.runpytest(\n str(p1), \"--pdbcls=really.invalid:Value\", syspathinsert=True\n )\n result.stdout.fnmatch_lines(\n [\n \"*= FAILURES =*\",\n \"E * --pdbcls: could not import 'really.invalid:Value': No module named *really*\",\n ]\n )\n assert result.ret == 1\n\n result = pytester.runpytest(\n str(p1), \"--pdbcls=mypdb:Wrapped.MyPdb\", syspathinsert=True\n )\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*set_trace_called*\", \"* 1 passed in *\"])\n\n # Ensure that it also works with --trace.\n result = pytester.runpytest(\n str(p1), \"--pdbcls=mypdb:Wrapped.MyPdb\", \"--trace\", syspathinsert=True\n )\n assert result.ret == 0\n result.stdout.fnmatch_lines([\"*runcall_called*\", \"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_raises_bdbquit_with_eoferror_test_raises_bdbquit_with_eoferror.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_raises_bdbquit_with_eoferror_test_raises_bdbquit_with_eoferror.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1281, "end_line": 1297, "span_ids": ["test_raises_bdbquit_with_eoferror"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_raises_bdbquit_with_eoferror(pytester: Pytester) -> None:\n \"\"\"It is not guaranteed that DontReadFromInput's read is called.\"\"\"\n\n p1 = pytester.makepyfile(\n \"\"\"\n def input_without_read(*args, **kwargs):\n raise EOFError()\n\n def test(monkeypatch):\n import builtins\n monkeypatch.setattr(builtins, \"input\", input_without_read)\n __import__('pdb').set_trace()\n \"\"\"\n )\n result = pytester.runpytest(str(p1))\n result.stdout.fnmatch_lines([\"E *BdbQuit\", \"*= 1 
failed in*\"])\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdb_wrapper_class_is_reused_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_test_pdb_wrapper_class_is_reused_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 1300, "end_line": 1328, "span_ids": ["test_pdb_wrapper_class_is_reused"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pdb_wrapper_class_is_reused(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test():\n __import__(\"pdb\").set_trace()\n __import__(\"pdb\").set_trace()\n\n import mypdb\n instances = mypdb.instances\n assert len(instances) == 2\n assert instances[0].__class__ is instances[1].__class__\n \"\"\",\n mypdb=\"\"\"\n instances = []\n\n class MyPdb:\n def __init__(self, *args, **kwargs):\n instances.append(self)\n\n def set_trace(self, *args):\n print(\"set_trace_called\", args)\n \"\"\",\n )\n result = pytester.runpytest(str(p1), \"--pdbcls=mypdb:MyPdb\", syspathinsert=True)\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\"*set_trace_called*\", \"*set_trace_called*\", \"* 1 passed in *\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unexpected_exception_TestDoctests.test_doctest_unexpected_exception.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unexpected_exception_TestDoctests.test_doctest_unexpected_exception.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 181, "end_line": 207, "span_ids": ["TestDoctests.test_doctest_unexpected_exception"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctest_unexpected_exception(self, pytester: Pytester):\n pytester.maketxtfile(\n \"\"\"\n >>> i = 0\n >>> 0 / i\n 2\n \"\"\"\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\n \"test_doctest_unexpected_exception.txt F *\",\n \"\",\n \"*= FAILURES =*\",\n \"*_ [[]doctest[]] test_doctest_unexpected_exception.txt _*\",\n \"001 >>> i = 0\",\n \"002 >>> 0 / i\",\n \"UNEXPECTED EXCEPTION: ZeroDivisionError*\",\n \"Traceback (most recent call last):\",\n ' File 
\"*/doctest.py\", line *, in __run',\n \" *\",\n ' File \"\", line 1, in ',\n \"ZeroDivisionError: division by zero\",\n \"*/test_doctest_unexpected_exception.txt:2: UnexpectedException\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_outcomes_TestDoctests.test_doctest_outcomes.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_outcomes_TestDoctests.test_doctest_outcomes.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 209, "end_line": 243, "span_ids": ["TestDoctests.test_doctest_outcomes"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctest_outcomes(self, pytester: Pytester):\n pytester.maketxtfile(\n test_skip=\"\"\"\n >>> 1\n 1\n >>> import pytest\n >>> pytest.skip(\"\")\n >>> 2\n 3\n \"\"\",\n test_xfail=\"\"\"\n >>> import pytest\n >>> pytest.xfail(\"xfail_reason\")\n >>> foo\n bar\n \"\"\",\n test_importorskip=\"\"\"\n >>> import pytest\n >>> pytest.importorskip(\"doesnotexist\")\n >>> foo\n bar\n \"\"\",\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\n \"collected 3 items\",\n \"\",\n \"test_importorskip.txt s *\",\n \"test_skip.txt s *\",\n \"test_xfail.txt x *\",\n \"\",\n \"*= 2 skipped, 1 xfailed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_linedata_on_property_TestDoctests.test_doctest_linedata_on_property.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_linedata_on_property_TestDoctests.test_doctest_linedata_on_property.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 338, "end_line": 366, "span_ids": ["TestDoctests.test_doctest_linedata_on_property"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctest_linedata_on_property(self, pytester: Pytester):\n pytester.makepyfile(\n \"\"\"\n class Sample(object):\n @property\n def some_property(self):\n '''\n >>> Sample().some_property\n 'another thing'\n '''\n return 'something'\n \"\"\"\n )\n result = 
pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\n \"*= FAILURES =*\",\n \"*_ [[]doctest[]] test_doctest_linedata_on_property.Sample.some_property _*\",\n \"004 \",\n \"005 >>> Sample().some_property\",\n \"Expected:\",\n \" 'another thing'\",\n \"Got:\",\n \" 'something'\",\n \"\",\n \"*/test_doctest_linedata_on_property.py:5: DocTestFailure\",\n \"*= 1 failed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_no_linedata_on_overriden_property_TestDoctests.test_doctest_no_linedata_on_overriden_property.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_no_linedata_on_overriden_property_TestDoctests.test_doctest_no_linedata_on_overriden_property.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 368, "end_line": 397, "span_ids": ["TestDoctests.test_doctest_no_linedata_on_overriden_property"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctest_no_linedata_on_overriden_property(self, pytester: Pytester):\n pytester.makepyfile(\n \"\"\"\n class Sample(object):\n @property\n def some_property(self):\n '''\n >>> Sample().some_property\n 'another thing'\n '''\n return 'something'\n some_property = property(some_property.__get__, None, None, some_property.__doc__)\n \"\"\"\n )\n result = pytester.runpytest(\"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\n \"*= FAILURES =*\",\n \"*_ [[]doctest[]] test_doctest_no_linedata_on_overriden_property.Sample.some_property _*\",\n \"EXAMPLE LOCATION UNKNOWN, not showing all tests of that example\",\n \"[?][?][?] 
>>> Sample().some_property\",\n \"Expected:\",\n \" 'another thing'\",\n \"Got:\",\n \" 'something'\",\n \"\",\n \"*/test_doctest_no_linedata_on_overriden_property.py:None: DocTestFailure\",\n \"*= 1 failed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_error_diffs.py___TESTCASES": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_error_diffs.py___TESTCASES", "embedding": null, "metadata": {"file_path": "testing/test_error_diffs.py", "file_name": "test_error_diffs.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 213, "span_ids": ["impl", "docstring", "imports"], "tokens": 55}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"\nTests and examples for correct \"+/-\" usage in error diffs.\n\nSee https://github.com/pytest-dev/pytest/issues/3333 for details.\n\n\"\"\"\nimport sys\n\nimport pytest\nfrom _pytest.pytester import Pytester\n\n\nTESTCASES =\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_error_diffs.py_if_sys_version_info_2__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_error_diffs.py_if_sys_version_info_2__", "embedding": null, "metadata": {"file_path": "testing/test_error_diffs.py", "file_name": "test_error_diffs.py", "file_type": "text/x-python", "category": "test", "start_line": 214, "end_line": 284, "span_ids": ["test_error_diff", "impl:3"], "tokens": 465}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "if sys.version_info[:2] >= (3, 7):\n TESTCASES.extend(\n [\n pytest.param(\n \"\"\"\n from dataclasses import dataclass\n\n @dataclass\n class A:\n a: int\n b: str\n\n def test_this():\n result = A(1, 'spam')\n expected = A(2, 'spam')\n assert result == expected\n \"\"\",\n \"\"\"\n > assert result == expected\n E AssertionError: assert A(a=1, b='spam') == A(a=2, b='spam')\n E Matching attributes:\n E ['b']\n E Differing attributes:\n E ['a']\n E Drill down into differing attribute a:\n E a: 1 != 2\n E +1\n E -2\n \"\"\",\n id=\"Compare data classes\",\n ),\n pytest.param(\n \"\"\"\n import attr\n\n @attr.s(auto_attribs=True)\n class A:\n a: int\n b: str\n\n def test_this():\n result = A(1, 'spam')\n expected = A(1, 'eggs')\n assert result == expected\n \"\"\",\n \"\"\"\n > assert result == expected\n E AssertionError: assert A(a=1, b='spam') == A(a=1, b='eggs')\n E Matching attributes:\n E ['a']\n E Differing attributes:\n E ['b']\n E Drill down into differing attribute b:\n E b: 'spam' != 'eggs'\n E - eggs\n E + spam\n \"\"\",\n id=\"Compare attrs classes\",\n ),\n ]\n 
)\n\n\n@pytest.mark.parametrize(\"code, expected\", TESTCASES)\ndef test_error_diff(code: str, expected: str, pytester: Pytester) -> None:\n expected_lines = [line.lstrip() for line in expected.splitlines()]\n p = pytester.makepyfile(code)\n result = pytester.runpytest(p, \"-vv\")\n result.stdout.fnmatch_lines(expected_lines)\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_cancel_timeout_on_hook_test_cancel_timeout_on_hook.assert_called_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_cancel_timeout_on_hook_test_cancel_timeout_on_hook.assert_called_1_", "embedding": null, "metadata": {"file_path": "testing/test_faulthandler.py", "file_name": "test_faulthandler.py", "file_type": "text/x-python", "category": "test", "start_line": 110, "end_line": 128, "span_ids": ["test_cancel_timeout_on_hook"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"hook_name\", [\"pytest_enter_pdb\", \"pytest_exception_interact\"])\ndef test_cancel_timeout_on_hook(monkeypatch, hook_name) -> None:\n \"\"\"Make sure that we are cancelling any scheduled traceback dumping due\n to timeout before entering pdb (pytest-dev/pytest-faulthandler#12) or any\n other interactive exception (pytest-dev/pytest-faulthandler#14).\"\"\"\n import faulthandler\n from _pytest import faulthandler as faulthandler_plugin\n\n called = []\n\n monkeypatch.setattr(\n faulthandler, \"cancel_dump_traceback_later\", lambda: called.append(1)\n )\n\n # call our hook explicitly, we can trust that pytest will call the hook\n # for us at the appropriate moment\n hook_func = getattr(faulthandler_plugin, hook_name)\n hook_func()\n assert called == [1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_function_TestPython.test_failure_function.if_junit_logging_no_.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_function_TestPython.test_failure_function.if_junit_logging_no_.None_2", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 497, "end_line": 566, "span_ids": ["TestPython.test_failure_function"], "tokens": 693}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\n \"junit_logging\", [\"no\", \"log\", \"system-out\", \"system-err\", \"out-err\", \"all\"]\n )\n 
@parametrize_families\n def test_failure_function(\n self,\n pytester: Pytester,\n junit_logging,\n run_and_parse: RunAndParse,\n xunit_family,\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import logging\n import sys\n\n def test_fail():\n print(\"hello-stdout\")\n sys.stderr.write(\"hello-stderr\\\\n\")\n logging.info('info msg')\n logging.warning('warning msg')\n raise ValueError(42)\n \"\"\"\n )\n\n result, dom = run_and_parse(\n \"-o\", \"junit_logging=%s\" % junit_logging, family=xunit_family\n )\n assert result.ret, \"Expected ret > 0\"\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(failures=1, tests=1)\n tnode = node.find_first_by_tag(\"testcase\")\n tnode.assert_attr(classname=\"test_failure_function\", name=\"test_fail\")\n fnode = tnode.find_first_by_tag(\"failure\")\n fnode.assert_attr(message=\"ValueError: 42\")\n assert \"ValueError\" in fnode.toxml(), \"ValueError not included\"\n\n if junit_logging in [\"log\", \"all\"]:\n logdata = tnode.find_first_by_tag(\"system-out\")\n log_xml = logdata.toxml()\n assert logdata.tag == \"system-out\", \"Expected tag: system-out\"\n assert \"info msg\" not in log_xml, \"Unexpected INFO message\"\n assert \"warning msg\" in log_xml, \"Missing WARN message\"\n if junit_logging in [\"system-out\", \"out-err\", \"all\"]:\n systemout = tnode.find_first_by_tag(\"system-out\")\n systemout_xml = systemout.toxml()\n assert systemout.tag == \"system-out\", \"Expected tag: system-out\"\n assert \"info msg\" not in systemout_xml, \"INFO message found in system-out\"\n assert (\n \"hello-stdout\" in systemout_xml\n ), \"Missing 'hello-stdout' in system-out\"\n if junit_logging in [\"system-err\", \"out-err\", \"all\"]:\n systemerr = tnode.find_first_by_tag(\"system-err\")\n systemerr_xml = systemerr.toxml()\n assert systemerr.tag == \"system-err\", \"Expected tag: system-err\"\n assert \"info msg\" not in systemerr_xml, \"INFO message found in system-err\"\n assert (\n \"hello-stderr\" in systemerr_xml\n ), \"Missing 'hello-stderr' in system-err\"\n assert (\n \"warning msg\" not in systemerr_xml\n ), \"WARN message found in system-err\"\n if junit_logging == \"no\":\n assert not tnode.find_by_tag(\"log\"), \"Found unexpected content: log\"\n assert not tnode.find_by_tag(\n \"system-out\"\n ), \"Found unexpected content: system-out\"\n assert not tnode.find_by_tag(\n \"system-err\"\n ), \"Found unexpected content: system-err\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_escape_TestPython.test_failure_escape.for_index_char_in_enumer.assert_s_n_char_in_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_failure_escape_TestPython.test_failure_escape.for_index_char_in_enumer.assert_s_n_char_in_t", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 585, "end_line": 613, "span_ids": ["TestPython.test_failure_escape"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_failure_escape(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('arg1', \"<&'\", ids=\"<&'\")\n def test_func(arg1):\n print(arg1)\n assert 0\n \"\"\"\n )\n result, dom = run_and_parse(\n \"-o\", \"junit_logging=system-out\", family=xunit_family\n )\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(failures=3, tests=3)\n\n for index, char in enumerate(\"<&'\"):\n\n tnode = node.find_nth_by_tag(\"testcase\", index)\n tnode.assert_attr(\n classname=\"test_failure_escape\", name=\"test_func[%s]\" % char\n )\n sysout = tnode.find_first_by_tag(\"system-out\")\n text = sysout.text\n assert \"%s\\n\" % char in text", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfail_captures_output_once_TestPython.test_xfail_captures_output_once.if_junit_logging_in_log.else_.assert_len_tnode_find_by_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_xfail_captures_output_once_TestPython.test_xfail_captures_output_once.if_junit_logging_in_log.else_.assert_len_tnode_find_by_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 680, "end_line": 709, "span_ids": ["TestPython.test_xfail_captures_output_once"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\n \"junit_logging\", [\"no\", \"log\", \"system-out\", \"system-err\", \"out-err\", \"all\"]\n )\n def test_xfail_captures_output_once(\n self, pytester: Pytester, junit_logging: str, run_and_parse: RunAndParse\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n import pytest\n\n @pytest.mark.xfail()\n def test_fail():\n sys.stdout.write('XFAIL This is stdout')\n sys.stderr.write('XFAIL This is stderr')\n assert 0\n \"\"\"\n )\n result, dom = run_and_parse(\"-o\", \"junit_logging=%s\" % junit_logging)\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n if junit_logging in [\"system-err\", \"out-err\", \"all\"]:\n assert len(tnode.find_by_tag(\"system-err\")) == 1\n else:\n assert len(tnode.find_by_tag(\"system-err\")) == 0\n\n if junit_logging in [\"log\", \"system-out\", \"out-err\", \"all\"]:\n assert len(tnode.find_by_tag(\"system-out\")) == 1\n else:\n assert len(tnode.find_by_tag(\"system-out\")) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_pass_captures_stdout_TestPython.test_pass_captures_stdout.if_junit_logging_syst.assert_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_pass_captures_stdout_TestPython.test_pass_captures_stdout.if_junit_logging_syst.assert_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 799, "end_line": 820, "span_ids": ["TestPython.test_pass_captures_stdout"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-out\"])\n def test_pass_captures_stdout(\n self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_pass():\n print('hello-stdout')\n \"\"\"\n )\n result, dom = run_and_parse(\"-o\", \"junit_logging=%s\" % junit_logging)\n node = dom.find_first_by_tag(\"testsuite\")\n pnode = node.find_first_by_tag(\"testcase\")\n if junit_logging == \"no\":\n assert not node.find_by_tag(\n \"system-out\"\n ), \"system-out should not be generated\"\n if junit_logging == \"system-out\":\n systemout = pnode.find_first_by_tag(\"system-out\")\n assert (\n \"hello-stdout\" in systemout.toxml()\n ), \"'hello-stdout' should be in system-out\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_pass_captures_stderr_TestPython.test_pass_captures_stderr.if_junit_logging_syst.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_pass_captures_stderr_TestPython.test_pass_captures_stderr.if_junit_logging_syst.assert_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 822, "end_line": 844, "span_ids": ["TestPython.test_pass_captures_stderr"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-err\"])\n def test_pass_captures_stderr(\n self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n def test_pass():\n sys.stderr.write('hello-stderr')\n \"\"\"\n )\n result, dom = run_and_parse(\"-o\", \"junit_logging=%s\" % junit_logging)\n node = dom.find_first_by_tag(\"testsuite\")\n pnode = node.find_first_by_tag(\"testcase\")\n if junit_logging == \"no\":\n assert not node.find_by_tag(\n \"system-err\"\n ), \"system-err should not be generated\"\n if junit_logging == \"system-err\":\n systemerr = pnode.find_first_by_tag(\"system-err\")\n assert (\n \"hello-stderr\" in systemerr.toxml()\n ), \"'hello-stderr' should be in system-err\"", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_captures_stdout_TestPython.test_setup_error_captures_stdout.if_junit_logging_syst.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_captures_stdout_TestPython.test_setup_error_captures_stdout.if_junit_logging_syst.assert_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 846, "end_line": 873, "span_ids": ["TestPython.test_setup_error_captures_stdout"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-out\"])\n def test_setup_error_captures_stdout(\n self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def arg(request):\n print('hello-stdout')\n raise ValueError()\n def test_function(arg):\n pass\n \"\"\"\n )\n result, dom = run_and_parse(\"-o\", \"junit_logging=%s\" % junit_logging)\n node = dom.find_first_by_tag(\"testsuite\")\n pnode = node.find_first_by_tag(\"testcase\")\n if junit_logging == \"no\":\n assert not node.find_by_tag(\n \"system-out\"\n ), \"system-out should not be generated\"\n if junit_logging == \"system-out\":\n systemout = pnode.find_first_by_tag(\"system-out\")\n assert (\n \"hello-stdout\" in systemout.toxml()\n ), \"'hello-stdout' should be in system-out\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_captures_stderr_TestPython.test_setup_error_captures_stderr.if_junit_logging_syst.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_setup_error_captures_stderr_TestPython.test_setup_error_captures_stderr.if_junit_logging_syst.assert_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 875, "end_line": 903, "span_ids": ["TestPython.test_setup_error_captures_stderr"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-err\"])\n def test_setup_error_captures_stderr(\n self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n import pytest\n\n 
@pytest.fixture\n def arg(request):\n sys.stderr.write('hello-stderr')\n raise ValueError()\n def test_function(arg):\n pass\n \"\"\"\n )\n result, dom = run_and_parse(\"-o\", \"junit_logging=%s\" % junit_logging)\n node = dom.find_first_by_tag(\"testsuite\")\n pnode = node.find_first_by_tag(\"testcase\")\n if junit_logging == \"no\":\n assert not node.find_by_tag(\n \"system-err\"\n ), \"system-err should not be generated\"\n if junit_logging == \"system-err\":\n systemerr = pnode.find_first_by_tag(\"system-err\")\n assert (\n \"hello-stderr\" in systemerr.toxml()\n ), \"'hello-stderr' should be in system-err\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_avoid_double_stdout_TestPython.test_avoid_double_stdout.if_junit_logging_syst.assert_hello_stdout_tear": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_avoid_double_stdout_TestPython.test_avoid_double_stdout.if_junit_logging_syst.assert_hello_stdout_tear", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 905, "end_line": 933, "span_ids": ["TestPython.test_avoid_double_stdout"], "tokens": 249}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-out\"])\n def test_avoid_double_stdout(\n self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n import pytest\n\n @pytest.fixture\n def arg(request):\n yield\n sys.stdout.write('hello-stdout teardown')\n raise ValueError()\n def test_function(arg):\n sys.stdout.write('hello-stdout call')\n \"\"\"\n )\n result, dom = run_and_parse(\"-o\", \"junit_logging=%s\" % junit_logging)\n node = dom.find_first_by_tag(\"testsuite\")\n pnode = node.find_first_by_tag(\"testcase\")\n if junit_logging == \"no\":\n assert not node.find_by_tag(\n \"system-out\"\n ), \"system-out should not be generated\"\n if junit_logging == \"system-out\":\n systemout = pnode.find_first_by_tag(\"system-out\")\n assert \"hello-stdout call\" in systemout.toxml()\n assert \"hello-stdout teardown\" in systemout.toxml()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_nullbyte_test_nullbyte.if_junit_logging_no_.assert_x00_not_in_text": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_nullbyte_test_nullbyte.if_junit_logging_no_.assert_x00_not_in_text", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1004, "end_line": 1023, "span_ids": ["test_nullbyte"], "tokens": 218}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-out\"])\ndef test_nullbyte(pytester: Pytester, junit_logging: str) -> None:\n # A null byte can not occur in XML (see section 2.2 of the spec)\n pytester.makepyfile(\n \"\"\"\n import sys\n def test_print_nullbyte():\n sys.stdout.write('Here the null -->' + chr(0) + '<--')\n sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')\n assert False\n \"\"\"\n )\n xmlf = pytester.path.joinpath(\"junit.xml\")\n pytester.runpytest(\"--junitxml=%s\" % xmlf, \"-o\", \"junit_logging=%s\" % junit_logging)\n text = xmlf.read_text()\n assert \"\\x00\" not in text\n if junit_logging == \"system-out\":\n assert \"#x00\" in text\n if junit_logging == \"no\":\n assert \"#x00\" not in text", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_nullbyte_replace_test_nullbyte_replace.if_junit_logging_no_.assert_x0_not_in_text": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_nullbyte_replace_test_nullbyte_replace.if_junit_logging_no_.assert_x0_not_in_text", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1026, "end_line": 1044, "span_ids": ["test_nullbyte_replace"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-out\"])\ndef test_nullbyte_replace(pytester: Pytester, junit_logging: str) -> None:\n # Check if the null byte gets replaced\n pytester.makepyfile(\n \"\"\"\n import sys\n def test_print_nullbyte():\n sys.stdout.write('Here the null -->' + chr(0) + '<--')\n sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')\n assert False\n \"\"\"\n )\n xmlf = pytester.path.joinpath(\"junit.xml\")\n pytester.runpytest(\"--junitxml=%s\" % xmlf, \"-o\", \"junit_logging=%s\" % junit_logging)\n text = xmlf.read_text()\n if junit_logging == \"system-out\":\n assert \"#x0\" in text\n if junit_logging == \"no\":\n assert \"#x0\" not in text", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_property_test_record_property.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_record_property_test_record_property.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1211, "end_line": 
1230, "span_ids": ["test_record_property"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_record_property(pytester: Pytester, run_and_parse: RunAndParse) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def other(record_property):\n record_property(\"bar\", 1)\n def test_record(record_property, other):\n record_property(\"foo\", \"<1\");\n \"\"\"\n )\n result, dom = run_and_parse()\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n psnode = tnode.find_first_by_tag(\"properties\")\n pnodes = psnode.find_by_tag(\"property\")\n pnodes[0].assert_attr(name=\"bar\", value=\"1\")\n pnodes[1].assert_attr(name=\"foo\", value=\"<1\")\n result.stdout.fnmatch_lines([\"*= 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logging_passing_tests_disabled_does_not_log_test_output_test_logging_passing_tests_disabled_does_not_log_test_output.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logging_passing_tests_disabled_does_not_log_test_output_test_logging_passing_tests_disabled_does_not_log_test_output.None_3", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1628, "end_line": 1658, "span_ids": ["test_logging_passing_tests_disabled_does_not_log_test_output"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@parametrize_families\ndef test_logging_passing_tests_disabled_does_not_log_test_output(\n pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n junit_log_passing_tests=False\n junit_logging=system-out\n junit_family={family}\n \"\"\".format(\n family=xunit_family\n )\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n import sys\n\n def test_func():\n sys.stdout.write('This is stdout')\n sys.stderr.write('This is stderr')\n logging.warning('hello')\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testcase\")\n assert len(node.find_by_tag(\"system-err\")) == 0\n assert len(node.find_by_tag(\"system-out\")) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logging_passing_tests_disabled_logs_output_for_failing_test_issue5430_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logging_passing_tests_disabled_logs_output_for_failing_test_issue5430_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1661, "end_line": 1704, "span_ids": ["test_logging_passing_tests_disabled_logs_output_for_failing_test_issue5430"], "tokens": 329}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@parametrize_families\n@pytest.mark.parametrize(\"junit_logging\", [\"no\", \"system-out\", \"system-err\"])\ndef test_logging_passing_tests_disabled_logs_output_for_failing_test_issue5430(\n pytester: Pytester,\n junit_logging: str,\n run_and_parse: RunAndParse,\n xunit_family: str,\n) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n junit_log_passing_tests=False\n junit_family={family}\n \"\"\".format(\n family=xunit_family\n )\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n import sys\n\n def test_func():\n logging.warning('hello')\n assert 0\n \"\"\"\n )\n result, dom = run_and_parse(\n \"-o\", \"junit_logging=%s\" % junit_logging, family=xunit_family\n )\n assert result.ret == 1\n node = dom.find_first_by_tag(\"testcase\")\n if junit_logging == \"system-out\":\n assert len(node.find_by_tag(\"system-err\")) == 0\n assert len(node.find_by_tag(\"system-out\")) == 1\n elif junit_logging == \"system-err\":\n assert len(node.find_by_tag(\"system-err\")) == 1\n assert len(node.find_by_tag(\"system-out\")) == 0\n else:\n assert junit_logging == \"no\"\n assert len(node.find_by_tag(\"system-err\")) == 0\n assert len(node.find_by_tag(\"system-out\")) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_keywords_at_node_level_TestFunctional.test_keywords_at_node_level.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_keywords_at_node_level_TestFunctional.test_keywords_at_node_level.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 645, "end_line": 665, "span_ids": ["TestFunctional.test_keywords_at_node_level"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_keywords_at_node_level(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope=\"session\", autouse=True)\n def some(request):\n request.keywords[\"hello\"] = 42\n assert \"world\" not in request.keywords\n\n @pytest.fixture(scope=\"function\", autouse=True)\n def funcsetup(request):\n assert \"world\" in 
request.keywords\n assert \"hello\" in request.keywords\n\n @pytest.mark.world\n def test_function():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestMarkDecorator_TestMarkDecorator.test_aliases.assert_md_kwargs_thr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestMarkDecorator_TestMarkDecorator.test_aliases.assert_md_kwargs_thr", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 921, "end_line": 938, "span_ids": ["TestMarkDecorator", "TestMarkDecorator.test__eq__", "TestMarkDecorator.test_aliases"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkDecorator:\n @pytest.mark.parametrize(\n \"lhs, rhs, expected\",\n [\n (pytest.mark.foo(), pytest.mark.foo(), True),\n (pytest.mark.foo(), pytest.mark.bar(), False),\n (pytest.mark.foo(), \"bar\", False),\n (\"foo\", pytest.mark.bar(), False),\n ],\n )\n def test__eq__(self, lhs, rhs, expected) -> None:\n assert (lhs == rhs) == expected\n\n def test_aliases(self) -> None:\n md = pytest.mark.foo(1, \"2\", three=3)\n assert md.name == \"foo\"\n assert md.args == (1, \"2\")\n assert md.kwargs == {\"three\": 3}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_parametrize_bad_markname_test_mark_expressions_no_smear._assert_skipped_k_fai": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parameterset_for_parametrize_bad_markname_test_mark_expressions_no_smear._assert_skipped_k_fai", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 1011, "end_line": 1048, "span_ids": ["test_mark_expressions_no_smear", "test_parameterset_for_parametrize_bad_markname"], "tokens": 279}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parameterset_for_parametrize_bad_markname(pytester: Pytester) -> None:\n with pytest.raises(pytest.UsageError):\n test_parameterset_for_parametrize_marks(pytester, \"bad\")\n\n\ndef test_mark_expressions_no_smear(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n class BaseTests(object):\n def test_something(self):\n pass\n\n @pytest.mark.FOO\n class TestFooClass(BaseTests):\n pass\n\n @pytest.mark.BAR\n class TestBarClass(BaseTests):\n pass\n \"\"\"\n )\n\n reprec = 
pytester.inline_run(\"-m\", \"FOO\")\n passed, skipped, failed = reprec.countoutcomes()\n dlist = reprec.getcalls(\"pytest_deselected\")\n assert passed == 1\n assert skipped == failed == 0\n deselected_tests = dlist[0].items\n assert len(deselected_tests) == 1\n\n # todo: fixed\n # keywords smear - expected behaviour\n # reprec_keywords = pytester.inline_run(\"-k\", \"FOO\")\n # passed_k, skipped_k, failed_k = reprec_keywords.countoutcomes()\n # assert passed_k == 2\n # assert skipped_k == failed_k == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_addmarker_order_test_markers_from_parametrize.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_addmarker_order_test_markers_from_parametrize.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 1051, "end_line": 1099, "span_ids": ["test_addmarker_order", "test_markers_from_parametrize"], "tokens": 342}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_addmarker_order() -> None:\n session = mock.Mock()\n session.own_markers = []\n session.parent = None\n session.nodeid = \"\"\n node = Node.from_parent(session, name=\"Test\")\n node.add_marker(\"foo\")\n node.add_marker(\"bar\")\n node.add_marker(\"baz\", append=False)\n extracted = [x.name for x in node.iter_markers()]\n assert extracted == [\"baz\", \"foo\", \"bar\"]\n\n\n@pytest.mark.filterwarnings(\"ignore\")\ndef test_markers_from_parametrize(pytester: Pytester) -> None:\n \"\"\"#3605\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n first_custom_mark = pytest.mark.custom_marker\n custom_mark = pytest.mark.custom_mark\n @pytest.fixture(autouse=True)\n def trigger(request):\n custom_mark = list(request.node.iter_markers('custom_mark'))\n print(\"Custom mark %s\" % custom_mark)\n\n @custom_mark(\"custom mark non parametrized\")\n def test_custom_mark_non_parametrized():\n print(\"Hey from test\")\n\n @pytest.mark.parametrize(\n \"obj_type\",\n [\n first_custom_mark(\"first custom mark\")(\"template\"),\n pytest.param( # Think this should be recommended way?\n \"disk\",\n marks=custom_mark('custom mark1')\n ),\n custom_mark(\"custom mark2\")(\"vm\"), # Tried also this\n ]\n )\n def test_custom_mark_parametrized(obj_type):\n print(\"obj_type is:\", obj_type)\n \"\"\"\n )\n\n result = pytester.runpytest()\n result.assert_outcomes(passed=4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_pytest_param_id_requires_string_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_pytest_param_id_requires_string_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": 
"text/x-python", "category": "test", "start_line": 1102, "end_line": 1129, "span_ids": ["test_marker_expr_eval_failure_handling", "test_pytest_param_id_allows_none_or_string", "test_pytest_param_id_requires_string"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_param_id_requires_string() -> None:\n with pytest.raises(TypeError) as excinfo:\n pytest.param(id=True) # type: ignore[arg-type]\n (msg,) = excinfo.value.args\n assert msg == \"Expected id to be a string, got : True\"\n\n\n@pytest.mark.parametrize(\"s\", (None, \"hello world\"))\ndef test_pytest_param_id_allows_none_or_string(s) -> None:\n assert pytest.param(id=s)\n\n\n@pytest.mark.parametrize(\"expr\", (\"NOT internal_err\", \"NOT (internal_err)\", \"bogus/\"))\ndef test_marker_expr_eval_failure_handling(pytester: Pytester, expr) -> None:\n foo = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.internal_err\n def test_foo():\n pass\n \"\"\"\n )\n expected = f\"ERROR: Wrong expression passed to '-m': {expr}: *\"\n result = pytester.runpytest(foo, \"-m\", expr)\n result.stderr.fnmatch_lines([expected])\n assert result.ret == ExitCode.USAGE_ERROR", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_SkipTest_during_collection_test_skip_test_with_unicode.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_SkipTest_during_collection_test_skip_test_with_unicode.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 284, "end_line": 378, "span_ids": ["test_nottest_class_decorator", "test_skip_test_with_unicode", "test_istest_class_decorator", "test_nottest_function_decorator", "test_istest_function_decorator", "test_SkipTest_in_test", "test_SkipTest_during_collection"], "tokens": 551}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_SkipTest_during_collection(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import nose\n raise nose.SkipTest(\"during collection\")\n def test_failing():\n assert False\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.assert_outcomes(skipped=1)\n\n\ndef test_SkipTest_in_test(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import nose\n\n def test_skipping():\n raise nose.SkipTest(\"in test\")\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(skipped=1)\n\n\ndef test_istest_function_decorator(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import nose.tools\n @nose.tools.istest\n def not_test_prefix():\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.assert_outcomes(passed=1)\n\n\ndef 
test_nottest_function_decorator(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import nose.tools\n @nose.tools.nottest\n def test_prefix():\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n assert not reprec.getfailedcollections()\n calls = reprec.getreports(\"pytest_runtest_logreport\")\n assert not calls\n\n\ndef test_istest_class_decorator(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import nose.tools\n @nose.tools.istest\n class NotTestPrefix(object):\n def test_method(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.assert_outcomes(passed=1)\n\n\ndef test_nottest_class_decorator(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import nose.tools\n @nose.tools.nottest\n class TestPrefix(object):\n def test_method(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n assert not reprec.getfailedcollections()\n calls = reprec.getreports(\"pytest_runtest_logreport\")\n assert not calls\n\n\ndef test_skip_test_with_unicode(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\\\n import unittest\n class TestClass():\n def test_io(self):\n raise unittest.SkipTest('\ud83d\ude0a')\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 skipped *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_raises_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nose.py_test_raises_", "embedding": null, "metadata": {"file_path": "testing/test_nose.py", "file_name": "test_nose.py", "file_type": "text/x-python", "category": "test", "start_line": 381, "end_line": 426, "span_ids": ["test_raises"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_raises(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from nose.tools import raises\n\n @raises(RuntimeError)\n def test_raises_runtimeerror():\n raise RuntimeError\n\n @raises(Exception)\n def test_raises_baseexception_not_caught():\n raise BaseException\n\n @raises(BaseException)\n def test_raises_baseexception_caught():\n raise BaseException\n \"\"\"\n )\n result = pytester.runpytest(\"-vv\")\n result.stdout.fnmatch_lines(\n [\n \"test_raises.py::test_raises_runtimeerror PASSED*\",\n \"test_raises.py::test_raises_baseexception_not_caught FAILED*\",\n \"test_raises.py::test_raises_baseexception_caught PASSED*\",\n \"*= FAILURES =*\",\n \"*_ test_raises_baseexception_not_caught _*\",\n \"\",\n \"arg = (), kw = {}\",\n \"\",\n \" def newfunc(*arg, **kw):\",\n \" try:\",\n \"> func(*arg, **kw)\",\n \"\",\n \"*/nose/*: \",\n \"_ _ *\",\n \"\",\n \" @raises(Exception)\",\n \" def test_raises_baseexception_not_caught():\",\n \"> raise BaseException\",\n \"E BaseException\",\n \"\",\n \"test_raises.py:9: BaseException\",\n \"* 1 failed, 2 passed *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_argparse_TestParser.test_custom_prog.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_argparse_TestParser.test_custom_prog.None_1", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 30, "span_ids": ["parser", "TestParser.test_custom_prog", "imports", "TestParser.test_no_help_by_default", "TestParser"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import argparse\nimport os\nimport shlex\nimport subprocess\nimport sys\n\nimport py\n\nimport pytest\nfrom _pytest.config import argparsing as parseopt\nfrom _pytest.config.exceptions import UsageError\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\n\n@pytest.fixture\ndef parser() -> parseopt.Parser:\n return parseopt.Parser()\n\n\nclass TestParser:\n def test_no_help_by_default(self) -> None:\n parser = parseopt.Parser(usage=\"xyz\")\n pytest.raises(UsageError, lambda: parser.parse([\"-h\"]))\n\n def test_custom_prog(self, parser: parseopt.Parser) -> None:\n \"\"\"Custom prog can be set for `argparse.ArgumentParser`.\"\"\"\n assert parser._getparser().prog == os.path.basename(sys.argv[0])\n parser.prog = \"custom-prog\"\n assert parser._getparser().prog == \"custom-prog\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_TestParser.test_argument.assert_str_argument_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_TestParser.test_argument.assert_str_argument_", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 30, "end_line": 46, "span_ids": ["TestParser.test_argument"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_argument(self) -> None:\n with pytest.raises(parseopt.ArgumentError):\n # need a short or long option\n argument = parseopt.Argument()\n argument = parseopt.Argument(\"-t\")\n assert argument._short_opts == [\"-t\"]\n assert argument._long_opts == []\n assert argument.dest == \"t\"\n argument = parseopt.Argument(\"-t\", \"--test\")\n assert argument._short_opts == [\"-t\"]\n assert argument._long_opts == [\"--test\"]\n assert argument.dest == \"test\"\n argument = parseopt.Argument(\"-t\", \"--test\", dest=\"abc\")\n assert argument.dest == \"abc\"\n assert str(argument) == (\n \"Argument(_short_opts: ['-t'], _long_opts: ['--test'], dest: 'abc')\"\n )", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_processopt_TestParser.test_parse_known_and_unknown_args.assert_unknown_y_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_argument_processopt_TestParser.test_parse_known_and_unknown_args.assert_unknown_y_", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 63, "end_line": 142, "span_ids": ["TestParser.test_parser_addoption", "TestParser.test_group_add_and_get", "TestParser.test_parse_known_and_unknown_args", "TestParser.test_group_ordering", "TestParser.test_parse2", "TestParser.test_getgroup_simple", "TestParser.test_group_addoption", "TestParser.test_group_shortopt_lowercase", "TestParser.test_group_addoption_conflict", "TestParser.test_parse", "TestParser.test_argument_processopt", "TestParser.test_parse_known_args"], "tokens": 788}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_argument_processopt(self) -> None:\n argument = parseopt.Argument(\"-t\", type=int)\n argument.default = 42\n argument.dest = \"abc\"\n res = argument.attrs()\n assert res[\"default\"] == 42\n assert res[\"dest\"] == \"abc\"\n\n def test_group_add_and_get(self, parser: parseopt.Parser) -> None:\n group = parser.getgroup(\"hello\", description=\"desc\")\n assert group.name == \"hello\"\n assert group.description == \"desc\"\n\n def test_getgroup_simple(self, parser: parseopt.Parser) -> None:\n group = parser.getgroup(\"hello\", description=\"desc\")\n assert group.name == \"hello\"\n assert group.description == \"desc\"\n group2 = parser.getgroup(\"hello\")\n assert group2 is group\n\n def test_group_ordering(self, parser: parseopt.Parser) -> None:\n parser.getgroup(\"1\")\n parser.getgroup(\"2\")\n parser.getgroup(\"3\", after=\"1\")\n groups = parser._groups\n groups_names = [x.name for x in groups]\n assert groups_names == list(\"132\")\n\n def test_group_addoption(self) -> None:\n group = parseopt.OptionGroup(\"hello\")\n group.addoption(\"--option1\", action=\"store_true\")\n assert len(group.options) == 1\n assert isinstance(group.options[0], parseopt.Argument)\n\n def test_group_addoption_conflict(self) -> None:\n group = parseopt.OptionGroup(\"hello again\")\n group.addoption(\"--option1\", \"--option-1\", action=\"store_true\")\n with pytest.raises(ValueError) as err:\n group.addoption(\"--option1\", \"--option-one\", action=\"store_true\")\n assert str({\"--option1\"}) in str(err.value)\n\n def test_group_shortopt_lowercase(self, parser: parseopt.Parser) -> None:\n group = parser.getgroup(\"hello\")\n with pytest.raises(ValueError):\n group.addoption(\"-x\", action=\"store_true\")\n assert len(group.options) == 0\n group._addoption(\"-x\", action=\"store_true\")\n assert len(group.options) == 1\n\n def test_parser_addoption(self, parser: parseopt.Parser) -> None:\n group = parser.getgroup(\"custom options\")\n assert len(group.options) == 0\n group.addoption(\"--option1\", action=\"store_true\")\n 
assert len(group.options) == 1\n\n def test_parse(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--hello\", dest=\"hello\", action=\"store\")\n args = parser.parse([\"--hello\", \"world\"])\n assert args.hello == \"world\"\n assert not getattr(args, parseopt.FILE_OR_DIR)\n\n def test_parse2(self, parser: parseopt.Parser) -> None:\n args = parser.parse([py.path.local()])\n assert getattr(args, parseopt.FILE_OR_DIR)[0] == py.path.local()\n\n def test_parse_known_args(self, parser: parseopt.Parser) -> None:\n parser.parse_known_args([py.path.local()])\n parser.addoption(\"--hello\", action=\"store_true\")\n ns = parser.parse_known_args([\"x\", \"--y\", \"--hello\", \"this\"])\n assert ns.hello\n assert ns.file_or_dir == [\"x\"]\n\n def test_parse_known_and_unknown_args(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--hello\", action=\"store_true\")\n ns, unknown = parser.parse_known_and_unknown_args(\n [\"x\", \"--y\", \"--hello\", \"this\"]\n )\n assert ns.hello\n assert ns.file_or_dir == [\"x\"]\n assert unknown == [\"--y\", \"this\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_will_set_default_TestParser.test_parse_special_destination.assert_args_ultimate_answ": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_parseopt.py_TestParser.test_parse_will_set_default_TestParser.test_parse_special_destination.assert_args_ultimate_answ", "embedding": null, "metadata": {"file_path": "testing/test_parseopt.py", "file_name": "test_parseopt.py", "file_type": "text/x-python", "category": "test", "start_line": 144, "end_line": 165, "span_ids": ["TestParser.test_parse_setoption", "TestParser.test_parse_special_destination", "TestParser.test_parse_will_set_default"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParser:\n\n def test_parse_will_set_default(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--hello\", dest=\"hello\", default=\"x\", action=\"store\")\n option = parser.parse([])\n assert option.hello == \"x\"\n del option.hello\n parser.parse_setoption([], option)\n assert option.hello == \"x\"\n\n def test_parse_setoption(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--hello\", dest=\"hello\", action=\"store\")\n parser.addoption(\"--world\", dest=\"world\", default=42)\n\n option = argparse.Namespace()\n args = parser.parse_setoption([\"--hello\", \"world\"], option)\n assert option.hello == \"world\"\n assert option.world == 42\n assert not args\n\n def test_parse_special_destination(self, parser: parseopt.Parser) -> None:\n parser.addoption(\"--ultimate-answer\", type=int)\n args = parser.parse([\"--ultimate-answer\", \"42\"])\n assert args.ultimate_answer == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_runresult_assertion_on_xfail_make_holder.return.apiclass_apimod": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_runresult_assertion_on_xfail_make_holder.return.apiclass_apimod", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 119, "end_line": 188, "span_ids": ["test_runresult_assertion_on_xfail", "make_holder.apiclass.pytest_xyz", "test_runresult_assertion_on_xpassed", "test_xpassed_with_strict_is_considered_a_failure", "make_holder.apiclass", "make_holder"], "tokens": 372}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_runresult_assertion_on_xfail(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n pytest_plugins = \"pytester\"\n\n @pytest.mark.xfail\n def test_potato():\n assert False\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(xfailed=1)\n assert result.ret == 0\n\n\ndef test_runresult_assertion_on_xpassed(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n pytest_plugins = \"pytester\"\n\n @pytest.mark.xfail\n def test_potato():\n assert True\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(xpassed=1)\n assert result.ret == 0\n\n\ndef test_xpassed_with_strict_is_considered_a_failure(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n pytest_plugins = \"pytester\"\n\n @pytest.mark.xfail(strict=True)\n def test_potato():\n assert True\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(failed=1)\n assert result.ret != 0\n\n\ndef make_holder():\n class apiclass:\n def pytest_xyz(self, arg):\n \"\"\"X\"\"\"\n\n def pytest_xyz_noarg(self):\n \"\"\"X\"\"\"\n\n apimod = type(os)(\"api\")\n\n def pytest_xyz(arg):\n \"\"\"X\"\"\"\n\n def pytest_xyz_noarg():\n \"\"\"X\"\"\"\n\n apimod.pytest_xyz = pytest_xyz # type: ignore\n apimod.pytest_xyz_noarg = pytest_xyz_noarg # type: ignore\n return apiclass, apimod", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_with_nonlist_test_linematcher_with_nonlist.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_with_nonlist_test_linematcher_with_nonlist.None_3", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 481, "end_line": 502, "span_ids": ["test_linematcher_with_nonlist"], "tokens": 292}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_linematcher_with_nonlist() -> None:\n \"\"\"Test LineMatcher with regard to passing in a set (accidentally).\"\"\"\n from _pytest._code.source import Source\n\n lm = 
LineMatcher([])\n with pytest.raises(TypeError, match=\"invalid type for lines2: set\"):\n lm.fnmatch_lines(set()) # type: ignore[arg-type]\n with pytest.raises(TypeError, match=\"invalid type for lines2: dict\"):\n lm.fnmatch_lines({}) # type: ignore[arg-type]\n with pytest.raises(TypeError, match=\"invalid type for lines2: set\"):\n lm.re_match_lines(set()) # type: ignore[arg-type]\n with pytest.raises(TypeError, match=\"invalid type for lines2: dict\"):\n lm.re_match_lines({}) # type: ignore[arg-type]\n with pytest.raises(TypeError, match=\"invalid type for lines2: Source\"):\n lm.fnmatch_lines(Source()) # type: ignore[arg-type]\n lm.fnmatch_lines([])\n lm.fnmatch_lines(())\n lm.fnmatch_lines(\"\")\n assert lm._getlines({}) == {} # type: ignore[arg-type,comparison-overlap]\n assert lm._getlines(set()) == set() # type: ignore[arg-type,comparison-overlap]\n assert lm._getlines(Source()) == []\n assert lm._getlines(Source(\"pass\\npass\")) == [\"pass\", \"pass\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_match_failure_test_linematcher_match_failure.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_match_failure_test_linematcher_match_failure.None_3", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 505, "end_line": 530, "span_ids": ["test_linematcher_match_failure"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_linematcher_match_failure() -> None:\n lm = LineMatcher([\"foo\", \"foo\", \"bar\"])\n with pytest.raises(pytest.fail.Exception) as e:\n lm.fnmatch_lines([\"foo\", \"f*\", \"baz\"])\n assert e.value.msg is not None\n assert e.value.msg.splitlines() == [\n \"exact match: 'foo'\",\n \"fnmatch: 'f*'\",\n \" with: 'foo'\",\n \"nomatch: 'baz'\",\n \" and: 'bar'\",\n \"remains unmatched: 'baz'\",\n ]\n\n lm = LineMatcher([\"foo\", \"foo\", \"bar\"])\n with pytest.raises(pytest.fail.Exception) as e:\n lm.re_match_lines([\"foo\", \"^f.*\", \"baz\"])\n assert e.value.msg is not None\n assert e.value.msg.splitlines() == [\n \"exact match: 'foo'\",\n \"re.match: '^f.*'\",\n \" with: 'foo'\",\n \" nomatch: 'baz'\",\n \" and: 'bar'\",\n \"remains unmatched: 'baz'\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_consecutive_test_linematcher_consecutive.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_consecutive_test_linematcher_consecutive.None_1", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 539, "end_line": 556, "span_ids": ["test_linematcher_consecutive"], "tokens": 176}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_linematcher_consecutive() -> None:\n lm = LineMatcher([\"1\", \"\", \"2\"])\n with pytest.raises(pytest.fail.Exception) as excinfo:\n lm.fnmatch_lines([\"1\", \"2\"], consecutive=True)\n assert str(excinfo.value).splitlines() == [\n \"exact match: '1'\",\n \"no consecutive match: '2'\",\n \" with: ''\",\n ]\n\n lm.re_match_lines([\"1\", r\"\\d?\", \"2\"], consecutive=True)\n with pytest.raises(pytest.fail.Exception) as excinfo:\n lm.re_match_lines([\"1\", r\"\\d\", \"2\"], consecutive=True)\n assert str(excinfo.value).splitlines() == [\n \"exact match: '1'\",\n r\"no consecutive match: '\\\\d'\",\n \" with: ''\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_no_matching_test_linematcher_no_matching._bad_pattern_does_not_ma": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_no_matching_test_linematcher_no_matching._bad_pattern_does_not_ma", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 559, "end_line": 605, "span_ids": ["test_linematcher_no_matching"], "tokens": 401}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"function\", [\"no_fnmatch_line\", \"no_re_match_line\"])\ndef test_linematcher_no_matching(function: str) -> None:\n if function == \"no_fnmatch_line\":\n good_pattern = \"*.py OK*\"\n bad_pattern = \"*X.py OK*\"\n else:\n assert function == \"no_re_match_line\"\n good_pattern = r\".*py OK\"\n bad_pattern = r\".*Xpy OK\"\n\n lm = LineMatcher(\n [\n \"cachedir: .pytest_cache\",\n \"collecting ... collected 1 item\",\n \"\",\n \"show_fixtures_per_test.py OK\",\n \"=== elapsed 1s ===\",\n ]\n )\n\n # check the function twice to ensure we don't accumulate the internal buffer\n for i in range(2):\n with pytest.raises(pytest.fail.Exception) as e:\n func = getattr(lm, function)\n func(good_pattern)\n obtained = str(e.value).splitlines()\n if function == \"no_fnmatch_line\":\n assert obtained == [\n f\"nomatch: '{good_pattern}'\",\n \" and: 'cachedir: .pytest_cache'\",\n \" and: 'collecting ... collected 1 item'\",\n \" and: ''\",\n f\"fnmatch: '{good_pattern}'\",\n \" with: 'show_fixtures_per_test.py OK'\",\n ]\n else:\n assert obtained == [\n f\" nomatch: '{good_pattern}'\",\n \" and: 'cachedir: .pytest_cache'\",\n \" and: 'collecting ... 
collected 1 item'\",\n \" and: ''\",\n f\"re.match: '{good_pattern}'\",\n \" with: 'show_fixtures_per_test.py OK'\",\n ]\n\n func = getattr(lm, function)\n func(bad_pattern) # bad pattern does not match any line: passes", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_run_stdin_test_run_stdin.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_run_stdin_test_run_stdin.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 632, "end_line": 659, "span_ids": ["test_run_stdin"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_run_stdin(pytester: Pytester) -> None:\n with pytest.raises(pytester.TimeoutExpired):\n pytester.run(\n sys.executable,\n \"-c\",\n \"import sys, time; time.sleep(1); print(sys.stdin.read())\",\n stdin=subprocess.PIPE,\n timeout=0.1,\n )\n\n with pytest.raises(pytester.TimeoutExpired):\n result = pytester.run(\n sys.executable,\n \"-c\",\n \"import sys, time; time.sleep(1); print(sys.stdin.read())\",\n stdin=b\"input\\n2ndline\",\n timeout=0.1,\n )\n\n result = pytester.run(\n sys.executable,\n \"-c\",\n \"import sys; print(sys.stdin.read())\",\n stdin=b\"input\\n2ndline\",\n )\n assert result.stdout.lines == [\"input\", \"2ndline\"]\n assert result.stderr.str() == \"\"\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_stdin_pipe_test_popen_stdin_pipe.assert_proc_returncode_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_stdin_pipe_test_popen_stdin_pipe.assert_proc_returncode_", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 662, "end_line": 673, "span_ids": ["test_popen_stdin_pipe"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_popen_stdin_pipe(pytester: Pytester) -> None:\n proc = pytester.popen(\n [sys.executable, \"-c\", \"import sys; print(sys.stdin.read())\"],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdin=subprocess.PIPE,\n )\n stdin = b\"input\\n2ndline\"\n stdout, stderr = proc.communicate(input=stdin)\n assert stdout.decode(\"utf8\").splitlines() == [\"input\", \"2ndline\"]\n assert stderr == b\"\"\n assert proc.returncode == 0", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_stdin_bytes_test_popen_stdin_bytes.assert_proc_returncode_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_stdin_bytes_test_popen_stdin_bytes.assert_proc_returncode_", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 676, "end_line": 686, "span_ids": ["test_popen_stdin_bytes"], "tokens": 108}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_popen_stdin_bytes(pytester: Pytester) -> None:\n proc = pytester.popen(\n [sys.executable, \"-c\", \"import sys; print(sys.stdin.read())\"],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdin=b\"input\\n2ndline\",\n )\n stdout, stderr = proc.communicate()\n assert stdout.decode(\"utf8\").splitlines() == [\"input\", \"2ndline\"]\n assert stderr == b\"\"\n assert proc.returncode == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_default_stdin_stderr_and_stdin_None_test_popen_default_stdin_stderr_and_stdin_None.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_popen_default_stdin_stderr_and_stdin_None_test_popen_default_stdin_stderr_and_stdin_None.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 689, "end_line": 716, "span_ids": ["test_popen_default_stdin_stderr_and_stdin_None"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_popen_default_stdin_stderr_and_stdin_None(pytester: Pytester) -> None:\n # stdout, stderr default to pipes,\n # stdin can be None to not close the pipe, avoiding\n # \"ValueError: flush of closed file\" with `communicate()`.\n #\n # Wraps the test to make it not hang when run with \"-s\".\n p1 = pytester.makepyfile(\n '''\n import sys\n\n def test_inner(pytester):\n p1 = pytester.makepyfile(\n \"\"\"\n import sys\n print(sys.stdin.read()) # empty\n print('stdout')\n sys.stderr.write('stderr')\n \"\"\"\n )\n proc = pytester.popen([sys.executable, str(p1)], stdin=None)\n stdout, stderr = proc.communicate(b\"ignored\")\n assert stdout.splitlines() == [b\"\", b\"stdout\"]\n assert stderr.splitlines() == [b\"stderr\"]\n assert proc.returncode == 0\n '''\n )\n result = pytester.runpytest(\"-p\", \"pytester\", str(p1))\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_run_result_repr_test_run_result_repr.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_run_result_repr_test_run_result_repr.None_1", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 741, "end_line": 757, "span_ids": ["test_run_result_repr"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_run_result_repr() -> None:\n outlines = [\"some\", \"normal\", \"output\"]\n errlines = [\"some\", \"nasty\", \"errors\", \"happened\"]\n\n # known exit code\n r = pytester_mod.RunResult(1, outlines, errlines, duration=0.5)\n assert (\n repr(r) == \"\"\n )\n\n # unknown exit code: just the number\n r = pytester_mod.RunResult(99, outlines, errlines, duration=0.5)\n assert (\n repr(r) == \"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall_TestDeprecatedCall.test_deprecated_call_ret.assert_ret_42": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall_TestDeprecatedCall.test_deprecated_call_ret.assert_ret_42", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 76, "end_line": 99, "span_ids": ["TestDeprecatedCall.test_deprecated_call_raises", "TestDeprecatedCall.dep", "TestDeprecatedCall.test_deprecated_call", "TestDeprecatedCall.test_deprecated_call_ret", "TestDeprecatedCall", "TestDeprecatedCall.dep_explicit"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecatedCall:\n \"\"\"test pytest.deprecated_call()\"\"\"\n\n def dep(self, i: int, j: Optional[int] = None) -> int:\n if i == 0:\n warnings.warn(\"is deprecated\", DeprecationWarning, stacklevel=1)\n return 42\n\n def dep_explicit(self, i: int) -> None:\n if i == 0:\n warnings.warn_explicit(\n \"dep_explicit\", category=DeprecationWarning, filename=\"hello\", lineno=3\n )\n\n def test_deprecated_call_raises(self) -> None:\n with pytest.raises(pytest.fail.Exception, match=\"No warnings of type\"):\n pytest.deprecated_call(self.dep, 3, 5)\n\n def test_deprecated_call(self) -> None:\n pytest.deprecated_call(self.dep, 0, 5)\n\n def test_deprecated_call_ret(self) -> None:\n ret = pytest.deprecated_call(self.dep, 0)\n assert ret == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_preserves_TestDeprecatedCall.test_deprecated_call_preserves.assert_warn_explicit_is_w": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_call_preserves_TestDeprecatedCall.test_deprecated_call_preserves.assert_warn_explicit_is_w", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 101, "end_line": 113, "span_ids": ["TestDeprecatedCall.test_deprecated_call_preserves"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecatedCall:\n\n def test_deprecated_call_preserves(self) -> None:\n # Type ignored because `onceregistry` and `filters` are not\n # documented API.\n onceregistry = warnings.onceregistry.copy() # type: ignore\n filters = warnings.filters[:] # type: ignore\n warn = warnings.warn\n warn_explicit = warnings.warn_explicit\n self.test_deprecated_call_raises()\n self.test_deprecated_call()\n assert onceregistry == warnings.onceregistry # type: ignore\n assert filters == warnings.filters # type: ignore\n assert warn is warnings.warn\n assert warn_explicit is warnings.warn_explicit", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_explicit_call_raises_TestDeprecatedCall.test_deprecated_call_no_warning.with_pytest_raises_pytest.if_mode_call_.else_.with_pytest_deprecated_ca.f_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestDeprecatedCall.test_deprecated_explicit_call_raises_TestDeprecatedCall.test_deprecated_call_no_warning.with_pytest_raises_pytest.if_mode_call_.else_.with_pytest_deprecated_ca.f_", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 115, "end_line": 138, "span_ids": ["TestDeprecatedCall.test_deprecated_explicit_call_raises", "TestDeprecatedCall.test_deprecated_call_no_warning", "TestDeprecatedCall.test_deprecated_explicit_call"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecatedCall:\n\n def test_deprecated_explicit_call_raises(self) -> None:\n with pytest.raises(pytest.fail.Exception):\n pytest.deprecated_call(self.dep_explicit, 3)\n\n def test_deprecated_explicit_call(self) -> None:\n pytest.deprecated_call(self.dep_explicit, 0)\n pytest.deprecated_call(self.dep_explicit, 0)\n\n @pytest.mark.parametrize(\"mode\", [\"context_manager\", \"call\"])\n def test_deprecated_call_no_warning(self, mode) -> None:\n \"\"\"Ensure 
deprecated_call() raises the expected failure when its block/function does\n not raise a deprecation warning.\n \"\"\"\n\n def f():\n pass\n\n msg = \"No warnings of type (.*DeprecationWarning.*, .*PendingDeprecationWarning.*)\"\n with pytest.raises(pytest.fail.Exception, match=msg):\n if mode == \"call\":\n pytest.deprecated_call(f)\n else:\n with pytest.deprecated_call():\n f()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_double_test_TestWarns.test_double_test.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_double_test_TestWarns.test_double_test.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 332, "end_line": 346, "span_ids": ["TestWarns.test_double_test"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarns:\n\n def test_double_test(self, pytester: Pytester) -> None:\n \"\"\"If a test is run again, the warning should still be raised\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n import warnings\n\n @pytest.mark.parametrize('run', [1, 2])\n def test(run):\n with pytest.warns(RuntimeWarning):\n warnings.warn(\"runtime\", RuntimeWarning)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed in*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_match_regex_TestWarns.test_match_regex.None_2.with_pytest_warns_FutureW.warnings_warn_value_must": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarns.test_match_regex_TestWarns.test_match_regex.None_2.with_pytest_warns_FutureW.warnings_warn_value_must", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 347, "end_line": 357, "span_ids": ["TestWarns.test_match_regex"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarns:\n\n def test_match_regex(self) -> None:\n with pytest.warns(UserWarning, match=r\"must be \\d+$\"):\n warnings.warn(\"value must be 42\", UserWarning)\n\n with pytest.raises(pytest.fail.Exception):\n with pytest.warns(UserWarning, match=r\"must be \\d+$\"):\n warnings.warn(\"this is not here\", UserWarning)\n\n with pytest.raises(pytest.fail.Exception):\n with pytest.warns(FutureWarning, match=r\"must be \\d+$\"):\n warnings.warn(\"value must be 
42\", UserWarning)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash_TestReportSerialization.test_chained_exceptions_no_reprcrash.reports.reprec_getreports_pytest": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash_TestReportSerialization.test_chained_exceptions_no_reprcrash.reports.reprec_getreports_pytest", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 344, "end_line": 367, "span_ids": ["TestReportSerialization.test_chained_exceptions_no_reprcrash"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_chained_exceptions_no_reprcrash(self, pytester: Pytester, tw_mock) -> None:\n \"\"\"Regression test for tracebacks without a reprcrash (#5971)\n\n This happens notably on exceptions raised by multiprocess.pool: the exception transfer\n from subprocess to main process creates an artificial exception, which ExceptionInfo\n can't obtain the ReprFileLocation from.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n from concurrent.futures import ProcessPoolExecutor\n\n def func():\n raise ValueError('value error')\n\n def test_a():\n with ProcessPoolExecutor() as p:\n p.submit(func).result()\n \"\"\"\n )\n\n pytester.syspathinsert()\n reprec = pytester.inline_run()\n\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_systemexit_does_not_bail_out_TestExecutionNonForked.test_keyboardinterrupt_propagates.try_.else_.assert_False_did_not_ra": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_systemexit_does_not_bail_out_TestExecutionNonForked.test_keyboardinterrupt_propagates.try_.else_.assert_False_did_not_ra", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 385, "end_line": 432, "span_ids": ["TestExecutionNonForked.getrunner", "TestExecutionNonForked.test_keyboardinterrupt_propagates", "TestExecutionNonForked", "BaseFunctionalTests.test_exit_propagates", "BaseFunctionalTests.test_systemexit_does_not_bail_out"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_systemexit_does_not_bail_out(self, pytester: Pytester) -> None:\n try:\n reports = pytester.runitem(\n \"\"\"\n def test_func():\n raise SystemExit(42)\n \"\"\"\n )\n except SystemExit:\n assert False, \"runner did not catch SystemExit\"\n rep = reports[1]\n assert rep.failed\n assert rep.when == \"call\"\n\n def test_exit_propagates(self, pytester: Pytester) -> None:\n try:\n pytester.runitem(\n \"\"\"\n import pytest\n def test_func():\n raise pytest.exit.Exception()\n \"\"\"\n )\n except pytest.exit.Exception:\n pass\n else:\n assert False, \"did not raise\"\n\n\nclass TestExecutionNonForked(BaseFunctionalTests):\n def getrunner(self):\n def f(item):\n return runner.runtestprotocol(item, log=False)\n\n return f\n\n def test_keyboardinterrupt_propagates(self, pytester: Pytester) -> None:\n try:\n pytester.runitem(\n \"\"\"\n def test_func():\n raise KeyboardInterrupt(\"fake\")\n \"\"\"\n )\n except KeyboardInterrupt:\n pass\n else:\n assert False, \"did not raise\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_module_level_test_importorskip_custom_reason.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_module_level_test_importorskip_custom_reason.None_2", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 749, "end_line": 777, "span_ids": ["test_importorskip_custom_reason", "test_importorskip_module_level"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def test_importorskip_module_level(pytester: Pytester) -> None:\n \"\"\"`importorskip` must be able to skip entire modules when used at module level.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n foobarbaz = pytest.importorskip(\"foobarbaz\")\n\n def test_foo():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*collected 0 items / 1 skipped*\"])\n\n\ndef test_importorskip_custom_reason(pytester: Pytester) -> None:\n \"\"\"Make sure custom reasons are used.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n foobarbaz = pytest.importorskip(\"foobarbaz2\", reason=\"just because\")\n\n def test_foo():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-ra\")\n result.stdout.fnmatch_lines([\"*just because*\"])\n result.stdout.fnmatch_lines([\"*collected 0 items / 1 skipped*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_cmdline_main_test_unicode_in_longrepr.assert_UnicodeEncodeErro": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_cmdline_main_test_unicode_in_longrepr.assert_UnicodeEncodeErro", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 780, "end_line": 818, "span_ids": ["test_pytest_cmdline_main", "test_unicode_in_longrepr"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_cmdline_main(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_hello():\n assert 1\n if __name__ == '__main__':\n pytest.cmdline.main([__file__])\n \"\"\"\n )\n import subprocess\n\n popen = subprocess.Popen([sys.executable, str(p)], stdout=subprocess.PIPE)\n popen.communicate()\n ret = popen.wait()\n assert ret == 0\n\n\ndef test_unicode_in_longrepr(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\\\n import pytest\n @pytest.hookimpl(hookwrapper=True)\n def pytest_runtest_makereport():\n outcome = yield\n rep = outcome.get_result()\n if rep.when == \"call\":\n rep.longrepr = '\u00e4'\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_out():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 1\n assert \"UnicodeEncodeError\" not in result.stderr.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_store_except_info_on_error_test_store_except_info_on_error.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_store_except_info_on_error_test_store_except_info_on_error.None_6", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 880, "end_line": 907, "span_ids": 
["test_store_except_info_on_error", "test_store_except_info_on_error.ItemMightRaise", "test_store_except_info_on_error.ItemMightRaise:2"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_store_except_info_on_error() -> None:\n \"\"\"Test that upon test failure, the exception info is stored on\n sys.last_traceback and friends.\"\"\"\n # Simulate item that might raise a specific exception, depending on `raise_error` class var\n class ItemMightRaise:\n nodeid = \"item_that_raises\"\n raise_error = True\n\n def runtest(self):\n if self.raise_error:\n raise IndexError(\"TEST\")\n\n try:\n runner.pytest_runtest_call(ItemMightRaise()) # type: ignore[arg-type]\n except IndexError:\n pass\n # Check that exception info is stored on sys\n assert sys.last_type is IndexError\n assert isinstance(sys.last_value, IndexError)\n assert sys.last_value.args[0] == \"TEST\"\n assert sys.last_traceback\n\n # The next run should clear the exception info stored by the previous run\n ItemMightRaise.raise_error = False\n runner.pytest_runtest_call(ItemMightRaise()) # type: ignore[arg-type]\n assert not hasattr(sys, \"last_type\")\n assert not hasattr(sys, \"last_value\")\n assert not hasattr(sys, \"last_traceback\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_test_show_multi_test_fixture_setup_and_teardown_correctly_simple_test_show_multi_test_fixture_setup_and_teardown_correctly_simple.assert_teardown_count_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_test_show_multi_test_fixture_setup_and_teardown_correctly_simple_test_show_multi_test_fixture_setup_and_teardown_correctly_simple.assert_teardown_count_", "embedding": null, "metadata": {"file_path": "testing/test_setupplan.py", "file_name": "test_setupplan.py", "file_type": "text/x-python", "category": "test", "start_line": 27, "end_line": 73, "span_ids": ["test_show_multi_test_fixture_setup_and_teardown_correctly_simple"], "tokens": 360}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_multi_test_fixture_setup_and_teardown_correctly_simple(\n pytester: Pytester,\n) -> None:\n \"\"\"Verify that when a fixture lives for longer than a single test, --setup-plan\n correctly displays the SETUP/TEARDOWN indicators the right number of times.\n\n As reported in https://github.com/pytest-dev/pytest/issues/2049\n --setup-plan was showing SETUP/TEARDOWN on every test, even when the fixture\n should persist through multiple tests.\n\n (Note that this bug never affected actual test execution, which used the\n correct fixture lifetimes. 
It was purely a display bug for --setup-plan, and\n did not affect the related --setup-show or --setup-only.)\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope = 'class')\n def fix():\n return object()\n class TestClass:\n def test_one(self, fix):\n assert False\n def test_two(self, fix):\n assert False\n \"\"\"\n )\n\n result = pytester.runpytest(\"--setup-plan\")\n assert result.ret == 0\n\n setup_fragment = \"SETUP C fix\"\n setup_count = 0\n\n teardown_fragment = \"TEARDOWN C fix\"\n teardown_count = 0\n\n for line in result.stdout.lines:\n if setup_fragment in line:\n setup_count += 1\n if teardown_fragment in line:\n teardown_count += 1\n\n # before the fix this tests, there would have been a setup/teardown\n # message for each test, so the counts would each have been 2\n assert setup_count == 1\n assert teardown_count == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_test_show_multi_test_fixture_setup_and_teardown_same_as_setup_show_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_test_show_multi_test_fixture_setup_and_teardown_same_as_setup_show_", "embedding": null, "metadata": {"file_path": "testing/test_setupplan.py", "file_name": "test_setupplan.py", "file_type": "text/x-python", "category": "test", "start_line": 76, "end_line": 121, "span_ids": ["test_show_multi_test_fixture_setup_and_teardown_same_as_setup_show"], "tokens": 304}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_multi_test_fixture_setup_and_teardown_same_as_setup_show(\n pytester: Pytester,\n) -> None:\n \"\"\"Verify that SETUP/TEARDOWN messages match what comes out of --setup-show.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(scope = 'session')\n def sess():\n return True\n @pytest.fixture(scope = 'module')\n def mod():\n return True\n @pytest.fixture(scope = 'class')\n def cls():\n return True\n @pytest.fixture(scope = 'function')\n def func():\n return True\n def test_outside(sess, mod, cls, func):\n assert True\n class TestCls:\n def test_one(self, sess, mod, cls, func):\n assert True\n def test_two(self, sess, mod, cls, func):\n assert True\n \"\"\"\n )\n\n plan_result = pytester.runpytest(\"--setup-plan\")\n show_result = pytester.runpytest(\"--setup-show\")\n\n # the number and text of these lines should be identical\n plan_lines = [\n line\n for line in plan_result.stdout.lines\n if \"SETUP\" in line or \"TEARDOWN\" in line\n ]\n show_lines = [\n line\n for line in show_result.stdout.lines\n if \"SETUP\" in line or \"TEARDOWN\" in line\n ]\n\n assert plan_lines == show_lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_store.py_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_store.py_pytest_", "embedding": null, "metadata": {"file_path": 
"testing/test_store.py", "file_name": "test_store.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 61, "span_ids": ["test_store", "imports"], "tokens": 459}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom _pytest.store import Store\nfrom _pytest.store import StoreKey\n\n\ndef test_store() -> None:\n store = Store()\n\n key1 = StoreKey[str]()\n key2 = StoreKey[int]()\n\n # Basic functionality - single key.\n assert key1 not in store\n store[key1] = \"hello\"\n assert key1 in store\n assert store[key1] == \"hello\"\n assert store.get(key1, None) == \"hello\"\n store[key1] = \"world\"\n assert store[key1] == \"world\"\n # Has correct type (no mypy error).\n store[key1] + \"string\"\n\n # No interaction with another key.\n assert key2 not in store\n assert store.get(key2, None) is None\n with pytest.raises(KeyError):\n store[key2]\n with pytest.raises(KeyError):\n del store[key2]\n store[key2] = 1\n assert store[key2] == 1\n # Has correct type (no mypy error).\n store[key2] + 20\n del store[key1]\n with pytest.raises(KeyError):\n del store[key1]\n with pytest.raises(KeyError):\n store[key1]\n\n # setdefault\n store[key1] = \"existing\"\n assert store.setdefault(key1, \"default\") == \"existing\"\n assert store[key1] == \"existing\"\n key_setdefault = StoreKey[bytes]()\n assert store.setdefault(key_setdefault, b\"default\") == b\"default\"\n assert store[key_setdefault] == b\"default\"\n\n # Can't accidentally add attributes to store object itself.\n with pytest.raises(AttributeError):\n store.foo = \"nope\" # type: ignore[attr-defined]\n\n # No interaction with anoter store.\n store2 = Store()\n key3 = StoreKey[int]()\n assert key2 not in store2\n store2[key2] = 100\n store2[key3] = 200\n assert store2[key2] + store2[key3] == 300\n assert store[key2] == 1\n assert key3 not in store", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_itemreport_subclasses_show_subclassed_file_TestTerminal.test_itemreport_subclasses_show_subclassed_file.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_itemreport_subclasses_show_subclassed_file_TestTerminal.test_itemreport_subclasses_show_subclassed_file.None_3", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 184, "end_line": 235, "span_ids": ["TestTerminal.test_itemreport_subclasses_show_subclassed_file"], "tokens": 436}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_itemreport_subclasses_show_subclassed_file(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n **{\n \"tests/test_p1\": \"\"\"\n class 
BaseTests(object):\n fail = False\n\n def test_p1(self):\n if self.fail: assert 0\n \"\"\",\n \"tests/test_p2\": \"\"\"\n from test_p1 import BaseTests\n\n class TestMore(BaseTests): pass\n \"\"\",\n \"tests/test_p3.py\": \"\"\"\n from test_p1 import BaseTests\n\n BaseTests.fail = True\n\n class TestMore(BaseTests): pass\n \"\"\",\n }\n )\n result = pytester.runpytest(\"tests/test_p2.py\", \"--rootdir=tests\")\n result.stdout.fnmatch_lines([\"tests/test_p2.py .*\", \"=* 1 passed in *\"])\n\n result = pytester.runpytest(\"-vv\", \"-rA\", \"tests/test_p2.py\", \"--rootdir=tests\")\n result.stdout.fnmatch_lines(\n [\n \"tests/test_p2.py::TestMore::test_p1 <- test_p1.py PASSED *\",\n \"*= short test summary info =*\",\n \"PASSED tests/test_p2.py::TestMore::test_p1\",\n ]\n )\n result = pytester.runpytest(\"-vv\", \"-rA\", \"tests/test_p3.py\", \"--rootdir=tests\")\n result.stdout.fnmatch_lines(\n [\n \"tests/test_p3.py::TestMore::test_p1 <- test_p1.py FAILED *\",\n \"*_ TestMore.test_p1 _*\",\n \" def test_p1(self):\",\n \"> if self.fail: assert 0\",\n \"E assert 0\",\n \"\",\n \"tests/test_p1.py:5: AssertionError\",\n \"*= short test summary info =*\",\n \"FAILED tests/test_p3.py::TestMore::test_p1 - assert 0\",\n \"*= 1 failed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_report_teststatus_explicit_markup_TestTerminal.test_report_teststatus_explicit_markup.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_report_teststatus_explicit_markup_TestTerminal.test_report_teststatus_explicit_markup.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 326, "end_line": 347, "span_ids": ["TestTerminal.test_report_teststatus_explicit_markup"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_report_teststatus_explicit_markup(\n self, monkeypatch: MonkeyPatch, pytester: Pytester, color_mapping\n ) -> None:\n \"\"\"Test that TerminalReporter handles markup explicitly provided by\n a pytest_report_teststatus hook.\"\"\"\n monkeypatch.setenv(\"PY_COLORS\", \"1\")\n pytester.makeconftest(\n \"\"\"\n def pytest_report_teststatus(report):\n return 'foo', 'F', ('FOO', {'red': True})\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_foobar():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n color_mapping.format_for_fnmatch([\"*{red}FOO{reset}*\"])\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly_TestCollectonly.test_collectonly_skipped_module.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly_TestCollectonly.test_collectonly_skipped_module.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 408, "end_line": 429, "span_ids": ["TestCollectonly.test_collectonly_basic", "TestCollectonly.test_collectonly_skipped_module", "TestCollectonly"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectonly:\n def test_collectonly_basic(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines(\n [\"\", \" \"]\n )\n\n def test_collectonly_skipped_module(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n pytest.skip(\"hello\")\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\", \"-rs\")\n result.stdout.fnmatch_lines([\"*ERROR collecting*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_displays_test_description_TestCollectonly.test_collectonly_displays_test_description.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_displays_test_description_TestCollectonly.test_collectonly_displays_test_description.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 431, "end_line": 457, "span_ids": ["TestCollectonly.test_collectonly_displays_test_description"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectonly:\n\n def test_collectonly_displays_test_description(\n self, pytester: Pytester, dummy_yaml_custom_test\n ) -> None:\n \"\"\"Used dummy_yaml_custom_test for an Item without ``obj``.\"\"\"\n pytester.makepyfile(\n \"\"\"\n def test_with_description():\n ''' This test has a description.\n\n more1.\n more2.'''\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\", \"--verbose\")\n result.stdout.fnmatch_lines(\n [\n \"\",\n \" \",\n \"\",\n \" \",\n \" This test has a description.\",\n \" \",\n \" more1.\",\n \" more2.\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_failed_module_TestCollectonly.test_collectonly_fatal.assert_result_ret_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collectonly_failed_module_TestCollectonly.test_collectonly_fatal.assert_result_ret_3", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 459, "end_line": 473, "span_ids": ["TestCollectonly.test_collectonly_failed_module", "TestCollectonly.test_collectonly_fatal"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectonly:\n\n def test_collectonly_failed_module(self, pytester: Pytester) -> None:\n pytester.makepyfile(\"\"\"raise ValueError(0)\"\"\")\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*raise ValueError*\", \"*1 error*\"])\n\n def test_collectonly_fatal(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_collectstart(collector):\n assert 0, \"urgs\"\n \"\"\"\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*INTERNAL*args*\"])\n assert result.ret == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_showlocals_short_TestTerminalFunctional.test_showlocals_short.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_showlocals_short_TestTerminalFunctional.test_showlocals_short.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 943, "end_line": 961, "span_ids": ["TestTerminalFunctional.test_showlocals_short"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_showlocals_short(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_showlocals_short():\n x = 3\n y = \"xxxx\"\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p1, \"-l\", \"--tb=short\")\n result.stdout.fnmatch_lines(\n [\n \"test_showlocals_short.py:*\",\n \" assert 0\",\n \"E assert 0\",\n \" x = 3\",\n \" y = 'xxxx'\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.verbose_testfile_TestTerminalFunctional.test_verbose_reporting.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.verbose_testfile_TestTerminalFunctional.test_verbose_reporting.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 963, "end_line": 994, "span_ids": ["TestTerminalFunctional.test_verbose_reporting", "TestTerminalFunctional.verbose_testfile"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n @pytest.fixture\n def verbose_testfile(self, pytester: Pytester) -> Path:\n return pytester.makepyfile(\n \"\"\"\n import pytest\n def test_fail():\n raise ValueError()\n def test_pass():\n pass\n class TestClass(object):\n def test_skip(self):\n pytest.skip(\"hello\")\n def test_gen():\n def check(x):\n assert x == 1\n yield check, 0\n \"\"\"\n )\n\n def test_verbose_reporting(self, verbose_testfile, pytester: Pytester) -> None:\n result = pytester.runpytest(\n verbose_testfile, \"-v\", \"-Walways::pytest.PytestWarning\"\n )\n result.stdout.fnmatch_lines(\n [\n \"*test_verbose_reporting.py::test_fail *FAIL*\",\n \"*test_verbose_reporting.py::test_pass *PASS*\",\n \"*test_verbose_reporting.py::TestClass::test_skip *SKIP*\",\n \"*test_verbose_reporting.py::test_gen *XFAIL*\",\n ]\n )\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_summary_f_alias_TestTerminalFunctional.test_summary_f_alias.assert_result_stdout_line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_summary_f_alias_TestTerminalFunctional.test_summary_f_alias.assert_result_stdout_line", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1054, "end_line": 1065, "span_ids": ["TestTerminalFunctional.test_summary_f_alias"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_summary_f_alias(self, pytester: Pytester) -> None:\n \"\"\"Test that 'f' and 'F' report chars are aliases and don't show up twice in the summary (#6334)\"\"\"\n pytester.makepyfile(\n \"\"\"\n def test():\n assert False\n \"\"\"\n )\n result = pytester.runpytest(\"-rfF\")\n expected = \"FAILED test_summary_f_alias.py::test - assert False\"\n result.stdout.fnmatch_lines([expected])\n assert result.stdout.lines.count(expected) == 1", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_summary_s_alias_TestTerminalFunctional.test_summary_s_alias.assert_result_stdout_line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_summary_s_alias_TestTerminalFunctional.test_summary_s_alias.assert_result_stdout_line", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1067, "end_line": 1081, "span_ids": ["TestTerminalFunctional.test_summary_s_alias"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_summary_s_alias(self, pytester: Pytester) -> None:\n \"\"\"Test that 's' and 'S' report chars are aliases and don't show up twice in the summary\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.skip\n def test():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rsS\")\n expected = \"SKIPPED [1] test_summary_s_alias.py:3: unconditional skip\"\n result.stdout.fnmatch_lines([expected])\n assert result.stdout.lines.count(expected) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_color_no_test_color_yes_collection_on_non_atty.assert_collected_10_item": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_color_no_test_color_yes_collection_on_non_atty.assert_collected_10_item", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1217, "end_line": 1244, "span_ids": ["test_color_no", "test_color_yes_collection_on_non_atty"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_color_no(pytester: Pytester) -> None:\n pytester.makepyfile(\"def test_this(): assert 1\")\n result = pytester.runpytest(\"--color=no\")\n assert \"test session starts\" in result.stdout.str()\n result.stdout.no_fnmatch_line(\"*\\x1b[1m*\")\n\n\n@pytest.mark.parametrize(\"verbose\", [True, False])\ndef test_color_yes_collection_on_non_atty(pytester: Pytester, verbose) -> None:\n \"\"\"#1397: Skip collect progress report when working on non-terminals.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(10))\n def test_this(i):\n assert 1\n \"\"\"\n )\n args = [\"--color=yes\"]\n if verbose:\n args.append(\"-vv\")\n result = pytester.runpytest(*args)\n assert \"test 
session starts\" in result.stdout.str()\n assert \"\\x1b[1m\" in result.stdout.str()\n result.stdout.no_fnmatch_line(\"*collecting 10 items*\")\n if verbose:\n assert \"collecting ...\" in result.stdout.str()\n assert \"collected 10 items\" in result.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_getreportopt_test_getreportopt.None_14": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_getreportopt_test_getreportopt.None_14", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1102, "end_line": 1158, "span_ids": ["test_getreportopt", "test_getreportopt.FakeConfig.Option:2", "test_getreportopt.FakeConfig"], "tokens": 394}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_getreportopt() -> None:\n from _pytest.terminal import _REPORTCHARS_DEFAULT\n\n class FakeConfig:\n class Option:\n reportchars = _REPORTCHARS_DEFAULT\n disable_warnings = False\n\n option = Option()\n\n config = cast(Config, FakeConfig())\n\n assert _REPORTCHARS_DEFAULT == \"fE\"\n\n # Default.\n assert getreportopt(config) == \"wfE\"\n\n config.option.reportchars = \"sf\"\n assert getreportopt(config) == \"wsf\"\n\n config.option.reportchars = \"sfxw\"\n assert getreportopt(config) == \"sfxw\"\n\n config.option.reportchars = \"a\"\n assert getreportopt(config) == \"wsxXEf\"\n\n config.option.reportchars = \"N\"\n assert getreportopt(config) == \"w\"\n\n config.option.reportchars = \"NwfE\"\n assert getreportopt(config) == \"wfE\"\n\n config.option.reportchars = \"NfENx\"\n assert getreportopt(config) == \"wx\"\n\n # Now with --disable-warnings.\n config.option.disable_warnings = True\n config.option.reportchars = \"a\"\n assert getreportopt(config) == \"sxXEf\"\n\n config.option.reportchars = \"sfx\"\n assert getreportopt(config) == \"sfx\"\n\n config.option.reportchars = \"sfxw\"\n assert getreportopt(config) == \"sfx\"\n\n config.option.reportchars = \"a\"\n assert getreportopt(config) == \"sxXEf\"\n\n config.option.reportchars = \"A\"\n assert getreportopt(config) == \"PpsxXEf\"\n\n config.option.reportchars = \"AN\"\n assert getreportopt(config) == \"\"\n\n config.option.reportchars = \"NwfE\"\n assert getreportopt(config) == \"fE\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_maxfailures_with_interrupted_TestGenericReporting.test_maxfailures_with_interrupted.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestGenericReporting.test_maxfailures_with_interrupted_TestGenericReporting.test_maxfailures_with_interrupted.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": 
"test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1387, "end_line": 1404, "span_ids": ["TestGenericReporting.test_maxfailures_with_interrupted"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGenericReporting:\n\n def test_maxfailures_with_interrupted(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test(request):\n request.session.shouldstop = \"session_interrupted\"\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"--maxfail=1\", \"-ra\")\n result.stdout.fnmatch_lines(\n [\n \"*= short test summary info =*\",\n \"FAILED *\",\n \"*! stopping after 1 failures !*\",\n \"*! session_interrupted !*\",\n \"*= 1 failed in*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_colored_progress_TestProgressOutputStyle.test_colored_progress.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle.test_colored_progress_TestProgressOutputStyle.test_colored_progress.None_3", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1973, "end_line": 2023, "span_ids": ["TestProgressOutputStyle.test_colored_progress"], "tokens": 452}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n\n def test_colored_progress(\n self, pytester: Pytester, monkeypatch, color_mapping\n ) -> None:\n monkeypatch.setenv(\"PY_COLORS\", \"1\")\n pytester.makepyfile(\n test_axfail=\"\"\"\n import pytest\n @pytest.mark.xfail\n def test_axfail(): assert 0\n \"\"\",\n test_bar=\"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(10))\n def test_bar(i): pass\n \"\"\",\n test_foo=\"\"\"\n import pytest\n import warnings\n @pytest.mark.parametrize('i', range(5))\n def test_foo(i):\n warnings.warn(DeprecationWarning(\"collection\"))\n pass\n \"\"\",\n test_foobar=\"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(5))\n def test_foobar(i): raise ValueError()\n \"\"\",\n )\n result = pytester.runpytest()\n result.stdout.re_match_lines(\n color_mapping.format_for_rematch(\n [\n r\"test_axfail.py {yellow}x{reset}{green} \\s+ \\[ 4%\\]{reset}\",\n r\"test_bar.py ({green}\\.{reset}){{10}}{green} \\s+ \\[ 52%\\]{reset}\",\n r\"test_foo.py ({green}\\.{reset}){{5}}{yellow} \\s+ \\[ 76%\\]{reset}\",\n r\"test_foobar.py ({red}F{reset}){{5}}{red} \\s+ \\[100%\\]{reset}\",\n ]\n )\n )\n\n # Only xfail should have yellow progress indicator.\n result = pytester.runpytest(\"test_axfail.py\")\n result.stdout.re_match_lines(\n color_mapping.format_for_rematch(\n [\n r\"test_axfail.py {yellow}x{reset}{yellow} \\s+ 
\\[100%\\]{reset}\",\n r\"^{yellow}=+ ({yellow}{bold}|{bold}{yellow})1 xfailed{reset}{yellow} in \",\n ]\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCodeHighlight_TestCodeHighlight.test_code_highlight_simple.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCodeHighlight_TestCodeHighlight.test_code_highlight_simple.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2369, "end_line": 2386, "span_ids": ["TestCodeHighlight.test_code_highlight_simple", "TestCodeHighlight"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCodeHighlight:\n def test_code_highlight_simple(self, pytester: Pytester, color_mapping) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n assert 1 == 10\n \"\"\"\n )\n result = pytester.runpytest(\"--color=yes\")\n result.stdout.fnmatch_lines(\n color_mapping.format_for_fnmatch(\n [\n \" {kw}def{hl-reset} {function}test_foo{hl-reset}():\",\n \"> {kw}assert{hl-reset} {number}1{hl-reset} == {number}10{hl-reset}\",\n \"{bold}{red}E assert 1 == 10{reset}\",\n ]\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestConfigTmpdir_testdata._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_TestConfigTmpdir_testdata._", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 98, "span_ids": ["impl", "TestConfigTmpdir.test_getbasetemp_custom_removes_old", "TestConfigTmpdir"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigTmpdir:\n def test_getbasetemp_custom_removes_old(self, pytester: Pytester) -> None:\n mytemp = pytester.path.joinpath(\"xyz\")\n p = pytester.makepyfile(\n \"\"\"\n def test_1(tmpdir):\n pass\n \"\"\"\n )\n pytester.runpytest(p, \"--basetemp=%s\" % mytemp)\n assert mytemp.exists()\n mytemp.joinpath(\"hello\").touch()\n\n pytester.runpytest(p, \"--basetemp=%s\" % mytemp)\n assert mytemp.exists()\n assert not mytemp.joinpath(\"hello\").exists()\n\n\ntestdata = [\n (\"mypath\", True),\n (\"/mypath1\", False),\n (\"./mypath1\", True),\n (\"../mypath3\", False),\n (\"../../mypath4\", False),\n (\"mypath5/..\", False),\n (\"mypath6/../mypath6\", True),\n (\"mypath7/../mypath7/..\", False),\n]", "start_char_idx": 
null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_mktemp_test_mktemp.if_is_ok_.else_.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_tmpdir.py_test_mktemp_test_mktemp.if_is_ok_.else_.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_tmpdir.py", "file_name": "test_tmpdir.py", "file_type": "text/x-python", "category": "test", "start_line": 101, "end_line": 119, "span_ids": ["test_mktemp"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"basename, is_ok\", testdata)\ndef test_mktemp(pytester: Pytester, basename: str, is_ok: bool) -> None:\n mytemp = pytester.mkdir(\"mytemp\")\n p = pytester.makepyfile(\n \"\"\"\n def test_abs_path(tmpdir_factory):\n tmpdir_factory.mktemp('{}', numbered=False)\n \"\"\".format(\n basename\n )\n )\n\n result = pytester.runpytest(p, \"--basetemp=%s\" % mytemp)\n if is_ok:\n assert result.ret == 0\n assert mytemp.joinpath(basename).exists()\n else:\n assert result.ret == 1\n result.stdout.fnmatch_lines(\"*ValueError*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_cleanup_functions_test_cleanup_functions.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_cleanup_functions_test_cleanup_functions.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 886, "end_line": 914, "span_ids": ["test_cleanup_functions"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_cleanup_functions(pytester: Pytester) -> None:\n \"\"\"Ensure functions added with addCleanup are always called after each test ends (#6947)\"\"\"\n pytester.makepyfile(\n \"\"\"\n import unittest\n\n cleanups = []\n\n class Test(unittest.TestCase):\n\n def test_func_1(self):\n self.addCleanup(cleanups.append, \"test_func_1\")\n\n def test_func_2(self):\n self.addCleanup(cleanups.append, \"test_func_2\")\n assert 0\n\n def test_func_3_check_cleanups(self):\n assert cleanups == [\"test_func_1\", \"test_func_2\"]\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n [\n \"*::test_func_1 PASSED *\",\n \"*::test_func_2 FAILED *\",\n \"*::test_func_3_check_cleanups PASSED *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_BdbQuit_test_exit_outcome.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_BdbQuit_test_exit_outcome.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1109, "end_line": 1142, "span_ids": ["test_exit_outcome", "test_BdbQuit"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_BdbQuit(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_foo=\"\"\"\n import unittest\n\n class MyTestCase(unittest.TestCase):\n def test_bdbquit(self):\n import bdb\n raise bdb.BdbQuit()\n\n def test_should_not_run(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(failed=1, passed=1)\n\n\ndef test_exit_outcome(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_foo=\"\"\"\n import pytest\n import unittest\n\n class MyTestCase(unittest.TestCase):\n def test_exit_outcome(self):\n pytest.exit(\"pytest_exit called\")\n\n def test_should_not_run(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*Exit: pytest_exit called*\", \"*= no tests ran in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_trace_test_trace.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_trace_test_trace.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1145, "end_line": 1171, "span_ids": ["test_trace.check_call._pdb.runcall", "test_trace.check_call._pdb", "test_trace"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_trace(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n calls = []\n\n def check_call(*args, **kwargs):\n calls.append((args, kwargs))\n assert args == (\"runcall\",)\n\n class _pdb:\n def runcall(*args, **kwargs):\n calls.append((args, kwargs))\n\n return _pdb\n\n monkeypatch.setattr(\"_pytest.debugging.pytestPDB._init_pdb\", check_call)\n\n p1 = pytester.makepyfile(\n \"\"\"\n import unittest\n\n class MyTestCase(unittest.TestCase):\n def test(self):\n self.assertEqual('foo', 'foo')\n \"\"\"\n )\n result = pytester.runpytest(\"--trace\", str(p1))\n assert len(calls) == 2\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_pdb_teardown_called_test_pdb_teardown_called.assert_teardowns_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_pdb_teardown_called_test_pdb_teardown_called.assert_teardowns_", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1174, "end_line": 1206, "span_ids": ["test_pdb_teardown_called"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pdb_teardown_called(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n \"\"\"Ensure tearDown() is always called when --pdb is given in the command-line.\n\n We delay the normal tearDown() calls when --pdb is given, so this ensures we are calling\n tearDown() eventually to avoid memory leaks when using --pdb.\n \"\"\"\n teardowns: List[str] = []\n monkeypatch.setattr(\n pytest, \"test_pdb_teardown_called_teardowns\", teardowns, raising=False\n )\n\n pytester.makepyfile(\n \"\"\"\n import unittest\n import pytest\n\n class MyTestCase(unittest.TestCase):\n\n def tearDown(self):\n pytest.test_pdb_teardown_called_teardowns.append(self.id())\n\n def test_1(self):\n pass\n def test_2(self):\n pass\n \"\"\"\n )\n result = pytester.runpytest_inprocess(\"--pdb\")\n result.stdout.fnmatch_lines(\"* 2 passed in *\")\n assert teardowns == [\n \"test_pdb_teardown_called.MyTestCase.test_1\",\n \"test_pdb_teardown_called.MyTestCase.test_2\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_os_pyfile_with_warnings.return.str_test_file_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_os_pyfile_with_warnings.return.str_test_file_", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 38, "span_ids": ["impl", "pyfile_with_warnings", "imports"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport warnings\nfrom typing import List\nfrom typing import Optional\nfrom typing import Tuple\n\nimport pytest\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.pytester import Pytester\n\nWARNINGS_SUMMARY_HEADER = \"warnings summary\"\n\n\n@pytest.fixture\ndef pyfile_with_warnings(pytester: Pytester, request: FixtureRequest) -> str:\n \"\"\"Create a test file which calls a function in a module which generates warnings.\"\"\"\n pytester.syspathinsert()\n test_name = request.function.__name__\n module_name = test_name.lstrip(\"test_\") + \"_module\"\n test_file = 
pytester.makepyfile(\n \"\"\"\n import {module_name}\n def test_func():\n assert {module_name}.foo() == 1\n \"\"\".format(\n module_name=module_name\n ),\n **{\n module_name: \"\"\"\n import warnings\n def foo():\n warnings.warn(UserWarning(\"user warning\"))\n warnings.warn(RuntimeWarning(\"runtime warning\"))\n return 1\n \"\"\",\n },\n )\n return str(test_file)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestAssertionWarnings_test_warnings_checker_twice.None_1.warnings_warn_Message_B_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestAssertionWarnings_test_warnings_checker_twice.None_1.warnings_warn_Message_B_", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 557, "end_line": 581, "span_ids": ["TestAssertionWarnings.assert_result_warns", "test_warnings_checker_twice", "TestAssertionWarnings", "TestAssertionWarnings.test_tuple_warning"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionWarnings:\n @staticmethod\n def assert_result_warns(result, msg) -> None:\n result.stdout.fnmatch_lines([\"*PytestAssertRewriteWarning: %s*\" % msg])\n\n def test_tuple_warning(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\\\n def test_foo():\n assert (1,2)\n \"\"\"\n )\n result = pytester.runpytest()\n self.assert_result_warns(\n result, \"assertion is always true, perhaps remove parentheses?\"\n )\n\n\ndef test_warnings_checker_twice() -> None:\n \"\"\"Issue #4617\"\"\"\n expectation = pytest.warns(UserWarning)\n with expectation:\n warnings.warn(\"Message A\", UserWarning)\n with expectation:\n warnings.warn(\"Message B\", UserWarning)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_group_warnings_by_message_test_group_warnings_by_message.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_group_warnings_by_message_test_group_warnings_by_message.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 584, "end_line": 613, "span_ids": ["test_group_warnings_by_message"], "tokens": 346}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"ignore::pytest.PytestExperimentalApiWarning\")\n@pytest.mark.filterwarnings(\"always\")\ndef 
test_group_warnings_by_message(pytester: Pytester) -> None:\n pytester.copy_example(\"warnings/test_group_warnings_by_message.py\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"test_group_warnings_by_message.py::test_foo[[]0[]]\",\n \"test_group_warnings_by_message.py::test_foo[[]1[]]\",\n \"test_group_warnings_by_message.py::test_foo[[]2[]]\",\n \"test_group_warnings_by_message.py::test_foo[[]3[]]\",\n \"test_group_warnings_by_message.py::test_foo[[]4[]]\",\n \"test_group_warnings_by_message.py::test_foo_1\",\n \" */test_group_warnings_by_message.py:*: UserWarning: foo\",\n \" warnings.warn(UserWarning(msg))\",\n \"\",\n \"test_group_warnings_by_message.py::test_bar[[]0[]]\",\n \"test_group_warnings_by_message.py::test_bar[[]1[]]\",\n \"test_group_warnings_by_message.py::test_bar[[]2[]]\",\n \"test_group_warnings_by_message.py::test_bar[[]3[]]\",\n \"test_group_warnings_by_message.py::test_bar[[]4[]]\",\n \" */test_group_warnings_by_message.py:*: UserWarning: bar\",\n \" warnings.warn(UserWarning(msg))\",\n \"\",\n \"-- Docs: *\",\n \"*= 11 passed, 11 warnings *\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_group_warnings_by_message_summary_test_pytest_configure_warning.assert_str_warning_messag": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_group_warnings_by_message_summary_test_pytest_configure_warning.assert_str_warning_messag", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 616, "end_line": 656, "span_ids": ["test_pytest_configure_warning", "test_group_warnings_by_message_summary"], "tokens": 309}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"ignore::pytest.PytestExperimentalApiWarning\")\n@pytest.mark.filterwarnings(\"always\")\ndef test_group_warnings_by_message_summary(pytester: Pytester) -> None:\n pytester.copy_example(\"warnings/test_group_warnings_by_message_summary\")\n pytester.syspathinsert()\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*== %s ==*\" % WARNINGS_SUMMARY_HEADER,\n \"test_1.py: 21 warnings\",\n \"test_2.py: 1 warning\",\n \" */test_1.py:7: UserWarning: foo\",\n \" warnings.warn(UserWarning(msg))\",\n \"\",\n \"test_1.py: 20 warnings\",\n \" */test_1.py:7: UserWarning: bar\",\n \" warnings.warn(UserWarning(msg))\",\n \"\",\n \"-- Docs: *\",\n \"*= 42 passed, 42 warnings *\",\n ],\n consecutive=True,\n )\n\n\ndef test_pytest_configure_warning(pytester: Pytester, recwarn) -> None:\n \"\"\"Issue 5115.\"\"\"\n pytester.makeconftest(\n \"\"\"\n def pytest_configure():\n import warnings\n\n warnings.warn(\"from pytest_configure\")\n \"\"\"\n )\n\n result = pytester.runpytest()\n assert result.ret == 5\n assert \"INTERNALERROR\" not in result.stderr.str()\n warning = recwarn.pop()\n assert str(warning.message) == \"from 
pytest_configure\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel_TestStackLevel.test_issue4445_rewrite.assert_lineno_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel_TestStackLevel.test_issue4445_rewrite.assert_lineno_4", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 659, "end_line": 699, "span_ids": ["TestStackLevel.capwarn.CapturedWarnings", "TestStackLevel", "TestStackLevel.capwarn.CapturedWarnings:2", "TestStackLevel.test_issue4445_rewrite", "TestStackLevel.capwarn"], "tokens": 317}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStackLevel:\n @pytest.fixture\n def capwarn(self, pytester: Pytester):\n class CapturedWarnings:\n captured: List[\n Tuple[warnings.WarningMessage, Optional[Tuple[str, int, str]]]\n ] = []\n\n @classmethod\n def pytest_warning_recorded(cls, warning_message, when, nodeid, location):\n cls.captured.append((warning_message, location))\n\n pytester.plugins = [CapturedWarnings()]\n\n return CapturedWarnings\n\n def test_issue4445_rewrite(self, pytester: Pytester, capwarn) -> None:\n \"\"\"#4445: Make sure the warning points to a reasonable location\n See origin of _issue_warning_captured at: _pytest.assertion.rewrite.py:241\n \"\"\"\n pytester.makepyfile(some_mod=\"\")\n conftest = pytester.makeconftest(\n \"\"\"\n import some_mod\n import pytest\n\n pytest.register_assert_rewrite(\"some_mod\")\n \"\"\"\n )\n pytester.parseconfig()\n\n # with stacklevel=5 the warning originates from register_assert_rewrite\n # function in the created conftest.py\n assert len(capwarn.captured) == 1\n warning, location = capwarn.captured.pop()\n file, lineno, func = location\n\n assert \"Module already imported\" in str(warning.message)\n assert file == str(conftest)\n assert func == \"\" # the above conftest.py\n assert lineno == 4", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_preparse_TestStackLevel.test_issue4445_preparse.assert_func__preparse": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_preparse_TestStackLevel.test_issue4445_preparse.assert_func__preparse", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 701, "end_line": 720, "span_ids": ["TestStackLevel.test_issue4445_preparse"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStackLevel:\n\n def test_issue4445_preparse(self, pytester: Pytester, capwarn) -> None:\n \"\"\"#4445: Make sure the warning points to a reasonable location\n See origin of _issue_warning_captured at: _pytest.config.__init__.py:910\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import nothing\n \"\"\"\n )\n pytester.parseconfig(\"--help\")\n\n # with stacklevel=2 the warning should originate from config._preparse and is\n # thrown by an errorneous conftest.py\n assert len(capwarn.captured) == 1\n warning, location = capwarn.captured.pop()\n file, _, func = location\n\n assert \"could not load initial conftests\" in str(warning.message)\n assert f\"config{os.sep}__init__.py\" in file\n assert func == \"_preparse\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_issue5928_mark_generator_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_issue5928_mark_generator_", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 758, "end_line": 779, "span_ids": ["TestStackLevel.test_issue4445_issue5928_mark_generator"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStackLevel:\n\n def test_issue4445_issue5928_mark_generator(self, pytester: Pytester) -> None:\n \"\"\"#4445 and #5928: Make sure the warning from an unknown mark points to\n the test file where this mark is used.\n \"\"\"\n testfile = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.unknown\n def test_it():\n pass\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n # with stacklevel=2 the warning should originate from the above created test file\n result.stdout.fnmatch_lines_random(\n [\n \"*{testfile}:3*\".format(testfile=str(testfile)),\n \"*Unknown pytest.mark.unknown*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/__init__.py__Python_inspection_code_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/__init__.py__Python_inspection_code_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 23, "span_ids": ["impl", "docstring", "imports"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "\"\"\"Python inspection/code generation API.\"\"\"\nfrom .code import Code\nfrom .code import ExceptionInfo\nfrom .code import filter_traceback\nfrom .code import Frame\nfrom .code import getfslineno\nfrom .code import Traceback\nfrom .code import TracebackEntry\nfrom .source import getrawcode\nfrom .source import Source\n\n__all__ = [\n \"Code\",\n \"ExceptionInfo\",\n \"filter_traceback\",\n \"Frame\",\n \"getfslineno\",\n \"getrawcode\",\n \"Traceback\",\n \"TracebackEntry\",\n \"Source\",\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code.fullsource_Code.getargs.return.raw_co_varnames_argcount": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code.fullsource_Code.getargs.return.raw_co_varnames_argcount", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 90, "end_line": 113, "span_ids": ["Code.fullsource", "Code.source", "Code.getargs"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Code:\n\n @property\n def fullsource(self) -> Optional[\"Source\"]:\n \"\"\"Return a _pytest._code.Source object for the full source file of the code.\"\"\"\n full, _ = findsource(self.raw)\n return full\n\n def source(self) -> \"Source\":\n \"\"\"Return a _pytest._code.Source object for the code object's source only.\"\"\"\n # return source only for that part of code\n return Source(self.raw)\n\n def getargs(self, var: bool = False) -> Tuple[str, ...]:\n \"\"\"Return a tuple with the argument names for the code object.\n\n If 'var' is set True also return the names of the variable and\n keyword arguments when present.\n \"\"\"\n # Handy shortcut for getting args.\n raw = self.raw\n argcount = raw.co_argcount\n if var:\n argcount += raw.co_flags & CO_VARARGS\n argcount += raw.co_flags & CO_VARKEYWORDS\n return raw.co_varnames[:argcount]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.getsource_TracebackEntry.source.property_getsource_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.getsource_TracebackEntry.source.property_getsource_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 240, "end_line": 264, "span_ids": ["TracebackEntry:5", "TracebackEntry.getsource"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TracebackEntry:\n\n def getsource(self, astcache=None) -> Optional[\"Source\"]:\n \"\"\"Return failing source code.\"\"\"\n # we use the passed in astcache to not reparse asttrees\n # within exception info printing\n source = self.frame.code.fullsource\n if source is None:\n return None\n key = astnode = None\n if astcache is not None:\n key = self.frame.code.path\n if key is not None:\n astnode = astcache.get(key, None)\n start = self.getfirstlinesource()\n try:\n astnode, _, end = getstatementrange_ast(\n self.lineno, source, astnode=astnode\n )\n except SyntaxError:\n end = self.lineno + 1\n else:\n if key is not None:\n astcache[key] = astnode\n return source[start:end]\n\n source = property(getsource)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_traceback_entry_FormattedExcinfo.repr_traceback_entry.if_style_in_short_lo.else_.return.ReprEntry_lines_None_No": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_FormattedExcinfo.repr_traceback_entry_FormattedExcinfo.repr_traceback_entry.if_style_in_short_lo.else_.return.ReprEntry_lines_None_No", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 786, "end_line": 820, "span_ids": ["FormattedExcinfo.repr_traceback_entry"], "tokens": 357}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s\nclass FormattedExcinfo:\n\n def repr_traceback_entry(\n self,\n entry: TracebackEntry,\n excinfo: Optional[ExceptionInfo[BaseException]] = None,\n ) -> \"ReprEntry\":\n lines: List[str] = []\n style = entry._repr_style if entry._repr_style is not None else self.style\n if style in (\"short\", \"long\"):\n source = self._getentrysource(entry)\n if source is None:\n source = Source(\"???\")\n line_index = 0\n else:\n line_index = entry.lineno - entry.getfirstlinesource()\n short = style == \"short\"\n reprargs = self.repr_args(entry) if not short else None\n s = self.get_source(source, line_index, excinfo, short=short)\n lines.extend(s)\n if short:\n message = \"in %s\" % (entry.name)\n else:\n message = excinfo and excinfo.typename or \"\"\n entry_path = entry.path\n path = self._makepath(entry_path)\n reprfileloc = ReprFileLocation(path, entry.lineno + 1, message)\n localsrepr = self.repr_locals(entry.locals)\n return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style)\n elif style == \"value\":\n if excinfo:\n lines.extend(str(excinfo.value).split(\"\\n\"))\n return ReprEntry(lines, None, None, None, style)\n else:\n if excinfo:\n lines.extend(self.get_exconly(excinfo, indent=4))\n return ReprEntry(lines, None, None, None, style)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/__init__.py__": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/__init__.py__", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 9, "span_ids": ["impl", "imports"], "tokens": 34}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from .terminalwriter import get_terminal_width\nfrom .terminalwriter import TerminalWriter\n\n\n__all__ = [\n \"TerminalWriter\",\n \"get_terminal_width\",\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py__Helper_functions_for_w_should_do_markup.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py__Helper_functions_for_w_should_do_markup.return._", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/terminalwriter.py", "file_name": "terminalwriter.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 37, "span_ids": ["get_terminal_width", "should_do_markup", "docstring", "imports"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Helper functions for writing to terminals and files.\"\"\"\nimport os\nimport shutil\nimport sys\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import TextIO\n\nfrom .wcwidth import wcswidth\nfrom _pytest.compat import final\n\n\n# This code was initially copied from py 1.8.1, file _io/terminalwriter.py.\n\n\ndef get_terminal_width() -> int:\n width, _ = shutil.get_terminal_size(fallback=(80, 24))\n\n # The Windows get_terminal_size may be bogus, let's sanify a bit.\n if width < 40:\n width = 80\n\n return width\n\n\ndef should_do_markup(file: TextIO) -> bool:\n if os.environ.get(\"PY_COLORS\") == \"1\":\n return True\n if os.environ.get(\"PY_COLORS\") == \"0\":\n return False\n if \"NO_COLOR\" in os.environ:\n return False\n if \"FORCE_COLOR\" in os.environ:\n return True\n return (\n hasattr(file, \"isatty\") and file.isatty() and os.environ.get(\"TERM\") != \"dumb\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.markup_TerminalWriter.markup.return.text": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.markup_TerminalWriter.markup.return.text", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/terminalwriter.py", "file_name": "terminalwriter.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 105, "span_ids": 
["TerminalWriter.markup"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalWriter:\n\n def markup(self, text: str, **markup: bool) -> str:\n for name in markup:\n if name not in self._esctable:\n raise ValueError(f\"unknown markup: {name!r}\")\n if self.hasmarkup:\n esc = [self._esctable[name] for name, on in markup.items() if on]\n if esc:\n text = \"\".join(\"\\x1b[%sm\" % cod for cod in esc) + text + \"\\x1b[0m\"\n return text", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.sep_TerminalWriter.sep.self_line_line_markup_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.sep_TerminalWriter.sep.self_line_line_markup_", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/terminalwriter.py", "file_name": "terminalwriter.py", "file_type": "text/x-python", "category": "implementation", "start_line": 107, "end_line": 142, "span_ids": ["TerminalWriter.sep"], "tokens": 445}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalWriter:\n\n def sep(\n self,\n sepchar: str,\n title: Optional[str] = None,\n fullwidth: Optional[int] = None,\n **markup: bool,\n ) -> None:\n if fullwidth is None:\n fullwidth = self.fullwidth\n # The goal is to have the line be as long as possible\n # under the condition that len(line) <= fullwidth.\n if sys.platform == \"win32\":\n # If we print in the last column on windows we are on a\n # new line but there is no way to verify/neutralize this\n # (we may not know the exact line width).\n # So let's be defensive to avoid empty lines in the output.\n fullwidth -= 1\n if title is not None:\n # we want 2 + 2*len(fill) + len(title) <= fullwidth\n # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth\n # 2*len(sepchar)*N <= fullwidth - len(title) - 2\n # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))\n N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1)\n fill = sepchar * N\n line = f\"{fill} {title} {fill}\"\n else:\n # we want len(sepchar)*N <= fullwidth\n # i.e. 
N <= fullwidth // len(sepchar)\n line = sepchar * (fullwidth // len(sepchar))\n # In some situations there is room for an extra sepchar at the right,\n # in particular if we consider that with a sepchar like \"_ \" the\n # trailing space is not important at the end of the line.\n if len(line) + len(sepchar.rstrip()) <= fullwidth:\n line += sepchar.rstrip()\n\n self.line(line, **markup)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.write_TerminalWriter.flush.self__file_flush_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter.write_TerminalWriter.flush.self__file_flush_", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/terminalwriter.py", "file_name": "terminalwriter.py", "file_type": "text/x-python", "category": "implementation", "start_line": 144, "end_line": 174, "span_ids": ["TerminalWriter.flush", "TerminalWriter.write", "TerminalWriter.line"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalWriter:\n\n def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None:\n if msg:\n current_line = msg.rsplit(\"\\n\", 1)[-1]\n if \"\\n\" in msg:\n self._current_line = current_line\n else:\n self._current_line += current_line\n\n msg = self.markup(msg, **markup)\n\n try:\n self._file.write(msg)\n except UnicodeEncodeError:\n # Some environments don't support printing general Unicode\n # strings, due to misconfiguration or otherwise; in that case,\n # print the string escaped to ASCII.\n # When the Unicode situation improves we should consider\n # letting the error propagate instead of masking it (see #7475\n # for one brief attempt).\n msg = msg.encode(\"unicode-escape\").decode(\"ascii\")\n self._file.write(msg)\n\n if flush:\n self.flush()\n\n def line(self, s: str = \"\", **markup: bool) -> None:\n self.write(s, **markup)\n self.write(\"\\n\")\n\n def flush(self) -> None:\n self._file.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter._write_source_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter._write_source_", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/terminalwriter.py", "file_name": "terminalwriter.py", "file_type": "text/x-python", "category": "implementation", "start_line": 176, "end_line": 211, "span_ids": ["TerminalWriter._write_source", "TerminalWriter._highlight"], "tokens": 316}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "@final\nclass TerminalWriter:\n\n def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None:\n \"\"\"Write lines of source code possibly highlighted.\n\n Keeping this private for now because the API is clunky. We should discuss how\n to evolve the terminal writer so we can have more precise color support, for example\n being able to write part of a line in one color and the rest in another, and so on.\n \"\"\"\n if indents and len(indents) != len(lines):\n raise ValueError(\n \"indents size ({}) should have same size as lines ({})\".format(\n len(indents), len(lines)\n )\n )\n if not indents:\n indents = [\"\"] * len(lines)\n source = \"\\n\".join(lines)\n new_lines = self._highlight(source).splitlines()\n for indent, new_line in zip(indents, new_lines):\n self.line(indent + new_line)\n\n def _highlight(self, source: str) -> str:\n \"\"\"Highlight the given source code if we have markup support.\"\"\"\n if not self.hasmarkup or not self.code_highlight:\n return source\n try:\n from pygments.formatters.terminal import TerminalFormatter\n from pygments.lexers.python import PythonLexer\n from pygments import highlight\n except ImportError:\n return source\n else:\n highlighted: str = highlight(\n source, PythonLexer(), TerminalFormatter(bg=\"dark\")\n )\n return highlighted", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__py36_windowsconsoleio_workaround__py36_windowsconsoleio_workaround.sys.stderr._reopen_stdio_sys_stderr_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__py36_windowsconsoleio_workaround__py36_windowsconsoleio_workaround.sys.stderr._reopen_stdio_sys_stderr_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 95, "end_line": 148, "span_ids": ["_py36_windowsconsoleio_workaround"], "tokens": 499}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _py36_windowsconsoleio_workaround(stream: TextIO) -> None:\n \"\"\"Workaround for Windows Unicode console handling on Python>=3.6.\n\n Python 3.6 implemented Unicode console handling for Windows. This works\n by reading/writing to the raw console handle using\n ``{Read,Write}ConsoleW``.\n\n The problem is that we are going to ``dup2`` over the stdio file\n descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the\n handles used by Python to write to the console. 
Though there is still some\n weirdness and the console handle seems to only be closed randomly and not\n on the first call to ``CloseHandle``, or maybe it gets reopened with the\n same handle value when we suspend capturing.\n\n The workaround in this case will reopen stdio with a different fd which\n also means a different handle by replicating the logic in\n \"Py_lifecycle.c:initstdio/create_stdio\".\n\n :param stream:\n In practice ``sys.stdout`` or ``sys.stderr``, but given\n here as parameter for unittesting purposes.\n\n See https://github.com/pytest-dev/py/issues/103.\n \"\"\"\n if not sys.platform.startswith(\"win32\") or hasattr(sys, \"pypy_version_info\"):\n return\n\n # Bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666).\n if not hasattr(stream, \"buffer\"): # type: ignore[unreachable]\n return\n\n buffered = hasattr(stream.buffer, \"raw\")\n raw_stdout = stream.buffer.raw if buffered else stream.buffer # type: ignore[attr-defined]\n\n if not isinstance(raw_stdout, io._WindowsConsoleIO): # type: ignore[attr-defined]\n return\n\n def _reopen_stdio(f, mode):\n if not buffered and mode[0] == \"w\":\n buffering = 0\n else:\n buffering = -1\n\n return io.TextIOWrapper(\n open(os.dup(f.fileno()), mode, buffering), # type: ignore[arg-type]\n f.encoding,\n f.errors,\n f.newlines,\n f.line_buffering,\n )\n\n sys.stdin = _reopen_stdio(sys.stdin, \"rb\")\n sys.stdout = _reopen_stdio(sys.stdout, \"wb\")\n sys.stderr = _reopen_stdio(sys.stderr, \"wb\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__IO_Helpers__EncodedFile.mode.return.self_buffer_mode_replace_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__IO_Helpers__EncodedFile.mode.return.self_buffer_mode_replace_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 168, "end_line": 184, "span_ids": ["EncodedFile", "EncodedFile.mode", "pytest_load_initial_conftests", "EncodedFile.name"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# IO Helpers.\n\n\nclass EncodedFile(io.TextIOWrapper):\n __slots__ = ()\n\n @property\n def name(self) -> str:\n # Ensure that file.name is a string. 
Workaround for a Python bug\n # fixed in >=3.7.4: https://bugs.python.org/issue36015\n return repr(self.buffer)\n\n @property\n def mode(self) -> str:\n # TextIOWrapper doesn't expose a mode, but at least some of our\n # tests check it.\n return self.buffer.mode.replace(\"b\", \"\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_DontReadFromInput_NoCapture.__init__.start.done.suspend.resume.lambda_args_None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_DontReadFromInput_NoCapture.__init__.start.done.suspend.resume.lambda_args_None", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 212, "end_line": 249, "span_ids": ["DontReadFromInput.__iter__", "impl:4", "DontReadFromInput.read", "DontReadFromInput.isatty", "DontReadFromInput:4", "DontReadFromInput.buffer", "NoCapture", "DontReadFromInput.close", "DontReadFromInput.fileno", "DontReadFromInput"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DontReadFromInput:\n encoding = None\n\n def read(self, *args):\n raise OSError(\n \"pytest: reading from stdin while output is captured! Consider using `-s`.\"\n )\n\n readline = read\n readlines = read\n __next__ = read\n\n def __iter__(self):\n return self\n\n def fileno(self) -> int:\n raise UnsupportedOperation(\"redirected stdin is pseudofile, has no fileno()\")\n\n def isatty(self) -> bool:\n return False\n\n def close(self) -> None:\n pass\n\n @property\n def buffer(self):\n return self\n\n\n# Capture classes.\n\n\npatchsysdict = {0: \"stdin\", 1: \"stdout\", 2: \"stderr\"}\n\n\nclass NoCapture:\n EMPTY_BUFFER = None\n __init__ = start = done = suspend = resume = lambda *args: None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCaptureBinary.done_FDCaptureBinary.done.self._state._done_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCaptureBinary.done_FDCaptureBinary.done.self._state._done_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 430, "end_line": 444, "span_ids": ["FDCaptureBinary.done"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FDCaptureBinary:\n\n def done(self) -> None:\n \"\"\"Stop capturing, restore streams, return original capture file,\n seeked to position zero.\"\"\"\n self._assert_state(\"done\", (\"initialized\", \"started\", \"suspended\", 
\"done\"))\n if self._state == \"done\":\n return\n os.dup2(self.targetfd_save, self.targetfd)\n os.close(self.targetfd_save)\n if self.targetfd_invalid is not None:\n if self.targetfd_invalid != self.targetfd:\n os.close(self.targetfd)\n os.close(self.targetfd_invalid)\n self.syscapture.done()\n self.tmpfile.close()\n self._state = \"done\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCaptureBinary.suspend_FDCaptureBinary.writeorg.os_write_self_targetfd_sa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCaptureBinary.suspend_FDCaptureBinary.writeorg.os_write_self_targetfd_sa", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 446, "end_line": 465, "span_ids": ["FDCaptureBinary.writeorg", "FDCaptureBinary.suspend", "FDCaptureBinary.resume"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FDCaptureBinary:\n\n def suspend(self) -> None:\n self._assert_state(\"suspend\", (\"started\", \"suspended\"))\n if self._state == \"suspended\":\n return\n self.syscapture.suspend()\n os.dup2(self.targetfd_save, self.targetfd)\n self._state = \"suspended\"\n\n def resume(self) -> None:\n self._assert_state(\"resume\", (\"started\", \"suspended\"))\n if self._state == \"started\":\n return\n self.syscapture.resume()\n os.dup2(self.tmpfile.fileno(), self.targetfd)\n self._state = \"started\"\n\n def writeorg(self, data):\n \"\"\"Write to original file descriptor.\"\"\"\n self._assert_state(\"writeorg\", (\"started\", \"suspended\"))\n os.write(self.targetfd_save, data)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_MultiCapture_MultiCapture.readouterr.return.CaptureResult_out_err_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_MultiCapture_MultiCapture.readouterr.return.CaptureResult_out_err_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 548, "end_line": 629, "span_ids": ["MultiCapture.is_started", "MultiCapture.pop_outerr_to_orig", "MultiCapture.resume_capturing", "MultiCapture.readouterr", "MultiCapture.__repr__", "MultiCapture", "MultiCapture.start_capturing", "MultiCapture.stop_capturing", "MultiCapture.suspend_capturing"], "tokens": 546}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MultiCapture(Generic[AnyStr]):\n _state = None\n 
_in_suspended = False\n\n def __init__(self, in_, out, err) -> None:\n self.in_ = in_\n self.out = out\n self.err = err\n\n def __repr__(self) -> str:\n return \"<MultiCapture out={!r} err={!r} in_={!r} _state={!r} _in_suspended={!r}>\".format(\n self.out,\n self.err,\n self.in_,\n self._state,\n self._in_suspended,\n )\n\n def start_capturing(self) -> None:\n self._state = \"started\"\n if self.in_:\n self.in_.start()\n if self.out:\n self.out.start()\n if self.err:\n self.err.start()\n\n def pop_outerr_to_orig(self) -> Tuple[AnyStr, AnyStr]:\n \"\"\"Pop current snapshot out/err capture and flush to orig streams.\"\"\"\n out, err = self.readouterr()\n if out:\n self.out.writeorg(out)\n if err:\n self.err.writeorg(err)\n return out, err\n\n def suspend_capturing(self, in_: bool = False) -> None:\n self._state = \"suspended\"\n if self.out:\n self.out.suspend()\n if self.err:\n self.err.suspend()\n if in_ and self.in_:\n self.in_.suspend()\n self._in_suspended = True\n\n def resume_capturing(self) -> None:\n self._state = \"started\"\n if self.out:\n self.out.resume()\n if self.err:\n self.err.resume()\n if self._in_suspended:\n self.in_.resume()\n self._in_suspended = False\n\n def stop_capturing(self) -> None:\n \"\"\"Stop capturing and reset capturing streams.\"\"\"\n if self._state == \"stopped\":\n raise ValueError(\"was already stopped\")\n self._state = \"stopped\"\n if self.out:\n self.out.done()\n if self.err:\n self.err.done()\n if self.in_:\n self.in_.done()\n\n def is_started(self) -> bool:\n \"\"\"Whether actively capturing -- not suspended or stopped.\"\"\"\n return self._state == \"started\"\n\n def readouterr(self) -> CaptureResult[AnyStr]:\n if self.out:\n out = self.out.snap()\n else:\n out = \"\"\n if self.err:\n err = self.err.snap()\n else:\n err = \"\"\n return CaptureResult(out, err)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__The_fixtures__capsys.capman_unset_fixture_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__The_fixtures__capsys.capman_unset_fixture_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 905, "end_line": 922, "span_ids": ["capsys", "CaptureFixture.disabled"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# The fixtures.\n\n\n@fixture\ndef capsys(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:\n \"\"\"Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``.\n\n The captured output is made available via ``capsys.readouterr()`` method\n calls, which return a ``(out, err)`` namedtuple.\n ``out`` and ``err`` will be ``text`` objects.\n \"\"\"\n capman = request.config.pluginmanager.getplugin(\"capturemanager\")\n capture_fixture = CaptureFixture[str](SysCapture, request, _ispytest=True)\n capman.set_fixture(capture_fixture)\n capture_fixture._start()\n yield capture_fixture\n capture_fixture.close()\n capman.unset_fixture()", "start_char_idx": null, "end_char_idx": null, "text_template":
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capsysbinary_capsysbinary.capman_unset_fixture_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capsysbinary_capsysbinary.capman_unset_fixture_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 925, "end_line": 939, "span_ids": ["capsysbinary"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:\n \"\"\"Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``.\n\n The captured output is made available via ``capsysbinary.readouterr()``\n method calls, which return a ``(out, err)`` namedtuple.\n ``out`` and ``err`` will be ``bytes`` objects.\n \"\"\"\n capman = request.config.pluginmanager.getplugin(\"capturemanager\")\n capture_fixture = CaptureFixture[bytes](SysCaptureBinary, request, _ispytest=True)\n capman.set_fixture(capture_fixture)\n capture_fixture._start()\n yield capture_fixture\n capture_fixture.close()\n capman.unset_fixture()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capfd_capfd.capman_unset_fixture_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capfd_capfd.capman_unset_fixture_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 942, "end_line": 956, "span_ids": ["capfd"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef capfd(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:\n \"\"\"Enable text capturing of writes to file descriptors ``1`` and ``2``.\n\n The captured output is made available via ``capfd.readouterr()`` method\n calls, which return a ``(out, err)`` namedtuple.\n ``out`` and ``err`` will be ``text`` objects.\n \"\"\"\n capman = request.config.pluginmanager.getplugin(\"capturemanager\")\n capture_fixture = CaptureFixture[str](FDCapture, request, _ispytest=True)\n capman.set_fixture(capture_fixture)\n capture_fixture._start()\n yield capture_fixture\n capture_fixture.close()\n capman.unset_fixture()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capfdbinary_": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_capfdbinary_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 959, "end_line": 974, "span_ids": ["capfdbinary"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:\n \"\"\"Enable bytes capturing of writes to file descriptors ``1`` and ``2``.\n\n The captured output is made available via ``capfd.readouterr()`` method\n calls, which return a ``(out, err)`` namedtuple.\n ``out`` and ``err`` will be ``byte`` objects.\n \"\"\"\n capman = request.config.pluginmanager.getplugin(\"capturemanager\")\n capture_fixture = CaptureFixture[bytes](FDCaptureBinary, request, _ispytest=True)\n capman.set_fixture(capture_fixture)\n capture_fixture._start()\n yield capture_fixture\n capture_fixture.close()\n capman.unset_fixture()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_console_main_console_main.try_.except_BrokenPipeError_._Python_exits_with_error": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_console_main_console_main.try_.except_BrokenPipeError_._Python_exits_with_error", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 180, "end_line": 195, "span_ids": ["console_main"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def console_main() -> int:\n \"\"\"The CLI entry point of pytest.\n\n This function is not meant for programmable use; use `main()` instead.\n \"\"\"\n # https://docs.python.org/3/library/signal.html#note-on-sigpipe\n try:\n code = main()\n sys.stdout.flush()\n return code\n except BrokenPipeError:\n # Python flushes standard streams on exit; redirect remaining output\n # to devnull to avoid another BrokenPipeError at shutdown\n devnull = os.open(os.devnull, os.O_WRONLY)\n os.dup2(devnull, sys.stdout.fileno())\n return 1 # Python exits with error code 1 on EPIPE", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._importconftest_PytestPluginManager._importconftest.return.mod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._importconftest_PytestPluginManager._importconftest.return.mod", 
"embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 566, "end_line": 604, "span_ids": ["PytestPluginManager._importconftest"], "tokens": 383}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def _importconftest(\n self,\n conftestpath: Path,\n importmode: Union[str, ImportMode],\n ) -> types.ModuleType:\n # Use a resolved Path object as key to avoid loading the same conftest\n # twice with build systems that create build directories containing\n # symlinks to actual files.\n # Using Path().resolve() is better than py.path.realpath because\n # it resolves to the correct path/drive in case-insensitive file systems (#5792)\n key = conftestpath.resolve()\n\n with contextlib.suppress(KeyError):\n return self._conftestpath2mod[key]\n\n pkgpath = resolve_package_path(conftestpath)\n if pkgpath is None:\n _ensure_removed_sysmodule(conftestpath.stem)\n\n try:\n mod = import_path(conftestpath, mode=importmode)\n except Exception as e:\n assert e.__traceback__ is not None\n exc_info = (type(e), e, e.__traceback__)\n raise ConftestImportFailure(conftestpath, exc_info) from e\n\n self._check_non_top_pytest_plugins(mod, conftestpath)\n\n self._conftest_plugins.add(mod)\n self._conftestpath2mod[key] = mod\n dirpath = conftestpath.parent\n if dirpath in self._dirpath2confmods:\n for path, mods in self._dirpath2confmods.items():\n if path and dirpath in path.parents or path == dirpath:\n assert mod not in mods\n mods.append(mod)\n self.trace(f\"loading conftestmodule {mod!r}\")\n self.consider_conftest(mod)\n return mod", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._check_non_top_pytest_plugins_PytestPluginManager._check_non_top_pytest_plugins.if_.fail_msg_format_conftestp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager._check_non_top_pytest_plugins_PytestPluginManager._check_non_top_pytest_plugins.if_.fail_msg_format_conftestp", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 606, "end_line": 625, "span_ids": ["PytestPluginManager._check_non_top_pytest_plugins"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def _check_non_top_pytest_plugins(\n self,\n mod: types.ModuleType,\n conftestpath: Path,\n ) -> None:\n if (\n hasattr(mod, \"pytest_plugins\")\n and self._configured\n and not self._using_pyargs\n ):\n msg = (\n \"Defining 'pytest_plugins' in a 
non-top-level conftest is no longer supported:\\n\"\n \"It affects the entire test suite instead of just below the conftest as expected.\\n\"\n \" {}\\n\"\n \"Please move it to a top level conftest file at the rootdir:\\n\"\n \" {}\\n\"\n \"For more information, visit:\\n\"\n \" https://docs.pytest.org/en/stable/deprecations.html#pytest-plugins-in-non-top-level-conftest-files\"\n )\n fail(msg.format(conftestpath, self._confcutdir), pytrace=False)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_pytest_addoption_pytest_addoption.None_11": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_pytest_addoption_pytest_addoption.None_11", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 195, "end_line": 278, "span_ids": ["pytest_addoption"], "tokens": 582}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n \"\"\"Add options to control log capturing.\"\"\"\n group = parser.getgroup(\"logging\")\n\n def add_option_ini(option, dest, default=None, type=None, **kwargs):\n parser.addini(\n dest, default=default, type=type, help=\"default value for \" + option\n )\n group.addoption(option, dest=dest, **kwargs)\n\n add_option_ini(\n \"--log-level\",\n dest=\"log_level\",\n default=None,\n metavar=\"LEVEL\",\n help=(\n \"level of messages to catch/display.\\n\"\n \"Not set by default, so it depends on the root/parent log handler's\"\n ' effective level, where it is \"WARNING\" by default.'\n ),\n )\n add_option_ini(\n \"--log-format\",\n dest=\"log_format\",\n default=DEFAULT_LOG_FORMAT,\n help=\"log format as used by the logging module.\",\n )\n add_option_ini(\n \"--log-date-format\",\n dest=\"log_date_format\",\n default=DEFAULT_LOG_DATE_FORMAT,\n help=\"log date format as used by the logging module.\",\n )\n parser.addini(\n \"log_cli\",\n default=False,\n type=\"bool\",\n help='enable log display during test run (also known as \"live logging\").',\n )\n add_option_ini(\n \"--log-cli-level\", dest=\"log_cli_level\", default=None, help=\"cli logging level.\"\n )\n add_option_ini(\n \"--log-cli-format\",\n dest=\"log_cli_format\",\n default=None,\n help=\"log format as used by the logging module.\",\n )\n add_option_ini(\n \"--log-cli-date-format\",\n dest=\"log_cli_date_format\",\n default=None,\n help=\"log date format as used by the logging module.\",\n )\n add_option_ini(\n \"--log-file\",\n dest=\"log_file\",\n default=None,\n help=\"path to a file when logging will be written to.\",\n )\n add_option_ini(\n \"--log-file-level\",\n dest=\"log_file_level\",\n default=None,\n help=\"log file logging level.\",\n )\n add_option_ini(\n \"--log-file-format\",\n dest=\"log_file_format\",\n default=DEFAULT_LOG_FORMAT,\n help=\"log format as used by the logging module.\",\n )\n add_option_ini(\n \"--log-file-date-format\",\n dest=\"log_file_date_format\",\n default=DEFAULT_LOG_DATE_FORMAT,\n help=\"log date 
format as used by the logging module.\",\n )\n add_option_ini(\n \"--log-auto-indent\",\n dest=\"log_auto_indent\",\n default=None,\n help=\"Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureHandler_LogCaptureHandler.handleError.if_logging_raiseException.raise": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureHandler_LogCaptureHandler.handleError.if_logging_raiseException.raise", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 318, "end_line": 343, "span_ids": ["LogCaptureHandler.handleError", "LogCaptureHandler", "LogCaptureHandler.emit", "LogCaptureHandler.reset"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LogCaptureHandler(logging.StreamHandler):\n \"\"\"A logging handler that stores log records and the log text.\"\"\"\n\n stream: StringIO\n\n def __init__(self) -> None:\n \"\"\"Create a new log handler.\"\"\"\n super().__init__(StringIO())\n self.records: List[logging.LogRecord] = []\n\n def emit(self, record: logging.LogRecord) -> None:\n \"\"\"Keep the log records in a list in addition to the log text.\"\"\"\n self.records.append(record)\n super().emit(record)\n\n def reset(self) -> None:\n self.records = []\n self.stream = StringIO()\n\n def handleError(self, record: logging.LogRecord) -> None:\n if logging.raiseExceptions:\n # Fail the test if the log message is bad (emit failed).\n # The default behavior of logging is to print \"Logging error\"\n # to stderr with the call stack and some extra details.\n # pytest wants to make such mistakes visible during testing.\n raise", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin_LoggingPlugin.__init__.self_log_cli_handler_setF": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin_LoggingPlugin.__init__.self_log_cli_handler_setF", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 521, "end_line": 581, "span_ids": ["LoggingPlugin"], "tokens": 558}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoggingPlugin:\n \"\"\"Attaches to the logging module and captures log messages for each test.\"\"\"\n\n def __init__(self, config: Config) -> None:\n \"\"\"Create a new plugin to 
capture log messages.\n\n The formatter can be safely shared across all handlers so\n create a single one for the entire test session here.\n \"\"\"\n self._config = config\n\n # Report logging.\n self.formatter = self._create_formatter(\n get_option_ini(config, \"log_format\"),\n get_option_ini(config, \"log_date_format\"),\n get_option_ini(config, \"log_auto_indent\"),\n )\n self.log_level = get_log_level_for_setting(config, \"log_level\")\n self.caplog_handler = LogCaptureHandler()\n self.caplog_handler.setFormatter(self.formatter)\n self.report_handler = LogCaptureHandler()\n self.report_handler.setFormatter(self.formatter)\n\n # File logging.\n self.log_file_level = get_log_level_for_setting(config, \"log_file_level\")\n log_file = get_option_ini(config, \"log_file\") or os.devnull\n if log_file != os.devnull:\n directory = os.path.dirname(os.path.abspath(log_file))\n if not os.path.isdir(directory):\n os.makedirs(directory)\n\n self.log_file_handler = _FileHandler(log_file, mode=\"w\", encoding=\"UTF-8\")\n log_file_format = get_option_ini(config, \"log_file_format\", \"log_format\")\n log_file_date_format = get_option_ini(\n config, \"log_file_date_format\", \"log_date_format\"\n )\n\n log_file_formatter = logging.Formatter(\n log_file_format, datefmt=log_file_date_format\n )\n self.log_file_handler.setFormatter(log_file_formatter)\n\n # CLI/live logging.\n self.log_cli_level = get_log_level_for_setting(\n config, \"log_cli_level\", \"log_level\"\n )\n if self._log_cli_enabled():\n terminal_reporter = config.pluginmanager.get_plugin(\"terminalreporter\")\n capture_manager = config.pluginmanager.get_plugin(\"capturemanager\")\n # if capturemanager plugin is disabled, live logging still works.\n self.log_cli_handler: Union[\n _LiveLoggingStreamHandler, _LiveLoggingNullHandler\n ] = _LiveLoggingStreamHandler(terminal_reporter, capture_manager)\n else:\n self.log_cli_handler = _LiveLoggingNullHandler()\n log_cli_formatter = self._create_formatter(\n get_option_ini(config, \"log_cli_format\", \"log_format\"),\n get_option_ini(config, \"log_cli_date_format\", \"log_date_format\"),\n get_option_ini(config, \"log_auto_indent\"),\n )\n self.log_cli_handler.setFormatter(log_cli_formatter)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.set_log_path_LoggingPlugin.set_log_path.if_old_stream_.old_stream_close_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.set_log_path_LoggingPlugin.set_log_path.if_old_stream_.old_stream_close_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 601, "end_line": 629, "span_ids": ["LoggingPlugin.set_log_path"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoggingPlugin:\n\n def set_log_path(self, fname: str) -> None:\n \"\"\"Set the filename parameter for Logging.FileHandler().\n\n Creates parent directory if it does not exist.\n\n .. 
warning::\n This is an experimental API.\n \"\"\"\n fpath = Path(fname)\n\n if not fpath.is_absolute():\n fpath = self._config.rootpath / fpath\n\n if not fpath.parent.exists():\n fpath.parent.mkdir(exist_ok=True, parents=True)\n\n stream = fpath.open(mode=\"w\", encoding=\"UTF-8\")\n if sys.version_info >= (3, 7):\n old_stream = self.log_file_handler.setStream(stream)\n else:\n old_stream = self.log_file_handler.stream\n self.log_file_handler.acquire()\n try:\n self.log_file_handler.flush()\n self.log_file_handler.stream = stream\n finally:\n self.log_file_handler.release()\n if old_stream:\n old_stream.close()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._log_cli_enabled_LoggingPlugin.pytest_collection.with_catching_logs_self_l.with_catching_logs_self_l.yield": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._log_cli_enabled_LoggingPlugin.pytest_collection.with_catching_logs_self_l.with_catching_logs_self_l.yield", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 631, "end_line": 660, "span_ids": ["LoggingPlugin.pytest_collection", "LoggingPlugin.pytest_sessionstart", "LoggingPlugin._log_cli_enabled"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoggingPlugin:\n\n def _log_cli_enabled(self):\n \"\"\"Return whether live logging is enabled.\"\"\"\n enabled = self._config.getoption(\n \"--log-cli-level\"\n ) is not None or self._config.getini(\"log_cli\")\n if not enabled:\n return False\n\n terminal_reporter = self._config.pluginmanager.get_plugin(\"terminalreporter\")\n if terminal_reporter is None:\n # terminal reporter is disabled e.g. 
by pytest-xdist.\n return False\n\n return True\n\n @hookimpl(hookwrapper=True, tryfirst=True)\n def pytest_sessionstart(self) -> Generator[None, None, None]:\n self.log_cli_handler.set_when(\"sessionstart\")\n\n with catching_logs(self.log_cli_handler, level=self.log_cli_level):\n with catching_logs(self.log_file_handler, level=self.log_file_level):\n yield\n\n @hookimpl(hookwrapper=True, tryfirst=True)\n def pytest_collection(self) -> Generator[None, None, None]:\n self.log_cli_handler.set_when(\"collection\")\n\n with catching_logs(self.log_cli_handler, level=self.log_cli_level):\n with catching_logs(self.log_file_handler, level=self.log_file_level):\n yield", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.pytest_runtest_setup_LoggingPlugin.pytest_unconfigure.self_log_file_handler_clo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.pytest_runtest_setup_LoggingPlugin.pytest_unconfigure.self_log_file_handler_clo", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 704, "end_line": 742, "span_ids": ["LoggingPlugin.pytest_runtest_teardown", "LoggingPlugin.pytest_runtest_logfinish", "LoggingPlugin.pytest_sessionfinish", "LoggingPlugin.pytest_runtest_setup", "LoggingPlugin.pytest_unconfigure", "LoggingPlugin.pytest_runtest_call"], "tokens": 361}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoggingPlugin:\n\n @hookimpl(hookwrapper=True)\n def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None, None, None]:\n self.log_cli_handler.set_when(\"setup\")\n\n empty: Dict[str, List[logging.LogRecord]] = {}\n item._store[caplog_records_key] = empty\n yield from self._runtest_for(item, \"setup\")\n\n @hookimpl(hookwrapper=True)\n def pytest_runtest_call(self, item: nodes.Item) -> Generator[None, None, None]:\n self.log_cli_handler.set_when(\"call\")\n\n yield from self._runtest_for(item, \"call\")\n\n @hookimpl(hookwrapper=True)\n def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None, None, None]:\n self.log_cli_handler.set_when(\"teardown\")\n\n yield from self._runtest_for(item, \"teardown\")\n del item._store[caplog_records_key]\n del item._store[caplog_handler_key]\n\n @hookimpl\n def pytest_runtest_logfinish(self) -> None:\n self.log_cli_handler.set_when(\"finish\")\n\n @hookimpl(hookwrapper=True, tryfirst=True)\n def pytest_sessionfinish(self) -> Generator[None, None, None]:\n self.log_cli_handler.set_when(\"sessionfinish\")\n\n with catching_logs(self.log_cli_handler, level=self.log_cli_level):\n with catching_logs(self.log_file_handler, level=self.log_file_level):\n yield\n\n @hookimpl\n def pytest_unconfigure(self) -> None:\n # Close the FileHandler explicitly.\n # (logging.shutdown might have lost the weakref?!)\n self.log_file_handler.close()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__FileHandler__LiveLoggingStreamHandler.set_when.if_when_start_.self._test_outcome_written.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__FileHandler__LiveLoggingStreamHandler.set_when.if_when_start_.self._test_outcome_written.False", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 745, "end_line": 786, "span_ids": ["_FileHandler.handleError", "_LiveLoggingStreamHandler.set_when", "_FileHandler", "_LiveLoggingStreamHandler", "_LiveLoggingStreamHandler.reset"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _FileHandler(logging.FileHandler):\n \"\"\"A logging FileHandler with pytest tweaks.\"\"\"\n\n def handleError(self, record: logging.LogRecord) -> None:\n # Handled by LogCaptureHandler.\n pass\n\n\nclass _LiveLoggingStreamHandler(logging.StreamHandler):\n \"\"\"A logging StreamHandler used by the live logging feature: it will\n write a newline before the first log message in each test.\n\n During live logging we must also explicitly disable stdout/stderr\n capturing otherwise it will get captured and won't appear in the\n terminal.\n \"\"\"\n\n # Officially stream needs to be a IO[str], but TerminalReporter\n # isn't. So force it.\n stream: TerminalReporter = None # type: ignore\n\n def __init__(\n self,\n terminal_reporter: TerminalReporter,\n capture_manager: Optional[CaptureManager],\n ) -> None:\n logging.StreamHandler.__init__(self, stream=terminal_reporter) # type: ignore[arg-type]\n self.capture_manager = capture_manager\n self.reset()\n self.set_when(None)\n self._test_outcome_written = False\n\n def reset(self) -> None:\n \"\"\"Reset the handler; should be called before the start of each test.\"\"\"\n self._first_record_emitted = False\n\n def set_when(self, when: Optional[str]) -> None:\n \"\"\"Prepare for the given test phase (setup/call/teardown).\"\"\"\n self._when = when\n self._section_name_shown = False\n if when == \"start\":\n self._test_outcome_written = False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_KeywordMatcher_KeywordMatcher._names.attr_ib_type_AbstractSet_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_KeywordMatcher_KeywordMatcher._names.attr_ib_type_AbstractSet_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 114, "end_line": 129, "span_ids": ["KeywordMatcher"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(slots=True)\nclass KeywordMatcher:\n \"\"\"A matcher for keywords.\n\n Given a list of names, matches any substring of one of these names. The\n string inclusion check is case-insensitive.\n\n Will match on the name of colitem, including the names of its parents.\n Only matches names of items which are either a :class:`Class` or a\n :class:`Function`.\n\n Additionally, matches on names in the 'extra_keyword_matches' set of\n any item, as well as names directly assigned to test functions.\n \"\"\"\n\n _names = attr.ib(type=AbstractSet[str])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_KeywordMatcher.from_item_KeywordMatcher.__call__.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_KeywordMatcher.from_item_KeywordMatcher.__call__.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 153, "end_line": 184, "span_ids": ["KeywordMatcher.from_item", "KeywordMatcher.__call__"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(slots=True)\nclass KeywordMatcher:\n\n @classmethod\n def from_item(cls, item: \"Item\") -> \"KeywordMatcher\":\n mapped_names = set()\n\n # Add the names of the current item and any parent items.\n import pytest\n\n for node in item.listchain():\n if not isinstance(node, (pytest.Instance, pytest.Session)):\n mapped_names.add(node.name)\n\n # Add the names added as extra keywords to current or parent items.\n mapped_names.update(item.listextrakeywords())\n\n # Add the names attached to the current function through direct assignment.\n function_obj = getattr(item, \"function\", None)\n if function_obj:\n mapped_names.update(function_obj.__dict__)\n\n # Add the markers to the keywords as we no longer handle them correctly.\n mapped_names.update(mark.name for mark in item.iter_markers())\n\n return cls(mapped_names)\n\n def __call__(self, subname: str) -> bool:\n subname = subname.lower()\n names = (name.lower() for name in self._names)\n\n for name in names:\n if subname in name:\n return True\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_MarkMatcher_MarkMatcher.__call__.return.name_in_self_own_mark_nam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_MarkMatcher_MarkMatcher.__call__.return.name_in_self_own_mark_nam", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 203, "end_line": 218, "span_ids": ["MarkMatcher.from_item", "MarkMatcher", "MarkMatcher.__call__"], "tokens": 108}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(slots=True)\nclass MarkMatcher:\n \"\"\"A matcher for markers which are present.\n\n Tries to match on any marker names, attached to the given colitem.\n \"\"\"\n\n own_mark_names = attr.ib()\n\n @classmethod\n def from_item(cls, item) -> \"MarkMatcher\":\n mark_names = {mark.name for mark in item.iter_markers()}\n return cls(mark_names)\n\n def __call__(self, name: str) -> bool:\n return name in self.own_mark_names", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Scanner_Scanner.lex.yield_Token_TokenType_EOF": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Scanner_Scanner.lex.yield_Token_TokenType_EOF", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/expression.py", "file_name": "expression.py", "file_type": "text/x-python", "category": "implementation", "start_line": 72, "end_line": 108, "span_ids": ["Scanner", "Scanner.lex"], "tokens": 288}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Scanner:\n __slots__ = (\"tokens\", \"current\")\n\n def __init__(self, input: str) -> None:\n self.tokens = self.lex(input)\n self.current = next(self.tokens)\n\n def lex(self, input: str) -> Iterator[Token]:\n pos = 0\n while pos < len(input):\n if input[pos] in (\" \", \"\\t\"):\n pos += 1\n elif input[pos] == \"(\":\n yield Token(TokenType.LPAREN, \"(\", pos)\n pos += 1\n elif input[pos] == \")\":\n yield Token(TokenType.RPAREN, \")\", pos)\n pos += 1\n else:\n match = re.match(r\"(:?\\w|:|\\+|-|\\.|\\[|\\])+\", input[pos:])\n if match:\n value = match.group(0)\n if value == \"or\":\n yield Token(TokenType.OR, value, pos)\n elif value == \"and\":\n yield Token(TokenType.AND, value, pos)\n elif value == \"not\":\n yield Token(TokenType.NOT, value, pos)\n else:\n yield Token(TokenType.IDENT, value, pos)\n pos += len(value)\n else:\n raise ParseError(\n pos + 1,\n 'unexpected character \"{}\"'.format(input[pos]),\n )\n yield Token(TokenType.EOF, \"\", pos)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Scanner.accept_Scanner.reject.raise_ParseError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Scanner.accept_Scanner.reject.raise_ParseError_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/expression.py", "file_name": "expression.py", "file_type": "text/x-python", "category": "implementation", "start_line": 110, "end_line": 127, "span_ids": ["Scanner.accept", "Scanner.reject"], "tokens": 132}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Scanner:\n\n def accept(self, type: TokenType, *, reject: bool = False) -> Optional[Token]:\n if self.current.type is type:\n token = self.current\n if token.type is not TokenType.EOF:\n self.current = next(self.tokens)\n return token\n if reject:\n self.reject((type,))\n return None\n\n def reject(self, expected: Sequence[TokenType]) -> \"NoReturn\":\n raise ParseError(\n self.current.pos + 1,\n \"expected {}; got {}\".format(\n \" OR \".join(type.value for type in expected),\n self.current.type.value,\n ),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Expression_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_Expression_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/expression.py", "file_name": "expression.py", "file_type": "text/x-python", "category": "implementation", "start_line": 190, "end_line": 226, "span_ids": ["Expression", "Expression.compile", "Expression.evaluate"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Expression:\n \"\"\"A compiled match expression as used by -k and -m.\n\n The expression can be evaulated against different matchers.\n \"\"\"\n\n __slots__ = (\"code\",)\n\n def __init__(self, code: types.CodeType) -> None:\n self.code = code\n\n @classmethod\n def compile(self, input: str) -> \"Expression\":\n \"\"\"Compile a match expression.\n\n :param input: The input expression - one line.\n \"\"\"\n astexpr = expression(Scanner(input))\n code: types.CodeType = compile(\n astexpr,\n filename=\"\",\n mode=\"eval\",\n )\n return Expression(code)\n\n def evaluate(self, matcher: Callable[[str], bool]) -> bool:\n \"\"\"Evaluate the match expression.\n\n :param matcher:\n Given an identifier, should return whether it matches or not.\n Should be prepared to handle arbitrary strings as input.\n\n :returns: Whether the expression matches or not.\n \"\"\"\n ret: bool = eval(self.code, {\"__builtins__\": {}}, MatcherAdapter(matcher))\n return ret", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package_Package.__init__.self.name.os_path_basename_str_fspa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package_Package.__init__.self.name.os_path_basename_str_fspa", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 627, "end_line": 643, "span_ids": ["Package"], "tokens": 141}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Package(Module):\n def __init__(\n self,\n fspath: py.path.local,\n parent: nodes.Collector,\n # NOTE: following args are unused:\n config=None,\n session=None,\n nodeid=None,\n ) -> None:\n # NOTE: Could be just the following, but kept as-is for compat.\n # nodes.FSCollector.__init__(self, fspath, parent=parent)\n session = parent.session\n nodes.FSCollector.__init__(\n self, fspath, parent=parent, config=config, session=session, nodeid=nodeid\n )\n self.name = os.path.basename(str(fspath.dirname))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__showfixtures_main_write_docstring.for_line_in_doc_split_n.tw_line_indent_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__showfixtures_main_write_docstring.for_line_in_doc_split_n.tw_line_indent_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1474, "end_line": 1533, "span_ids": ["write_docstring", "_showfixtures_main"], "tokens": 449}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _showfixtures_main(config: Config, session: Session) -> None:\n import _pytest.config\n\n session.perform_collect()\n curdir = Path.cwd()\n tw = _pytest.config.create_terminal_writer(config)\n verbose = config.getvalue(\"verbose\")\n\n fm = session._fixturemanager\n\n available = []\n seen: Set[Tuple[str, str]] = set()\n\n for argname, fixturedefs in fm._arg2fixturedefs.items():\n assert fixturedefs is not None\n if not fixturedefs:\n continue\n for fixturedef in fixturedefs:\n loc = getlocation(fixturedef.func, str(curdir))\n if (fixturedef.argname, loc) in seen:\n continue\n seen.add((fixturedef.argname, loc))\n available.append(\n (\n len(fixturedef.baseid),\n fixturedef.func.__module__,\n bestrelpath(curdir, Path(loc)),\n fixturedef.argname,\n fixturedef,\n )\n )\n\n available.sort()\n currentmodule = None\n for baseid, module, bestrel, argname, fixturedef in available:\n if currentmodule != module:\n if not module.startswith(\"_pytest.\"):\n tw.line()\n tw.sep(\"-\", f\"fixtures defined from {module}\")\n currentmodule = module\n if verbose <= 0 and argname[0] == \"_\":\n continue\n tw.write(argname, green=True)\n if fixturedef.scope != \"function\":\n tw.write(\" [%s scope]\" % fixturedef.scope, cyan=True)\n if verbose > 0:\n tw.write(\" -- %s\" % bestrel, yellow=True)\n tw.write(\"\\n\")\n loc = getlocation(fixturedef.func, str(curdir))\n doc = inspect.getdoc(fixturedef.func)\n if doc:\n write_docstring(tw, doc)\n else:\n tw.line(f\" {loc}: no docstring available\", red=True)\n tw.line()\n\n\ndef write_docstring(tw: TerminalWriter, doc: str, indent: str = \" 
\") -> None:\n for line in doc.split(\"\\n\"):\n tw.line(indent + line)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_fspath_result_TerminalReporter.write_fspath_result.self__tw_write_res_flush": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_fspath_result_TerminalReporter.write_fspath_result.self__tw_write_res_flush", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 388, "end_line": 397, "span_ids": ["TerminalReporter.write_fspath_result"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def write_fspath_result(self, nodeid: str, res, **markup: bool) -> None:\n fspath = self.config.rootpath / nodeid.split(\"::\")[0]\n if self.currentfspath is None or fspath != self.currentfspath:\n if self.currentfspath is not None and self._show_progress_info:\n self._write_progress_information_filling_space()\n self.currentfspath = fspath\n relfspath = bestrelpath(self.startpath, fspath)\n self._tw.line()\n self._tw.write(relfspath + \" \")\n self._tw.write(res, flush=True, **markup)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_runtest_logreport_TerminalReporter.pytest_runtest_logreport.self_flush_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_runtest_logreport_TerminalReporter.pytest_runtest_logreport.self_flush_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 509, "end_line": 568, "span_ids": ["TerminalReporter.pytest_runtest_logreport"], "tokens": 520}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def pytest_runtest_logreport(self, report: TestReport) -> None:\n self._tests_ran = True\n rep = report\n res: Tuple[\n str, str, Union[str, Tuple[str, Mapping[str, bool]]]\n ] = self.config.hook.pytest_report_teststatus(report=rep, config=self.config)\n category, letter, word = res\n if not isinstance(word, tuple):\n markup = None\n else:\n word, markup = word\n self._add_stats(category, [rep])\n if not letter and not word:\n # Probably passed setup/teardown.\n return\n running_xdist = hasattr(rep, \"node\")\n if markup is None:\n was_xfail = hasattr(report, \"wasxfail\")\n if rep.passed and 
not was_xfail:\n markup = {\"green\": True}\n elif rep.passed and was_xfail:\n markup = {\"yellow\": True}\n elif rep.failed:\n markup = {\"red\": True}\n elif rep.skipped:\n markup = {\"yellow\": True}\n else:\n markup = {}\n if self.verbosity <= 0:\n self._tw.write(letter, **markup)\n else:\n self._progress_nodeids_reported.add(rep.nodeid)\n line = self._locationline(rep.nodeid, *rep.location)\n if not running_xdist:\n self.write_ensure_prefix(line, word, **markup)\n if rep.skipped or hasattr(report, \"wasxfail\"):\n available_width = (\n (self._tw.fullwidth - self._tw.width_of_current_line)\n - len(\" [100%]\")\n - 1\n )\n reason = _get_raw_skip_reason(rep)\n reason_ = _format_trimmed(\" ({})\", reason, available_width)\n if reason and reason_ is not None:\n self._tw.write(reason_)\n if self._show_progress_info:\n self._write_progress_information_filling_space()\n else:\n self.ensure_newline()\n self._tw.write(\"[%s]\" % rep.node.gateway.id)\n if self._show_progress_info:\n self._tw.write(\n self._get_progress_information_message() + \" \", cyan=True\n )\n else:\n self._tw.write(\" \")\n self._tw.write(word, **markup)\n self._tw.write(\" \" + line)\n self.currentfspath = -2\n self.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_collection_finish_TerminalReporter.pytest_collection_finish.if_self_config_getoption_.if_failed_.for_rep_in_failed_.rep_toterminal_self__tw_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_collection_finish_TerminalReporter.pytest_collection_finish.if_self_config_getoption_.if_failed_.for_rep_in_failed_.rep_toterminal_self__tw_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 739, "end_line": 760, "span_ids": ["TerminalReporter.pytest_collection_finish"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def pytest_collection_finish(self, session: \"Session\") -> None:\n self.report_collect(True)\n\n lines = self.config.hook.pytest_report_collectionfinish(\n config=self.config,\n startpath=self.startpath,\n startdir=self.startdir,\n items=session.items,\n )\n self._write_report_lines_from_hooks(lines)\n\n if self.config.getoption(\"collectonly\"):\n if session.items:\n if self.config.option.verbose > -1:\n self._tw.line(\"\")\n self._printcollecteditems(session.items)\n\n failed = self.stats.get(\"failed\")\n if failed:\n self._tw.sep(\"!\", \"collection failures\")\n for rep in failed:\n rep.toterminal(self._tw)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_tee_stdio_captures_and_live_prints_test_tee_stdio_captures_and_live_prints.assert_this_is_stderr_": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_tee_stdio_captures_and_live_prints_test_tee_stdio_captures_and_live_prints.assert_this_is_stderr_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1274, "end_line": 1299, "span_ids": ["test_tee_stdio_captures_and_live_prints"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_tee_stdio_captures_and_live_prints(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import sys\n def test_simple():\n print (\"@this is stdout@\")\n print (\"@this is stderr@\", file=sys.stderr)\n \"\"\"\n )\n result = pytester.runpytest_subprocess(\n testpath,\n \"--capture=tee-sys\",\n \"--junitxml=output.xml\",\n \"-o\",\n \"junit_logging=all\",\n )\n\n # ensure stdout/stderr were 'live printed'\n result.stdout.fnmatch_lines([\"*@this is stdout@*\"])\n result.stderr.fnmatch_lines([\"*@this is stderr@*\"])\n\n # now ensure the output is in the junitxml\n with open(pytester.path.joinpath(\"output.xml\")) as f:\n fullXml = f.read()\n assert \"@this is stdout@\\n\" in fullXml\n assert \"@this is stderr@\\n\" in fullXml", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_no_brokenpipeerror_message_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_no_brokenpipeerror_message_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1302, "end_line": 1321, "span_ids": ["test_no_brokenpipeerror_message"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(\n sys.platform == \"win32\",\n reason=\"Windows raises `OSError: [Errno 22] Invalid argument` instead\",\n)\ndef test_no_brokenpipeerror_message(pytester: Pytester) -> None:\n \"\"\"Ensure that the broken pipe error message is supressed.\n\n In some Python versions, it reaches sys.unraisablehook, in others\n a BrokenPipeError exception is propagated, but either way it prints\n to stderr on shutdown, so checking nothing is printed is enough.\n \"\"\"\n popen = pytester.popen((*pytester._getpytestargs(), \"--help\"))\n popen.stdout.close()\n ret = popen.wait()\n assert popen.stderr.read() == b\"\"\n assert ret == 1\n\n # Cleanup.\n popen.stderr.close()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_format_excinfo_TestFormattedExcinfo.test_format_excinfo.assert_file_getvalue_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_format_excinfo_TestFormattedExcinfo.test_format_excinfo.assert_file_getvalue_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1045, "end_line": 1080, "span_ids": ["TestFormattedExcinfo.test_format_excinfo"], "tokens": 281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n @pytest.mark.parametrize(\n \"reproptions\",\n [\n pytest.param(\n {\n \"style\": style,\n \"showlocals\": showlocals,\n \"funcargs\": funcargs,\n \"tbfilter\": tbfilter,\n },\n id=\"style={},showlocals={},funcargs={},tbfilter={}\".format(\n style, showlocals, funcargs, tbfilter\n ),\n )\n for style in [\"long\", \"short\", \"line\", \"no\", \"native\", \"value\", \"auto\"]\n for showlocals in (True, False)\n for tbfilter in (True, False)\n for funcargs in (True, False)\n ],\n )\n def test_format_excinfo(self, reproptions: Dict[str, Any]) -> None:\n def bar():\n assert False, \"some error\"\n\n def foo():\n bar()\n\n # using inline functions as opposed to importasmod so we get source code lines\n # in the tracebacks (otherwise getinspect doesn't find the source code).\n with pytest.raises(AssertionError) as excinfo:\n foo()\n file = io.StringIO()\n tw = TerminalWriter(file=file)\n repr = excinfo.getrepr(**reproptions)\n repr.toterminal(tw)\n assert file.getvalue()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_comments_test_comment_in_statement.for_line_in_range_1_3_.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_comments_test_comment_in_statement.for_line_in_range_1_3_.assert_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 589, "end_line": 624, "span_ids": ["test_comment_in_statement", "test_comments"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_comments() -> None:\n source = '''def test():\n \"comment 1\"\n x = 1\n # comment 2\n # comment 3\n\n assert False\n\n\"\"\"\ncomment 4\n\"\"\"\n'''\n for line in range(2, 6):\n assert str(getstatement(line, source)) == \" x = 1\"\n if sys.version_info >= (3, 8) or hasattr(sys, \"pypy_version_info\"):\n tqs_start = 8\n else:\n tqs_start = 10\n assert str(getstatement(10, source)) == '\"\"\"'\n for line in range(6, tqs_start):\n assert 
str(getstatement(line, source)) == \" assert False\"\n for line in range(tqs_start, 10):\n assert str(getstatement(line, source)) == '\"\"\"\\ncomment 4\\n\"\"\"'\n\n\ndef test_comment_in_statement() -> None:\n source = \"\"\"test(foo=1,\n # comment 1\n bar=2)\n\"\"\"\n for line in range(1, 3):\n assert (\n str(getstatement(line, source))\n == \"test(foo=1,\\n # comment 1\\n bar=2)\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_single_line_else_test_multiline.assert_str_source_ra": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_single_line_else_test_multiline.assert_str_source_ra", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 652, "end_line": 681, "span_ids": ["test_single_line_else", "test_single_line_finally", "test_issue55", "test_multiline"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_single_line_else() -> None:\n source = getstatement(1, \"if False: 2\\nelse: 3\")\n assert str(source) == \"else: 3\"\n\n\ndef test_single_line_finally() -> None:\n source = getstatement(1, \"try: 1\\nfinally: 3\")\n assert str(source) == \"finally: 3\"\n\n\ndef test_issue55() -> None:\n source = (\n \"def round_trip(dinp):\\n assert 1 == dinp\\n\"\n 'def test_rt():\\n round_trip(\"\"\"\\n\"\"\")\\n'\n )\n s = getstatement(3, source)\n assert str(s) == ' round_trip(\"\"\"\\n\"\"\")'\n\n\ndef test_multiline() -> None:\n source = getstatement(\n 0,\n \"\"\"\\\nraise ValueError(\n 23\n)\nx = 3\n\"\"\",\n )\n assert str(source) == \"raise ValueError(\\n 23\\n)\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriter_TestTerminalWriter.tw.with_f_.yield_tw": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriter_TestTerminalWriter.tw.with_f_.yield_tw", "embedding": null, "metadata": {"file_path": "testing/io/test_terminalwriter.py", "file_name": "test_terminalwriter.py", "file_type": "text/x-python", "category": "test", "start_line": 65, "end_line": 92, "span_ids": ["TestTerminalWriter", "TestTerminalWriter.tw"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalWriter:\n @pytest.fixture(params=[\"path\", \"stringio\"])\n def tw(\n self, request, tmp_path: Path\n ) -> Generator[terminalwriter.TerminalWriter, None, None]:\n if request.param == \"path\":\n p = tmp_path.joinpath(\"tmpfile\")\n f = open(str(p), \"w+\", encoding=\"utf8\")\n 
tw = terminalwriter.TerminalWriter(f)\n\n def getlines():\n f.flush()\n with open(str(p), encoding=\"utf8\") as fp:\n return fp.readlines()\n\n elif request.param == \"stringio\":\n f = io.StringIO()\n tw = terminalwriter.TerminalWriter(f)\n\n def getlines():\n f.seek(0)\n return f.readlines()\n\n tw.getlines = getlines # type: ignore\n tw.getvalue = lambda: \"\".join(getlines()) # type: ignore\n\n with f:\n yield tw", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriter.test_line_TestTerminalWriter.test_attr_fullwidth.assert_len_lines_0_l": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriter.test_line_TestTerminalWriter.test_attr_fullwidth.assert_len_lines_0_l", "embedding": null, "metadata": {"file_path": "testing/io/test_terminalwriter.py", "file_name": "test_terminalwriter.py", "file_type": "text/x-python", "category": "test", "start_line": 84, "end_line": 141, "span_ids": ["TestTerminalWriter.test_sep_with_title", "TestTerminalWriter.test_line_unicode", "TestTerminalWriter.test_sep_longer_than_width", "TestTerminalWriter.test_markup_bad", "TestTerminalWriter.test_sep_no_title", "TestTerminalWriter.test_attr_fullwidth", "TestTerminalWriter.test_markup", "TestTerminalWriter.test_line", "TestTerminalWriter.test_line_write_markup"], "tokens": 579}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalWriter:\n\n def test_line(self, tw) -> None:\n tw.line(\"hello\")\n lines = tw.getlines()\n assert len(lines) == 1\n assert lines[0] == \"hello\\n\"\n\n def test_line_unicode(self, tw) -> None:\n msg = \"b\\u00f6y\"\n tw.line(msg)\n lines = tw.getlines()\n assert lines[0] == msg + \"\\n\"\n\n def test_sep_no_title(self, tw) -> None:\n tw.sep(\"-\", fullwidth=60)\n lines = tw.getlines()\n assert len(lines) == 1\n assert lines[0] == \"-\" * (60 - win32) + \"\\n\"\n\n def test_sep_with_title(self, tw) -> None:\n tw.sep(\"-\", \"hello\", fullwidth=60)\n lines = tw.getlines()\n assert len(lines) == 1\n assert lines[0] == \"-\" * 26 + \" hello \" + \"-\" * (27 - win32) + \"\\n\"\n\n def test_sep_longer_than_width(self, tw) -> None:\n tw.sep(\"-\", \"a\" * 10, fullwidth=5)\n (line,) = tw.getlines()\n # even though the string is wider than the line, still have a separator\n assert line == \"- aaaaaaaaaa -\\n\"\n\n @pytest.mark.skipif(sys.platform == \"win32\", reason=\"win32 has no native ansi\")\n @pytest.mark.parametrize(\"bold\", (True, False))\n @pytest.mark.parametrize(\"color\", (\"red\", \"green\"))\n def test_markup(self, tw, bold: bool, color: str) -> None:\n text = tw.markup(\"hello\", **{color: True, \"bold\": bold})\n assert \"hello\" in text\n\n def test_markup_bad(self, tw) -> None:\n with pytest.raises(ValueError):\n tw.markup(\"x\", wronkw=3)\n with pytest.raises(ValueError):\n tw.markup(\"x\", wronkw=0)\n\n def test_line_write_markup(self, tw) -> None:\n tw.hasmarkup = True\n tw.line(\"x\", bold=True)\n tw.write(\"x\\n\", red=True)\n lines = tw.getlines()\n if sys.platform != \"win32\":\n 
assert len(lines[0]) >= 2, lines\n assert len(lines[1]) >= 2, lines\n\n def test_attr_fullwidth(self, tw) -> None:\n tw.sep(\"-\", \"hello\", fullwidth=70)\n tw.fullwidth = 70\n tw.sep(\"-\", \"hello\")\n lines = tw.getlines()\n assert len(lines[0]) == len(lines[1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriterLineWidth_TestTerminalWriterLineWidth.test_combining.assert_tw_width_of_curren": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_TestTerminalWriterLineWidth_TestTerminalWriterLineWidth.test_combining.assert_tw_width_of_curren", "embedding": null, "metadata": {"file_path": "testing/io/test_terminalwriter.py", "file_name": "test_terminalwriter.py", "file_type": "text/x-python", "category": "test", "start_line": 180, "end_line": 212, "span_ids": ["TestTerminalWriterLineWidth", "TestTerminalWriterLineWidth.test_update_with_newline", "TestTerminalWriterLineWidth.test_composed", "TestTerminalWriterLineWidth.test_combining", "TestTerminalWriterLineWidth.test_init", "TestTerminalWriterLineWidth.test_update_with_wide_text", "TestTerminalWriterLineWidth.test_update"], "tokens": 290}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalWriterLineWidth:\n def test_init(self) -> None:\n tw = terminalwriter.TerminalWriter()\n assert tw.width_of_current_line == 0\n\n def test_update(self) -> None:\n tw = terminalwriter.TerminalWriter()\n tw.write(\"hello world\")\n assert tw.width_of_current_line == 11\n\n def test_update_with_newline(self) -> None:\n tw = terminalwriter.TerminalWriter()\n tw.write(\"hello\\nworld\")\n assert tw.width_of_current_line == 5\n\n def test_update_with_wide_text(self) -> None:\n tw = terminalwriter.TerminalWriter()\n tw.write(\"\u4e47\u4e42\u3112\u5c3a\u5342 \u3112\u5344\u4e28\u531a\u531a\")\n assert tw.width_of_current_line == 21 # 5*2 + 1 + 5*2\n\n def test_composed(self) -> None:\n tw = terminalwriter.TerminalWriter()\n text = \"caf\u00e9 food\"\n assert len(text) == 9\n tw.write(text)\n assert tw.width_of_current_line == 9\n\n def test_combining(self) -> None:\n tw = terminalwriter.TerminalWriter()\n text = \"cafe\u0301 food\"\n assert len(text) == 10\n tw.write(text)\n assert tw.width_of_current_line == 9", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_code_highlight_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_code_highlight_", "embedding": null, "metadata": {"file_path": "testing/io/test_terminalwriter.py", "file_name": "test_terminalwriter.py", "file_type": "text/x-python", "category": "test", "start_line": 251, "end_line": 294, "span_ids": ["test_code_highlight"], "tokens": 271}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", 
"end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"has_markup\", \"code_highlight\", \"expected\"),\n [\n pytest.param(\n True,\n True,\n \"{kw}assert{hl-reset} {number}0{hl-reset}\\n\",\n id=\"with markup and code_highlight\",\n ),\n pytest.param(\n True,\n False,\n \"assert 0\\n\",\n id=\"with markup but no code_highlight\",\n ),\n pytest.param(\n False,\n True,\n \"assert 0\\n\",\n id=\"without markup but with code_highlight\",\n ),\n pytest.param(\n False,\n False,\n \"assert 0\\n\",\n id=\"neither markup nor code_highlight\",\n ),\n ],\n)\ndef test_code_highlight(has_markup, code_highlight, expected, color_mapping):\n f = io.StringIO()\n tw = terminalwriter.TerminalWriter(f)\n tw.hasmarkup = has_markup\n tw.code_highlight = code_highlight\n tw._write_source([\"assert 0\"])\n\n assert f.getvalue().splitlines(keepends=True) == color_mapping.format([expected])\n\n with pytest.raises(\n ValueError,\n match=re.escape(\"indents size (2) should have same size as lines (1)\"),\n ):\n tw._write_source([\"assert 0\"], [\" \", \" \"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_colored_ansi_esc_caplogtext_test_colored_ansi_esc_caplogtext.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_colored_ansi_esc_caplogtext_test_colored_ansi_esc_caplogtext.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 1101, "end_line": 1115, "span_ids": ["test_colored_ansi_esc_caplogtext"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_colored_ansi_esc_caplogtext(pytester: Pytester) -> None:\n \"\"\"Make sure that caplog.text does not contain ANSI escape sequences.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n logger = logging.getLogger(__name__)\n\n def test_foo(caplog):\n logger.info('text going to logger from call')\n assert '\\x1b' not in caplog.text\n \"\"\"\n )\n result = pytester.runpytest(\"--log-level=INFO\", \"--color=yes\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_logging_emit_error_test_logging_emit_error.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_logging_emit_error_test_logging_emit_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 1118, "end_line": 
1141, "span_ids": ["test_logging_emit_error"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_logging_emit_error(pytester: Pytester) -> None:\n \"\"\"An exception raised during emit() should fail the test.\n\n The default behavior of logging is to print \"Logging error\"\n to stderr with the call stack and some extra details.\n\n pytest overrides this behavior to propagate the exception.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n def test_bad_log():\n logging.warning('oops', 'first', 2)\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(failed=1)\n result.stdout.fnmatch_lines(\n [\n \"====* FAILURES *====\",\n \"*not all arguments converted during string formatting*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_logging_emit_error_supressed_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_logging_emit_error_supressed_", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 1144, "end_line": 1168, "span_ids": ["test_log_file_cli_subdirectories_are_successfully_created", "test_logging_emit_error_supressed"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_logging_emit_error_supressed(pytester: Pytester) -> None:\n \"\"\"If logging is configured to silently ignore errors, pytest\n doesn't propagate errors either.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n def test_bad_log(monkeypatch):\n monkeypatch.setattr(logging, 'raiseExceptions', False)\n logging.warning('oops', 'first', 2)\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=1)\n\n\ndef test_log_file_cli_subdirectories_are_successfully_created(\n pytester: Pytester,\n) -> None:\n path = pytester.makepyfile(\"\"\" def test_logger(): pass \"\"\")\n expected = os.path.join(os.path.dirname(str(path)), \"foo\", \"bar\")\n result = pytester.runpytest(\"--log-file=foo/bar/logf.log\")\n assert \"logf.log\" in os.listdir(expected)\n assert result.ret == ExitCode.OK", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_with_square_brackets_TestFunction.test_function_with_square_brackets.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_with_square_brackets_TestFunction.test_function_with_square_brackets.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 706, "end_line": 719, "span_ids": ["TestFunction.test_function_with_square_brackets"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_function_with_square_brackets(self, pytester: Pytester) -> None:\n \"\"\"Check that functions with square brackets don't cause trouble.\"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n locals()[\"test_foo[name]\"] = lambda: None\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", str(p1))\n result.stdout.fnmatch_lines(\n [\n \"test_function_with_square_brackets.py::test_foo[[]name[]] PASSED *\",\n \"*= 1 passed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_syntax_error_with_non_ascii_chars_test_collect_error_with_fulltrace.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_test_syntax_error_with_non_ascii_chars_test_collect_error_with_fulltrace.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1299, "end_line": 1322, "span_ids": ["test_collect_error_with_fulltrace", "test_syntax_error_with_non_ascii_chars"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_syntax_error_with_non_ascii_chars(pytester: Pytester) -> None:\n \"\"\"Fix decoding issue while formatting SyntaxErrors during collection (#578).\"\"\"\n pytester.makepyfile(\"\u2603\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*ERROR collecting*\", \"*SyntaxError*\", \"*1 error in*\"])\n\n\ndef test_collect_error_with_fulltrace(pytester: Pytester) -> None:\n pytester.makepyfile(\"assert 0\")\n result = pytester.runpytest(\"--fulltrace\")\n result.stdout.fnmatch_lines(\n [\n \"collected 0 items / 1 error\",\n \"\",\n \"*= ERRORS =*\",\n \"*_ ERROR collecting test_collect_error_with_fulltrace.py _*\",\n \"\",\n \"> assert 0\",\n \"E assert 0\",\n \"\",\n \"test_collect_error_with_fulltrace.py:1: AssertionError\",\n \"*! 
Interrupted: 1 error during collection !*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_iterator_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMarkersWithParametrization.test_parametrize_iterator_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1867, "end_line": 1902, "span_ids": ["TestMarkersWithParametrization.test_parametrize_iterator"], "tokens": 296}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMarkersWithParametrization:\n\n def test_parametrize_iterator(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import itertools\n import pytest\n\n id_parametrize = pytest.mark.parametrize(\n ids=(\"param%d\" % i for i in itertools.count())\n )\n\n @id_parametrize('y', ['a', 'b'])\n def test1(y):\n pass\n\n @id_parametrize('y', ['a', 'b'])\n def test2(y):\n pass\n\n @pytest.mark.parametrize(\"a, b\", [(1, 2), (3, 4)], ids=itertools.count())\n def test_converted_to_str(a, b):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-vv\", \"-s\")\n result.stdout.fnmatch_lines(\n [\n \"test_parametrize_iterator.py::test1[param0] PASSED\",\n \"test_parametrize_iterator.py::test1[param1] PASSED\",\n \"test_parametrize_iterator.py::test2[param0] PASSED\",\n \"test_parametrize_iterator.py::test2[param1] PASSED\",\n \"test_parametrize_iterator.py::test_converted_to_str[0] PASSED\",\n \"test_parametrize_iterator.py::test_converted_to_str[1] PASSED\",\n \"*= 6 passed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_reload_is_same_and_reloads_TestAssertionRewriteHookDetails.test_reload_is_same_and_reloads.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_reload_is_same_and_reloads_TestAssertionRewriteHookDetails.test_reload_is_same_and_reloads.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1119, "end_line": 1149, "span_ids": ["TestAssertionRewriteHookDetails.test_reload_is_same_and_reloads"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewriteHookDetails:\n\n def 
test_reload_is_same_and_reloads(self, pytester: Pytester) -> None:\n \"\"\"Reloading a (collected) module after change picks up the change.\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n python_files = *.py\n \"\"\"\n )\n pytester.makepyfile(\n file=\"\"\"\n def reloaded():\n return False\n\n def rewrite_self():\n with open(__file__, 'w') as self:\n self.write('def reloaded(): return True')\n \"\"\",\n test_fun=\"\"\"\n import sys\n from importlib import reload\n\n def test_loader():\n import file\n assert not file.reloaded()\n file.rewrite_self()\n assert sys.modules[\"file\"] is reload(file)\n assert file.reloaded()\n \"\"\",\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_known_failures_not_being_selected_TestLastFailed.test_lastfailed_with_known_failures_not_being_selected.None_8": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_known_failures_not_being_selected_TestLastFailed.test_lastfailed_with_known_failures_not_being_selected.None_8", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 850, "end_line": 906, "span_ids": ["TestLastFailed.test_lastfailed_with_known_failures_not_being_selected"], "tokens": 491}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_with_known_failures_not_being_selected(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n **{\n \"pkg1/test_1.py\": \"\"\"def test_1(): assert 0\"\"\",\n \"pkg1/test_2.py\": \"\"\"def test_2(): pass\"\"\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 2 items\", \"* 1 failed, 1 passed in *\"])\n\n Path(\"pkg1/test_1.py\").unlink()\n result = pytester.runpytest(\"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 1 item\",\n \"run-last-failure: 1 known failures not in selected tests\",\n \"* 1 passed in *\",\n ]\n )\n\n # Recreate file with known failure.\n pytester.makepyfile(**{\"pkg1/test_1.py\": \"\"\"def test_1(): assert 0\"\"\"})\n result = pytester.runpytest(\"--lf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 1 item\",\n \"run-last-failure: rerun previous 1 failure (skipped 1 file)\",\n \"* 1 failed in *\",\n ]\n )\n\n # Remove/rename test: collects the file again.\n pytester.makepyfile(**{\"pkg1/test_1.py\": \"\"\"def test_renamed(): assert 0\"\"\"})\n result = pytester.runpytest(\"--lf\", \"-rf\")\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items\",\n \"run-last-failure: 1 known failures not in selected tests\",\n \"pkg1/test_1.py F *\",\n \"pkg1/test_2.py . 
*\",\n \"FAILED pkg1/test_1.py::test_renamed - assert 0\",\n \"* 1 failed, 1 passed in *\",\n ]\n )\n\n result = pytester.runpytest(\"--lf\", \"--co\")\n result.stdout.fnmatch_lines(\n [\n \"collected 1 item\",\n \"run-last-failure: rerun previous 1 failure (skipped 1 file)\",\n \"\",\n \"\",\n \" \",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_args_with_deselected_TestLastFailed.test_lastfailed_args_with_deselected.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_args_with_deselected_TestLastFailed.test_lastfailed_args_with_deselected.None_3", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 908, "end_line": 952, "span_ids": ["TestLastFailed.test_lastfailed_args_with_deselected"], "tokens": 365}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_args_with_deselected(self, pytester: Pytester) -> None:\n \"\"\"Test regression with --lf running into NoMatch error.\n\n This was caused by it not collecting (non-failed) nodes given as\n arguments.\n \"\"\"\n pytester.makepyfile(\n **{\n \"pkg1/test_1.py\": \"\"\"\n def test_pass(): pass\n def test_fail(): assert 0\n \"\"\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 2 items\", \"* 1 failed, 1 passed in *\"])\n assert result.ret == 1\n\n result = pytester.runpytest(\"pkg1/test_1.py::test_pass\", \"--lf\", \"--co\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"*collected 1 item\",\n \"run-last-failure: 1 known failures not in selected tests\",\n \"\",\n \"\",\n \" \",\n ],\n consecutive=True,\n )\n\n result = pytester.runpytest(\n \"pkg1/test_1.py::test_pass\", \"pkg1/test_1.py::test_fail\", \"--lf\", \"--co\"\n )\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"collected 2 items / 1 deselected / 1 selected\",\n \"run-last-failure: rerun previous 1 failure\",\n \"\",\n \"\",\n \" \",\n \"*= 1/2 tests collected (1 deselected) in *\",\n ],\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_class_items_TestLastFailed.test_lastfailed_with_class_items.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_class_items_TestLastFailed.test_lastfailed_with_class_items.None_2", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 954, "end_line": 986, "span_ids": ["TestLastFailed.test_lastfailed_with_class_items"], 
"tokens": 259}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_with_class_items(self, pytester: Pytester) -> None:\n \"\"\"Test regression with --lf deselecting whole classes.\"\"\"\n pytester.makepyfile(\n **{\n \"pkg1/test_1.py\": \"\"\"\n class TestFoo:\n def test_pass(self): pass\n def test_fail(self): assert 0\n\n def test_other(): assert 0\n \"\"\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 3 items\", \"* 2 failed, 1 passed in *\"])\n assert result.ret == 1\n\n result = pytester.runpytest(\"--lf\", \"--co\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"collected 3 items / 1 deselected / 2 selected\",\n \"run-last-failure: rerun previous 2 failures\",\n \"\",\n \"\",\n \" \",\n \" \",\n \" \",\n \"\",\n \"*= 2/3 tests collected (1 deselected) in *\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_all_filtered_TestLastFailed.test_lastfailed_with_all_filtered.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_lastfailed_with_all_filtered_TestLastFailed.test_lastfailed_with_all_filtered.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 988, "end_line": 1022, "span_ids": ["TestLastFailed.test_lastfailed_with_all_filtered"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_lastfailed_with_all_filtered(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"pkg1/test_1.py\": \"\"\"\n def test_fail(): assert 0\n def test_pass(): pass\n \"\"\",\n }\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"collected 2 items\", \"* 1 failed, 1 passed in *\"])\n assert result.ret == 1\n\n # Remove known failure.\n pytester.makepyfile(\n **{\n \"pkg1/test_1.py\": \"\"\"\n def test_pass(): pass\n \"\"\",\n }\n )\n result = pytester.runpytest(\"--lf\", \"--co\")\n result.stdout.fnmatch_lines(\n [\n \"collected 1 item\",\n \"run-last-failure: 1 known failures not in selected tests\",\n \"\",\n \"\",\n \" \",\n \"\",\n \"*= 1 test collected in*\",\n ],\n consecutive=True,\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewFirst_TestNewFirst.test_newfirst_usecase.None_9": {"__data__": 
{"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewFirst_TestNewFirst.test_newfirst_usecase.None_9", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 1055, "end_line": 1110, "span_ids": ["TestNewFirst.test_newfirst_usecase", "TestNewFirst"], "tokens": 533}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewFirst:\n def test_newfirst_usecase(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"test_1/test_1.py\": \"\"\"\n def test_1(): assert 1\n \"\"\",\n \"test_2/test_2.py\": \"\"\"\n def test_1(): assert 1\n \"\"\",\n }\n )\n\n p1 = pytester.path.joinpath(\"test_1/test_1.py\")\n os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))\n\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n [\"*test_1/test_1.py::test_1 PASSED*\", \"*test_2/test_2.py::test_1 PASSED*\"]\n )\n\n result = pytester.runpytest(\"-v\", \"--nf\")\n result.stdout.fnmatch_lines(\n [\"*test_2/test_2.py::test_1 PASSED*\", \"*test_1/test_1.py::test_1 PASSED*\"]\n )\n\n p1.write_text(\"def test_1(): assert 1\\n\" \"def test_2(): assert 1\\n\")\n os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))\n\n result = pytester.runpytest(\"--nf\", \"--collect-only\", \"-q\")\n result.stdout.fnmatch_lines(\n [\n \"test_1/test_1.py::test_2\",\n \"test_2/test_2.py::test_1\",\n \"test_1/test_1.py::test_1\",\n ]\n )\n\n # Newest first with (plugin) pytest_collection_modifyitems hook.\n pytester.makepyfile(\n myplugin=\"\"\"\n def pytest_collection_modifyitems(items):\n items[:] = sorted(items, key=lambda item: item.nodeid)\n print(\"new_items:\", [x.nodeid for x in items])\n \"\"\"\n )\n pytester.syspathinsert()\n result = pytester.runpytest(\"--nf\", \"-p\", \"myplugin\", \"--collect-only\", \"-q\")\n result.stdout.fnmatch_lines(\n [\n \"new_items: *test_1.py*test_1.py*test_2.py*\",\n \"test_1/test_1.py::test_2\",\n \"test_2/test_2.py::test_1\",\n \"test_1/test_1.py::test_1\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_contextlib_TeeStdCapture.return.capture_MultiCapture_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_contextlib_TeeStdCapture.return.capture_MultiCapture_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 55, "span_ids": ["StdCaptureFD", "imports", "TeeStdCapture", "StdCapture"], "tokens": 414}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import contextlib\nimport io\nimport os\nimport subprocess\nimport sys\nimport textwrap\nfrom io import 
UnsupportedOperation\nfrom typing import BinaryIO\nfrom typing import cast\nfrom typing import Generator\nfrom typing import TextIO\n\nimport pytest\nfrom _pytest import capture\nfrom _pytest.capture import _get_multicapture\nfrom _pytest.capture import CaptureFixture\nfrom _pytest.capture import CaptureManager\nfrom _pytest.capture import CaptureResult\nfrom _pytest.capture import MultiCapture\nfrom _pytest.config import ExitCode\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\n# note: py.io capture tests where copied from\n# pylib 1.4.20.dev2 (rev 13d9af95547e)\n\n\ndef StdCaptureFD(\n out: bool = True, err: bool = True, in_: bool = True\n) -> MultiCapture[str]:\n return capture.MultiCapture(\n in_=capture.FDCapture(0) if in_ else None,\n out=capture.FDCapture(1) if out else None,\n err=capture.FDCapture(2) if err else None,\n )\n\n\ndef StdCapture(\n out: bool = True, err: bool = True, in_: bool = True\n) -> MultiCapture[str]:\n return capture.MultiCapture(\n in_=capture.SysCapture(0) if in_ else None,\n out=capture.SysCapture(1) if out else None,\n err=capture.SysCapture(2) if err else None,\n )\n\n\ndef TeeStdCapture(\n out: bool = True, err: bool = True, in_: bool = True\n) -> MultiCapture[str]:\n return capture.MultiCapture(\n in_=capture.SysCapture(0, tee=True) if in_ else None,\n out=capture.SysCapture(1, tee=True) if out else None,\n err=capture.SysCapture(2, tee=True) if err else None,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_with_fd_reuse_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_with_fd_reuse.with_saved_fd_1_.with_pytest_raises_OSErro.os_write_1_b_done_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_with_fd_reuse_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_with_fd_reuse.with_saved_fd_1_.with_pytest_raises_OSErro.os_write_1_b_done_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1285, "end_line": 1298, "span_ids": ["TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_with_fd_reuse"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStdCaptureFDinvalidFD:\n\n def test_fdcapture_invalid_fd_with_fd_reuse(self, pytester: Pytester) -> None:\n with saved_fd(1):\n os.close(1)\n cap = capture.FDCaptureBinary(1)\n cap.start()\n os.write(1, b\"started\")\n cap.suspend()\n os.write(1, b\" suspended\")\n cap.resume()\n os.write(1, b\" resumed\")\n assert cap.snap() == b\"started resumed\"\n cap.done()\n with pytest.raises(OSError):\n os.write(1, b\"done\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_without_fd_reuse_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_without_fd_reuse.with_saved_fd_1_saved_f.with_pytest_raises_OSErro.os_write_2_b_done_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_without_fd_reuse_TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_without_fd_reuse.with_saved_fd_1_saved_f.with_pytest_raises_OSErro.os_write_2_b_done_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1300, "end_line": 1314, "span_ids": ["TestStdCaptureFDinvalidFD.test_fdcapture_invalid_fd_without_fd_reuse"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStdCaptureFDinvalidFD:\n\n def test_fdcapture_invalid_fd_without_fd_reuse(self, pytester: Pytester) -> None:\n with saved_fd(1), saved_fd(2):\n os.close(1)\n os.close(2)\n cap = capture.FDCaptureBinary(2)\n cap.start()\n os.write(2, b\"started\")\n cap.suspend()\n os.write(2, b\" suspended\")\n cap.resume()\n os.write(2, b\" resumed\")\n assert cap.snap() == b\"started resumed\"\n cap.done()\n with pytest.raises(OSError):\n os.write(2, b\"done\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_typeerror_encodedfile_write_test_typeerror_encodedfile_write.assert_TypeError_write": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_typeerror_encodedfile_write_test_typeerror_encodedfile_write.assert_TypeError_write", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1597, "end_line": 1613, "span_ids": ["test_typeerror_encodedfile_write"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_typeerror_encodedfile_write(pytester: Pytester) -> None:\n \"\"\"It should behave the same with and without output capturing (#4861).\"\"\"\n p = pytester.makepyfile(\n \"\"\"\n def test_fails():\n import sys\n sys.stdout.write(b\"foo\")\n \"\"\"\n )\n result_without_capture = pytester.runpytest(\"-s\", str(p))\n result_with_capture = pytester.runpytest(str(p))\n\n assert result_with_capture.ret == result_without_capture.ret\n out = result_with_capture.stdout.str()\n assert (\"TypeError: write() argument must be str, not bytes\" in out) or (\n \"TypeError: unicode argument expected, got 'bytes'\" in out\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_stderr_write_returns_len_test__get_multicapture.pytest_raises_ValueError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_stderr_write_returns_len_test__get_multicapture.pytest_raises_ValueError_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1616, "end_line": 1638, "span_ids": ["test__get_multicapture", "test_stderr_write_returns_len", "test_encodedfile_writelines"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_stderr_write_returns_len(capsys: CaptureFixture[str]) -> None:\n \"\"\"Write on Encoded files, namely captured stderr, should return number of characters written.\"\"\"\n assert sys.stderr.write(\"Foo\") == 3\n\n\ndef test_encodedfile_writelines(tmpfile: BinaryIO) -> None:\n ef = capture.EncodedFile(tmpfile, encoding=\"utf-8\")\n with pytest.raises(TypeError):\n ef.writelines([b\"line1\", b\"line2\"]) # type: ignore[list-item]\n assert ef.writelines([\"line3\", \"line4\"]) is None # type: ignore[func-returns-value]\n ef.flush()\n tmpfile.seek(0)\n assert tmpfile.read() == b\"line3line4\"\n tmpfile.close()\n with pytest.raises(ValueError):\n ef.read()\n\n\ndef test__get_multicapture() -> None:\n assert isinstance(_get_multicapture(\"no\"), MultiCapture)\n pytest.raises(ValueError, _get_multicapture, \"unknown\").match(\n r\"^unknown capturing method: 'unknown'\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_logging_while_collecting_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_logging_while_collecting_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1641, "end_line": 1668, "span_ids": ["test_logging_while_collecting"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_logging_while_collecting(pytester: Pytester) -> None:\n \"\"\"Issue #6240: Calls to logging.xxx() during collection causes all logging calls to be duplicated to stderr\"\"\"\n p = pytester.makepyfile(\n \"\"\"\\\n import logging\n\n logging.warning(\"during collection\")\n\n def test_logging():\n logging.warning(\"during call\")\n assert False\n \"\"\"\n )\n result = pytester.runpytest_subprocess(p)\n assert result.ret == ExitCode.TESTS_FAILED\n result.stdout.fnmatch_lines(\n [\n \"*test_*.py F*\",\n \"====* FAILURES *====\",\n \"____*____\",\n \"*--- Captured log call*\",\n \"WARNING * during call\",\n \"*1 
failed*\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*Captured stderr call*\")\n result.stdout.no_fnmatch_line(\"*during collection*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni_TestParseIni.test_getcfg_and_config.assert_config_inicfg_nam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni_TestParseIni.test_getcfg_and_config.assert_config_inicfg_nam", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 34, "end_line": 63, "span_ids": ["TestParseIni", "TestParseIni.test_getcfg_and_config"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n @pytest.mark.parametrize(\n \"section, filename\", [(\"pytest\", \"pytest.ini\"), (\"tool:pytest\", \"setup.cfg\")]\n )\n def test_getcfg_and_config(\n self,\n pytester: Pytester,\n tmp_path: Path,\n section: str,\n filename: str,\n monkeypatch: MonkeyPatch,\n ) -> None:\n sub = tmp_path / \"sub\"\n sub.mkdir()\n monkeypatch.chdir(sub)\n (tmp_path / filename).write_text(\n textwrap.dedent(\n \"\"\"\\\n [{section}]\n name = value\n \"\"\".format(\n section=section\n )\n ),\n encoding=\"utf-8\",\n )\n _, _, cfg = locate_config([sub])\n assert cfg[\"name\"] == \"value\"\n config = pytester.parseconfigure(str(sub))\n assert config.inicfg[\"name\"] == \"value\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives.assert_msg_not_in_res_std": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives_TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives.assert_msg_not_in_res_std", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1918, "end_line": 1935, "span_ids": ["TestPytestPluginsVariable.test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginsVariable:\n\n def test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives(\n self, pytester: 
Pytester\n ) -> None:\n pytester.makepyfile(\n \"def test_func(): pass\",\n **{\n \"subdirectory/conftest\": \"pass\",\n \"conftest\": \"\"\"\n import warnings\n warnings.filterwarnings('always', category=DeprecationWarning)\n pytest_plugins=['capture']\n \"\"\",\n },\n )\n res = pytester.runpytest_subprocess()\n assert res.ret == 0\n msg = \"Defining 'pytest_plugins' in a non-top-level conftest is no longer supported\"\n assert msg not in res.stdout.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_prevent_ConftestImportFailure_hiding_exception_TestPDB.test_pdb_prevent_ConftestImportFailure_hiding_exception.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_prevent_ConftestImportFailure_hiding_exception_TestPDB.test_pdb_prevent_ConftestImportFailure_hiding_exception.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 348, "end_line": 362, "span_ids": ["TestPDB.test_pdb_prevent_ConftestImportFailure_hiding_exception"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_prevent_ConftestImportFailure_hiding_exception(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\"def test_func(): pass\")\n sub_dir = pytester.path.joinpath(\"ns\")\n sub_dir.mkdir()\n sub_dir.joinpath(\"conftest\").with_suffix(\".py\").write_text(\n \"import unknown\", \"utf-8\"\n )\n sub_dir.joinpath(\"test_file\").with_suffix(\".py\").write_text(\n \"def test_func(): pass\", \"utf-8\"\n )\n\n result = pytester.runpytest_subprocess(\"--pdb\", \".\")\n result.stdout.fnmatch_lines([\"-> import unknown\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_warning_on_unwrap_of_broken_object_test_warning_on_unwrap_of_broken_object.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_warning_on_unwrap_of_broken_object_test_warning_on_unwrap_of_broken_object.None_1", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1479, "end_line": 1494, "span_ids": ["test_warning_on_unwrap_of_broken_object"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize( # pragma: no branch (lambdas are not called)\n \"stop\", [None, _is_mocked, 
lambda f: None, lambda f: False, lambda f: True]\n)\ndef test_warning_on_unwrap_of_broken_object(\n stop: Optional[Callable[[object], object]]\n) -> None:\n bad_instance = Broken()\n assert inspect.unwrap.__module__ == \"inspect\"\n with _patch_unwrap_mock_aware():\n assert inspect.unwrap.__module__ != \"inspect\"\n with pytest.warns(\n pytest.PytestWarning, match=\"^Got KeyError.* when unwrapping\"\n ):\n with pytest.raises(KeyError):\n inspect.unwrap(bad_instance, stop=stop) # type: ignore[arg-type]\n assert inspect.unwrap.__module__ == \"inspect\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_pytest_test_version_less_verbose.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_pytest_test_version_less_verbose.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_helpconfig.py", "file_name": "test_helpconfig.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 19, "span_ids": ["test_version_less_verbose", "imports", "test_version_verbose"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.pytester import Pytester\n\n\ndef test_version_verbose(pytester: Pytester, pytestconfig, monkeypatch) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\")\n result = pytester.runpytest(\"--version\", \"--version\")\n assert result.ret == 0\n result.stderr.fnmatch_lines([f\"*pytest*{pytest.__version__}*imported from*\"])\n if pytestconfig.pluginmanager.list_plugin_distinfo():\n result.stderr.fnmatch_lines([\"*setuptools registered plugins:\", \"*at*\"])\n\n\ndef test_version_less_verbose(pytester: Pytester, pytestconfig, monkeypatch) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\")\n result = pytester.runpytest(\"--version\")\n assert result.ret == 0\n result.stderr.fnmatch_lines([f\"pytest {pytest.__version__}\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_help_test_help.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_help_test_help.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_helpconfig.py", "file_name": "test_helpconfig.py", "file_type": "text/x-python", "category": "test", "start_line": 22, "end_line": 39, "span_ids": ["test_help"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_help(pytester: Pytester) -> None:\n result = 
pytester.runpytest(\"--help\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n \"\"\"\n -m MARKEXPR only run tests matching given mark expression.\n For example: -m 'mark1 and not mark2'.\n reporting:\n --durations=N *\n -V, --version display pytest version and information about plugins.\n When given twice, also display information about\n plugins.\n *setup.cfg*\n *minversion*\n *to see*markers*pytest --markers*\n *to see*fixtures*pytest --fixtures*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_hookvalidation_unknown_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_hookvalidation_unknown_", "embedding": null, "metadata": {"file_path": "testing/test_helpconfig.py", "file_name": "test_helpconfig.py", "file_type": "text/x-python", "category": "test", "start_line": 75, "end_line": 119, "span_ids": ["test_hookvalidation_optional", "test_hookvalidation_unknown", "test_PYTEST_DEBUG", "test_debug", "test_traceconfig"], "tokens": 329}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_hookvalidation_unknown(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_hello(xyz):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret != 0\n result.stdout.fnmatch_lines([\"*unknown hook*pytest_hello*\"])\n\n\ndef test_hookvalidation_optional(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.hookimpl(optionalhook=True)\n def pytest_hello(xyz):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n\n\ndef test_traceconfig(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--traceconfig\")\n result.stdout.fnmatch_lines([\"*using*pytest*py*\", \"*active plugins*\"])\n\n\ndef test_debug(pytester: Pytester) -> None:\n result = pytester.runpytest_subprocess(\"--debug\")\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n p = pytester.path.joinpath(\"pytestdebug.log\")\n assert \"pytest_sessionstart\" in p.read_text(\"utf-8\")\n\n\ndef test_PYTEST_DEBUG(pytester: Pytester, monkeypatch) -> None:\n monkeypatch.setenv(\"PYTEST_DEBUG\", \"1\")\n result = pytester.runpytest_subprocess()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n result.stderr.fnmatch_lines(\n [\"*pytest_plugin_registered*\", \"*manager*PluginManager*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_wrong_arguments_test_keyword_option_wrong_arguments.assert_expected_error_in_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_wrong_arguments_test_keyword_option_wrong_arguments.assert_expected_error_in_", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": 
"text/x-python", "category": "test", "start_line": 348, "end_line": 388, "span_ids": ["test_keyword_option_wrong_arguments"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"expr\", \"expected_error\"),\n [\n (\n \"foo or\",\n \"at column 7: expected not OR left parenthesis OR identifier; got end of input\",\n ),\n (\n \"foo or or\",\n \"at column 8: expected not OR left parenthesis OR identifier; got or\",\n ),\n (\n \"(foo\",\n \"at column 5: expected right parenthesis; got end of input\",\n ),\n (\n \"foo bar\",\n \"at column 5: expected end of input; got identifier\",\n ),\n (\n \"or or\",\n \"at column 1: expected not OR left parenthesis OR identifier; got or\",\n ),\n (\n \"not or\",\n \"at column 5: expected not OR left parenthesis OR identifier; got or\",\n ),\n ],\n)\ndef test_keyword_option_wrong_arguments(\n expr: str, expected_error: str, pytester: Pytester, capsys\n) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_func(arg):\n pass\n \"\"\"\n )\n pytester.inline_run(\"-k\", expr)\n err = capsys.readouterr().err\n assert expected_error in err", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_no_magic_values_TestKeywordSelection.test_no_magic_values.assert_len_deselected_tes": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_no_magic_values_TestKeywordSelection.test_no_magic_values.assert_len_deselected_tes", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 871, "end_line": 890, "span_ids": ["TestKeywordSelection.test_no_magic_values"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestKeywordSelection:\n\n @pytest.mark.parametrize(\n \"keyword\",\n [\"__\", \"+\", \"..\"],\n )\n def test_no_magic_values(self, pytester: Pytester, keyword: str) -> None:\n \"\"\"Make sure the tests do not match on magic values,\n no double underscored values, like '__dict__' and '+'.\n \"\"\"\n p = pytester.makepyfile(\n \"\"\"\n def test_one(): assert 1\n \"\"\"\n )\n\n reprec = pytester.inline_run(\"-k\", keyword, p)\n passed, skipped, failed = reprec.countoutcomes()\n dlist = reprec.getcalls(\"pytest_deselected\")\n assert passed + skipped + failed == 0\n deselected_tests = dlist[0].items\n assert len(deselected_tests) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_no_match_directories_outside_the_suite_TestKeywordSelection.test_no_match_directories_outside_the_suite.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestKeywordSelection.test_no_match_directories_outside_the_suite_TestKeywordSelection.test_no_match_directories_outside_the_suite.None_2", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 892, "end_line": 918, "span_ids": ["TestKeywordSelection.test_no_match_directories_outside_the_suite"], "tokens": 294}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestKeywordSelection:\n\n def test_no_match_directories_outside_the_suite(self, pytester: Pytester) -> None:\n \"\"\"`-k` should not match against directories containing the test suite (#7040).\"\"\"\n test_contents = \"\"\"\n def test_aaa(): pass\n def test_ddd(): pass\n \"\"\"\n pytester.makepyfile(\n **{\"ddd/tests/__init__.py\": \"\", \"ddd/tests/test_foo.py\": test_contents}\n )\n\n def get_collected_names(*args):\n _, rec = pytester.inline_genitems(*args)\n calls = rec.getcalls(\"pytest_collection_finish\")\n assert len(calls) == 1\n return [x.name for x in calls[0].session.items]\n\n # sanity check: collect both tests in normal runs\n assert get_collected_names() == [\"test_aaa\", \"test_ddd\"]\n\n # do not collect anything based on names outside the collection tree\n assert get_collected_names(\"-k\", pytester._name) == []\n\n # \"-k ddd\" should only collect \"test_ddd\", but not\n # 'test_aaa' just because one of its parent directories is named \"ddd\";\n # this was matched previously because Package.name would contain the full path\n # to the package\n assert get_collected_names(\"-k\", \"ddd\") == [\"test_ddd\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_from_typing_import_Callab_test_basic.assert_evaluate_expr_mat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_from_typing_import_Callab_test_basic.assert_evaluate_expr_mat", "embedding": null, "metadata": {"file_path": "testing/test_mark_expression.py", "file_name": "test_mark_expression.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 50, "span_ids": ["test_basic", "test_empty_is_false", "imports", "evaluate"], "tokens": 381}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Callable\n\nimport pytest\nfrom _pytest.mark.expression import Expression\nfrom _pytest.mark.expression import ParseError\n\n\ndef evaluate(input: str, matcher: Callable[[str], bool]) -> bool:\n return 
Expression.compile(input).evaluate(matcher)\n\n\ndef test_empty_is_false() -> None:\n assert not evaluate(\"\", lambda ident: False)\n assert not evaluate(\"\", lambda ident: True)\n assert not evaluate(\" \", lambda ident: False)\n assert not evaluate(\"\\t\", lambda ident: False)\n\n\n@pytest.mark.parametrize(\n (\"expr\", \"expected\"),\n (\n (\"true\", True),\n (\"true\", True),\n (\"false\", False),\n (\"not true\", False),\n (\"not false\", True),\n (\"not not true\", True),\n (\"not not false\", False),\n (\"true and true\", True),\n (\"true and false\", False),\n (\"false and true\", False),\n (\"true and true and true\", True),\n (\"true and true and false\", False),\n (\"true and true and not true\", False),\n (\"false or false\", False),\n (\"false or true\", True),\n (\"true or true\", True),\n (\"true or true or false\", True),\n (\"true and true or false\", True),\n (\"not true or true\", True),\n (\"(not true) or true\", True),\n (\"not (true or true)\", False),\n (\"true and true or false and false\", True),\n (\"true and (true or false) and false\", False),\n (\"true and (true or (not (not false))) and false\", False),\n ),\n)\ndef test_basic(expr: str, expected: bool) -> None:\n matcher = {\"true\": True, \"false\": False}.__getitem__\n assert evaluate(expr, matcher) is expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_syntax_oddeties_test_syntax_oddeties.assert_evaluate_expr_mat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_syntax_oddeties_test_syntax_oddeties.assert_evaluate_expr_mat", "embedding": null, "metadata": {"file_path": "testing/test_mark_expression.py", "file_name": "test_mark_expression.py", "file_type": "text/x-python", "category": "test", "start_line": 53, "end_line": 66, "span_ids": ["test_syntax_oddeties"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"expr\", \"expected\"),\n (\n (\" true \", True),\n (\" ((((((true)))))) \", True),\n (\" ( ((\\t (((true))))) \\t \\t)\", True),\n (\"( true and (((false))))\", False),\n (\"not not not not true\", True),\n (\"not not not not not true\", False),\n ),\n)\ndef test_syntax_oddeties(expr: str, expected: bool) -> None:\n matcher = {\"true\": True, \"false\": False}.__getitem__\n assert evaluate(expr, matcher) is expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_syntax_errors_test_syntax_errors.assert_excinfo_value_mess": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_syntax_errors_test_syntax_errors.assert_excinfo_value_mess", "embedding": null, "metadata": {"file_path": "testing/test_mark_expression.py", "file_name": "test_mark_expression.py", "file_type": "text/x-python", "category": "test", "start_line": 69, "end_line": 121, "span_ids": 
["test_syntax_errors"], "tokens": 344}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"expr\", \"column\", \"message\"),\n (\n (\"(\", 2, \"expected not OR left parenthesis OR identifier; got end of input\"),\n (\n \" (\",\n 3,\n \"expected not OR left parenthesis OR identifier; got end of input\",\n ),\n (\n \")\",\n 1,\n \"expected not OR left parenthesis OR identifier; got right parenthesis\",\n ),\n (\n \") \",\n 1,\n \"expected not OR left parenthesis OR identifier; got right parenthesis\",\n ),\n (\n \"not\",\n 4,\n \"expected not OR left parenthesis OR identifier; got end of input\",\n ),\n (\n \"not not\",\n 8,\n \"expected not OR left parenthesis OR identifier; got end of input\",\n ),\n (\n \"(not)\",\n 5,\n \"expected not OR left parenthesis OR identifier; got right parenthesis\",\n ),\n (\"and\", 1, \"expected not OR left parenthesis OR identifier; got and\"),\n (\n \"ident and\",\n 10,\n \"expected not OR left parenthesis OR identifier; got end of input\",\n ),\n (\n \"ident and or\",\n 11,\n \"expected not OR left parenthesis OR identifier; got or\",\n ),\n (\"ident ident\", 7, \"expected end of input; got identifier\"),\n ),\n)\ndef test_syntax_errors(expr: str, column: int, message: str) -> None:\n with pytest.raises(ParseError) as excinfo:\n evaluate(expr, lambda ident: True)\n assert excinfo.value.column == column\n assert excinfo.value.message == message", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_valid_idents_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark_expression.py_test_valid_idents_", "embedding": null, "metadata": {"file_path": "testing/test_mark_expression.py", "file_name": "test_mark_expression.py", "file_type": "text/x-python", "category": "test", "start_line": 112, "end_line": 170, "span_ids": ["test_invalid_idents", "test_valid_idents"], "tokens": 258}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"ident\",\n (\n \".\",\n \"...\",\n \":::\",\n \"a:::c\",\n \"a+-b\",\n \"\u05d0\u05d1\u05d2\u05d3\",\n \"aa\u05d0\u05d1\u05d2\u05d3cc\",\n \"a[bcd]\",\n \"1234\",\n \"1234abcd\",\n \"1234and\",\n \"notandor\",\n \"not_and_or\",\n \"not[and]or\",\n \"1234+5678\",\n \"123.232\",\n \"True\",\n \"False\",\n \"None\",\n \"if\",\n \"else\",\n \"while\",\n ),\n)\ndef test_valid_idents(ident: str) -> None:\n assert evaluate(ident, {ident: True}.__getitem__)\n\n\n@pytest.mark.parametrize(\n \"ident\",\n (\n \"/\",\n \"\\\\\",\n \"^\",\n \"*\",\n \"=\",\n \"&\",\n \"%\",\n \"$\",\n \"#\",\n \"@\",\n \"!\",\n \"~\",\n \"{\",\n \"}\",\n '\"',\n \"'\",\n \"|\",\n \";\",\n \"\u2190\",\n ),\n)\ndef test_invalid_idents(ident: str) -> None:\n with pytest.raises(ParseError):\n 
evaluate(ident, lambda ident: True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_captured_text_TestReportContents.test_captured_text.assert_teardown_capstderr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_captured_text_TestReportContents.test_captured_text.assert_teardown_capstderr", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 996, "end_line": 1024, "span_ids": ["TestReportContents.test_captured_text"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportContents:\n\n def test_captured_text(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n import pytest\n import sys\n\n @pytest.fixture\n def fix():\n sys.stdout.write('setup: stdout\\\\n')\n sys.stderr.write('setup: stderr\\\\n')\n yield\n sys.stdout.write('teardown: stdout\\\\n')\n sys.stderr.write('teardown: stderr\\\\n')\n assert 0\n\n def test_func(fix):\n sys.stdout.write('call: stdout\\\\n')\n sys.stderr.write('call: stderr\\\\n')\n assert 0\n \"\"\"\n )\n setup, call, teardown = reports\n assert setup.capstdout == \"setup: stdout\\n\"\n assert call.capstdout == \"setup: stdout\\ncall: stdout\\n\"\n assert teardown.capstdout == \"setup: stdout\\ncall: stdout\\nteardown: stdout\\n\"\n\n assert setup.capstderr == \"setup: stderr\\n\"\n assert call.capstderr == \"setup: stderr\\ncall: stderr\\n\"\n assert teardown.capstderr == \"setup: stderr\\ncall: stderr\\nteardown: stderr\\n\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_no_captured_text_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_no_captured_text_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 1026, "end_line": 1062, "span_ids": ["TestReportContents.test_no_captured_text", "test_outcome_exception_bad_msg", "TestReportContents.test_longrepr_type"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportContents:\n\n def test_no_captured_text(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n def test_func():\n pass\n \"\"\"\n )\n rep = reports[1]\n assert rep.capstdout == \"\"\n assert rep.capstderr == \"\"\n\n def test_longrepr_type(self, pytester: Pytester) -> None:\n reports = 
pytester.runitem(\n \"\"\"\n import pytest\n def test_func():\n pytest.fail(pytrace=False)\n \"\"\"\n )\n rep = reports[1]\n assert isinstance(rep.longrepr, ExceptionChainRepr)\n\n\ndef test_outcome_exception_bad_msg() -> None:\n \"\"\"Check that OutcomeExceptions validate their input to prevent confusing errors (#5578)\"\"\"\n\n def func() -> None:\n raise NotImplementedError()\n\n expected = (\n \"OutcomeException expected string as 'msg' parameter, got 'function' instead.\\n\"\n \"Perhaps you meant to use a mark?\"\n )\n with pytest.raises(TypeError) as excinfo:\n OutcomeException(func) # type: ignore\n assert str(excinfo.value) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_sys_test_show_only_active_fixtures.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_sys_test_show_only_active_fixtures.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 36, "span_ids": ["mode", "test_show_only_active_fixtures", "imports"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\n\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.pytester import Pytester\n\n\n@pytest.fixture(params=[\"--setup-only\", \"--setup-plan\", \"--setup-show\"], scope=\"module\")\ndef mode(request):\n return request.param\n\n\ndef test_show_only_active_fixtures(\n pytester: Pytester, mode, dummy_yaml_custom_test\n) -> None:\n pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def _arg0():\n \"\"\"hidden arg0 fixture\"\"\"\n @pytest.fixture\n def arg1():\n \"\"\"arg1 docstring\"\"\"\n def test_arg1(arg1):\n pass\n '''\n )\n\n result = pytester.runpytest(mode)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\"*SETUP F arg1*\", \"*test_arg1 (fixtures used: arg1)*\", \"*TEARDOWN F arg1*\"]\n )\n result.stdout.no_fnmatch_line(\"*_arg0*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_setup_show_with_KeyboardInterrupt_in_test_test_setup_show_with_KeyboardInterrupt_in_test.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_setup_show_with_KeyboardInterrupt_in_test_test_setup_show_with_KeyboardInterrupt_in_test.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 280, "end_line": 301, "span_ids": ["test_setup_show_with_KeyboardInterrupt_in_test"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_show_with_KeyboardInterrupt_in_test(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg():\n pass\n def test_arg(arg):\n raise KeyboardInterrupt()\n \"\"\"\n )\n result = pytester.runpytest(\"--setup-show\", p, no_reraise_ctrlc=True)\n result.stdout.fnmatch_lines(\n [\n \"*SETUP F arg*\",\n \"*test_arg (fixtures used: arg)*\",\n \"*TEARDOWN F arg*\",\n \"*! KeyboardInterrupt !*\",\n \"*= no tests ran in *\",\n ]\n )\n assert result.ret == ExitCode.INTERRUPTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixture_action_with_bytes_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setuponly.py_test_show_fixture_action_with_bytes_", "embedding": null, "metadata": {"file_path": "testing/test_setuponly.py", "file_name": "test_setuponly.py", "file_type": "text/x-python", "category": "test", "start_line": 304, "end_line": 319, "span_ids": ["test_show_fixture_action_with_bytes"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_show_fixture_action_with_bytes(pytester: Pytester) -> None:\n # Issue 7126, BytesWarning when using --setup-show with bytes parameter\n test_file = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize('data', [b'Hello World'])\n def test_data(data):\n pass\n \"\"\"\n )\n result = pytester.run(\n sys.executable, \"-bb\", \"-m\", \"pytest\", \"--setup-show\", str(test_file)\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_summary_list_after_errors_test_summary_list_after_errors.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_summary_list_after_errors_test_summary_list_after_errors.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1375, "end_line": 1391, "span_ids": ["test_summary_list_after_errors"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_summary_list_after_errors(pytester: Pytester) -> None:\n \"\"\"Ensure the list of errors/fails/xfails/skips appears after tracebacks in terminal reporting.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import 
pytest\n def test_fail():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"-ra\")\n result.stdout.fnmatch_lines(\n [\n \"=* FAILURES *=\",\n \"*= short test summary info =*\",\n \"FAILED test_summary_list_after_errors.py::test_fail - assert 0\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_importorskip_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_importorskip_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1394, "end_line": 1417, "span_ids": ["test_relpath_rootdir", "test_importorskip"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_importorskip() -> None:\n with pytest.raises(\n pytest.skip.Exception,\n match=\"^could not import 'doesnotexist': No module named .*\",\n ):\n pytest.importorskip(\"doesnotexist\")\n\n\ndef test_relpath_rootdir(pytester: Pytester) -> None:\n pytester.makepyfile(\n **{\n \"tests/test_1.py\": \"\"\"\n import pytest\n @pytest.mark.skip()\n def test_pass():\n pass\n \"\"\",\n }\n )\n result = pytester.runpytest(\"-rs\", \"tests/test_1.py\", \"--rootdir=tests\")\n result.stdout.fnmatch_lines(\n [\"SKIPPED [[]1[]] tests/test_1.py:2: unconditional skip\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_pdb_teardown_skipped_test_pdb_teardown_skipped.assert_tracked_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_pdb_teardown_skipped_test_pdb_teardown_skipped.assert_tracked_", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1209, "end_line": 1240, "span_ids": ["test_pdb_teardown_skipped"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"mark\", [\"@unittest.skip\", \"@pytest.mark.skip\"])\ndef test_pdb_teardown_skipped(\n pytester: Pytester, monkeypatch: MonkeyPatch, mark: str\n) -> None:\n \"\"\"With --pdb, setUp and tearDown should not be called for skipped tests.\"\"\"\n tracked: List[str] = []\n monkeypatch.setattr(pytest, \"test_pdb_teardown_skipped\", tracked, raising=False)\n\n pytester.makepyfile(\n \"\"\"\n import unittest\n import pytest\n\n class MyTestCase(unittest.TestCase):\n\n def setUp(self):\n pytest.test_pdb_teardown_skipped.append(\"setUp:\" + self.id())\n\n def tearDown(self):\n 
pytest.test_pdb_teardown_skipped.append(\"tearDown:\" + self.id())\n\n {mark}(\"skipped for reasons\")\n def test_1(self):\n pass\n\n \"\"\".format(\n mark=mark\n )\n )\n result = pytester.runpytest_inprocess(\"--pdb\")\n result.stdout.fnmatch_lines(\"* 1 skipped in *\")\n assert tracked == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/towncrier-draft-to-file.py_sys_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/towncrier-draft-to-file.py_sys_", "embedding": null, "metadata": {"file_path": "scripts/towncrier-draft-to-file.py", "file_name": "towncrier-draft-to-file.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 16, "span_ids": ["main", "impl", "imports"], "tokens": 105}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nfrom subprocess import call\n\n\ndef main():\n \"\"\"\n Platform agnostic wrapper script for towncrier.\n Fixes the issue (#7251) where windows users are unable to natively run tox -e docs to build pytest docs.\n \"\"\"\n with open(\"doc/en/_changelog_towncrier_draft.rst\", \"w\") as draft_file:\n return call((\"towncrier\", \"--draft\"), stdout=draft_file)\n\n\nif __name__ == \"__main__\":\n sys.exit(main())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/setup.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/setup.py__", "embedding": null, "metadata": {"file_path": "setup.py", "file_name": "setup.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 5, "span_ids": ["impl", "imports"], "tokens": 16}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from setuptools import setup\n\nif __name__ == \"__main__\":\n setup()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py__This_class_is_abstract__ExceptionRepr.toterminal.for_name_content_sep_in.tw_line_content_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py__This_class_is_abstract__ExceptionRepr.toterminal.for_name_content_sep_in.tw_line_content_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 964, "end_line": 980, "span_ids": ["ExceptionRepr.addsection", "TerminalRepr.toterminal", "ExceptionRepr.__attrs_post_init__", "ExceptionRepr", "ExceptionRepr.toterminal"], 
"tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# This class is abstract -- only subclasses are instantiated.\n@attr.s(eq=False)\nclass ExceptionRepr(TerminalRepr):\n # Provided by subclasses.\n reprcrash: Optional[\"ReprFileLocation\"]\n reprtraceback: \"ReprTraceback\"\n\n def __attrs_post_init__(self) -> None:\n self.sections: List[Tuple[str, str, str]] = []\n\n def addsection(self, name: str, content: str, sep: str = \"-\") -> None:\n self.sections.append((name, content, sep))\n\n def toterminal(self, tw: TerminalWriter) -> None:\n for name, content, sep in self.sections:\n tw.sep(sep, name)\n tw.line(content)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter_TerminalWriter.width_of_current_line.return.wcswidth_self__current_li": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/terminalwriter.py_TerminalWriter_TerminalWriter.width_of_current_line.return.wcswidth_self__current_li", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/terminalwriter.py", "file_name": "terminalwriter.py", "file_type": "text/x-python", "category": "implementation", "start_line": 40, "end_line": 95, "span_ids": ["TerminalWriter.fullwidth", "TerminalWriter", "TerminalWriter.width_of_current_line", "TerminalWriter.fullwidth_1"], "tokens": 357}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalWriter:\n _esctable = dict(\n black=30,\n red=31,\n green=32,\n yellow=33,\n blue=34,\n purple=35,\n cyan=36,\n white=37,\n Black=40,\n Red=41,\n Green=42,\n Yellow=43,\n Blue=44,\n Purple=45,\n Cyan=46,\n White=47,\n bold=1,\n light=2,\n blink=5,\n invert=7,\n )\n\n def __init__(self, file: Optional[TextIO] = None) -> None:\n if file is None:\n file = sys.stdout\n if hasattr(file, \"isatty\") and file.isatty() and sys.platform == \"win32\":\n try:\n import colorama\n except ImportError:\n pass\n else:\n file = colorama.AnsiToWin32(file).stream\n assert file is not None\n self._file = file\n self.hasmarkup = should_do_markup(file)\n self._current_line = \"\"\n self._terminal_width: Optional[int] = None\n self.code_highlight = True\n\n @property\n def fullwidth(self) -> int:\n if self._terminal_width is not None:\n return self._terminal_width\n return get_terminal_width()\n\n @fullwidth.setter\n def fullwidth(self, value: int) -> None:\n self._terminal_width = value\n\n @property\n def width_of_current_line(self) -> int:\n \"\"\"Return an estimate of the width so far in the current line.\"\"\"\n return wcswidth(self._current_line)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/wcwidth.py_unicodedata_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_io/wcwidth.py_unicodedata_", "embedding": null, "metadata": {"file_path": "src/_pytest/_io/wcwidth.py", "file_name": "wcwidth.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 56, "span_ids": ["wcswidth", "wcwidth", "imports"], "tokens": 366}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unicodedata\nfrom functools import lru_cache\n\n\n@lru_cache(100)\ndef wcwidth(c: str) -> int:\n \"\"\"Determine how many columns are needed to display a character in a terminal.\n\n Returns -1 if the character is not printable.\n Returns 0, 1 or 2 for other characters.\n \"\"\"\n o = ord(c)\n\n # ASCII fast path.\n if 0x20 <= o < 0x07F:\n return 1\n\n # Some Cf/Zp/Zl characters which should be zero-width.\n if (\n o == 0x0000\n or 0x200B <= o <= 0x200F\n or 0x2028 <= o <= 0x202E\n or 0x2060 <= o <= 0x2063\n ):\n return 0\n\n category = unicodedata.category(c)\n\n # Control characters.\n if category == \"Cc\":\n return -1\n\n # Combining characters with zero width.\n if category in (\"Me\", \"Mn\"):\n return 0\n\n # Full/Wide east asian characters.\n if unicodedata.east_asian_width(c) in (\"F\", \"W\"):\n return 2\n\n return 1\n\n\ndef wcswidth(s: str) -> int:\n \"\"\"Determine how many columns are needed to display a string in a terminal.\n\n Returns -1 if the string contains non-printable characters.\n \"\"\"\n width = 0\n for c in unicodedata.normalize(\"NFC\", s):\n wc = wcwidth(c)\n if wc < 0:\n return -1\n width += wc\n return width", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_addoption_pytest_addoption.parser_addini_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py_pytest_addoption_pytest_addoption.parser_addini_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 24, "end_line": 46, "span_ids": ["pytest_addoption"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"debugconfig\")\n group.addoption(\n \"--assert\",\n action=\"store\",\n dest=\"assertmode\",\n choices=(\"rewrite\", \"plain\"),\n default=\"rewrite\",\n metavar=\"MODE\",\n help=(\n \"Control assertion debugging tools.\\n\"\n \"'plain' performs no assertion debugging.\\n\"\n \"'rewrite' (the default) rewrites assert statements in test modules\"\n \" on import to provide assert expression information.\"\n ),\n )\n parser.addini(\n \"enable_assertion_pass_hook\",\n type=\"bool\",\n 
default=False,\n help=\"Enables the pytest_assertion_pass hook.\"\n \"Make sure to delete any previously generated pyc cache files.\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._is_marked_for_rewrite_AssertionRewritingHook._is_marked_for_rewrite.try_.except_KeyError_.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook._is_marked_for_rewrite_AssertionRewritingHook._is_marked_for_rewrite.try_.except_KeyError_.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 231, "end_line": 242, "span_ids": ["AssertionRewritingHook._is_marked_for_rewrite"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n\n def _is_marked_for_rewrite(self, name: str, state: \"AssertionState\") -> bool:\n try:\n return self._marked_for_rewrite_cache[name]\n except KeyError:\n for marked in self._must_rewrite:\n if name == marked or name.startswith(marked + \".\"):\n state.trace(f\"matched marked file {name!r} (from {marked!r})\")\n self._marked_for_rewrite_cache[name] = True\n return True\n\n self._marked_for_rewrite_cache[name] = False\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.mark_rewrite_AssertionRewritingHook.mark_rewrite.self__marked_for_rewrite_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_AssertionRewritingHook.mark_rewrite_AssertionRewritingHook.mark_rewrite.self__marked_for_rewrite_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 249, "end_line": 265, "span_ids": ["AssertionRewritingHook.mark_rewrite"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):\n\n def mark_rewrite(self, *names: str) -> None:\n \"\"\"Mark import names as needing to be rewritten.\n\n The named module or package as well as any nested modules will\n be rewritten on import.\n \"\"\"\n already_imported = (\n set(names).intersection(sys.modules).difference(self._rewritten_names)\n )\n for name in already_imported:\n mod = sys.modules[name]\n if not 
AssertionRewriter.is_rewrite_disabled(\n mod.__doc__ or \"\"\n ) and not isinstance(mod.__loader__, type(self)):\n self._warn_already_imported(name)\n self._must_rewrite.update(names)\n self._marked_for_rewrite_cache.clear()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_if_sys_platform_win32_if_sys_platform_win32.else_._write_pyc.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py_if_sys_platform_win32_if_sys_platform_win32.else_._write_pyc.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 297, "end_line": 344, "span_ids": ["impl:10"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "if sys.platform == \"win32\":\n from atomicwrites import atomic_write\n\n def _write_pyc(\n state: \"AssertionState\",\n co: types.CodeType,\n source_stat: os.stat_result,\n pyc: Path,\n ) -> bool:\n try:\n with atomic_write(os.fspath(pyc), mode=\"wb\", overwrite=True) as fp:\n _write_pyc_fp(fp, source_stat, co)\n except OSError as e:\n state.trace(f\"error writing pyc file at {pyc}: {e}\")\n # we ignore any failure to write the cache file\n # there are many reasons, permission-denied, pycache dir being a\n # file etc.\n return False\n return True\n\n\nelse:\n\n def _write_pyc(\n state: \"AssertionState\",\n co: types.CodeType,\n source_stat: os.stat_result,\n pyc: Path,\n ) -> bool:\n proc_pyc = f\"{pyc}.{os.getpid()}\"\n try:\n fp = open(proc_pyc, \"wb\")\n except OSError as e:\n state.trace(f\"error writing pyc file at {proc_pyc}: errno={e.errno}\")\n return False\n\n try:\n _write_pyc_fp(fp, source_stat, co)\n os.rename(proc_pyc, os.fspath(pyc))\n except OSError as e:\n state.trace(f\"error writing pyc file at {pyc}: {e}\")\n # we ignore any failure to write the cache file\n # there are many reasons, permission-denied, pycache dir being a\n # file etc.\n return False\n finally:\n fp.close()\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__rewrite_test__rewrite_test.return.stat_co": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__rewrite_test__rewrite_test.return.stat_co", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 347, "end_line": 356, "span_ids": ["_rewrite_test"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def _rewrite_test(fn: Path, config: Config) -> Tuple[os.stat_result, types.CodeType]:\n \"\"\"Read and rewrite *fn* and return the code object.\"\"\"\n fn_ = os.fspath(fn)\n stat = os.stat(fn_)\n with open(fn_, \"rb\") as f:\n source = f.read()\n tree = ast.parse(source, filename=fn_)\n rewrite_asserts(tree, source, fn_, config)\n co = compile(tree, fn_, \"exec\", dont_inherit=True)\n return stat, co", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_any__compare_eq_any.return.explanation": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py__compare_eq_any__compare_eq_any.return.explanation", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 173, "end_line": 197, "span_ids": ["_compare_eq_any"], "tokens": 283}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _compare_eq_any(left: Any, right: Any, verbose: int = 0) -> List[str]:\n explanation = []\n if istext(left) and istext(right):\n explanation = _diff_text(left, right, verbose)\n else:\n if type(left) == type(right) and (\n isdatacls(left) or isattrs(left) or isnamedtuple(left)\n ):\n # Note: unlike dataclasses/attrs, namedtuples compare only the\n # field values, not the type or field names. 
But this branch\n # intentionally only handles the same-type case, which was often\n # used in older code bases before dataclasses/attrs were available.\n explanation = _compare_eq_cls(left, right, verbose)\n elif issequence(left) and issequence(right):\n explanation = _compare_eq_sequence(left, right, verbose)\n elif isset(left) and isset(right):\n explanation = _compare_eq_set(left, right, verbose)\n elif isdict(left) and isdict(right):\n explanation = _compare_eq_dict(left, right, verbose)\n elif verbose > 0:\n explanation = _compare_eq_verbose(left, right)\n if isiterable(left) and isiterable(right):\n expl = _compare_eq_iterable(left, right, verbose)\n explanation.extend(expl)\n return explanation", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_NFPlugin_NFPlugin.pytest_collection_modifyitems.if_self_active_.else_.self_cached_nodeids_updat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_NFPlugin_NFPlugin.pytest_collection_modifyitems.if_self_active_.else_.self_cached_nodeids_updat", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 386, "end_line": 415, "span_ids": ["NFPlugin.pytest_collection_modifyitems", "NFPlugin"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NFPlugin:\n \"\"\"Plugin which implements the --nf (run new-first) option.\"\"\"\n\n def __init__(self, config: Config) -> None:\n self.config = config\n self.active = config.option.newfirst\n assert config.cache is not None\n self.cached_nodeids = set(config.cache.get(\"cache/nodeids\", []))\n\n @hookimpl(hookwrapper=True, tryfirst=True)\n def pytest_collection_modifyitems(\n self, items: List[nodes.Item]\n ) -> Generator[None, None, None]:\n yield\n\n if self.active:\n new_items: Dict[str, nodes.Item] = {}\n other_items: Dict[str, nodes.Item] = {}\n for item in items:\n if item.nodeid not in self.cached_nodeids:\n new_items[item.nodeid] = item\n else:\n other_items[item.nodeid] = item\n\n items[:] = self._get_increasing_order(\n new_items.values()\n ) + self._get_increasing_order(other_items.values())\n self.cached_nodeids.update(new_items)\n else:\n self.cached_nodeids.update(item.nodeid for item in items)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_NFPlugin._get_increasing_order_NFPlugin.pytest_sessionfinish.config_cache_set_cache_n": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_NFPlugin._get_increasing_order_NFPlugin.pytest_sessionfinish.config_cache_set_cache_n", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", 
"start_line": 384, "end_line": 396, "span_ids": ["NFPlugin._get_increasing_order", "NFPlugin.pytest_sessionfinish"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NFPlugin:\n\n def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]:\n return sorted(items, key=lambda item: item.fspath.mtime(), reverse=True)\n\n def pytest_sessionfinish(self) -> None:\n config = self.config\n if config.getoption(\"cacheshow\") or hasattr(config, \"workerinput\"):\n return\n\n if config.getoption(\"collectonly\"):\n return\n\n assert config.cache is not None\n config.cache.set(\"cache/nodeids\", sorted(self.cached_nodeids))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_cmdline_main_cache.return.request_config_cache": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_cmdline_main_cache.return.request_config_cache", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 490, "end_line": 518, "span_ids": ["cache", "pytest_cmdline_main", "pytest_configure"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:\n if config.option.cacheshow:\n from _pytest.main import wrap_session\n\n return wrap_session(config, cacheshow)\n return None\n\n\n@hookimpl(tryfirst=True)\ndef pytest_configure(config: Config) -> None:\n config.cache = Cache.for_config(config, _ispytest=True)\n config.pluginmanager.register(LFPlugin(config), \"lfplugin\")\n config.pluginmanager.register(NFPlugin(config), \"nfplugin\")\n\n\n@fixture\ndef cache(request: FixtureRequest) -> Cache:\n \"\"\"Return a cache object that can persist state between testing sessions.\n\n cache.get(key, default)\n cache.set(key, value)\n\n Keys must be ``/`` separated strings, where the first part is usually the\n name of your plugin or application to avoid clashes with other cache users.\n\n Values can be any object handled by the json stdlib module.\n \"\"\"\n assert request.config.cache is not None\n return request.config.cache", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_report_header_pytest_report_header.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_pytest_report_header_pytest_report_header.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": 
"cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 521, "end_line": 534, "span_ids": ["pytest_report_header"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_report_header(config: Config) -> Optional[str]:\n \"\"\"Display cachedir with --cache-show and if non-default.\"\"\"\n if config.option.verbose > 0 or config.getini(\"cache_dir\") != \".pytest_cache\":\n assert config.cache is not None\n cachedir = config.cache._cachedir\n # TODO: evaluate generating upward relative paths\n # starting with .., ../.. if sensible\n\n try:\n displaypath = cachedir.relative_to(config.rootpath)\n except ValueError:\n displaypath = cachedir\n return f\"cachedir: {displaypath}\"\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_pytest_addoption__colorama_workaround.if_sys_platform_startswit.try_.except_ImportError_.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_pytest_addoption__colorama_workaround.if_sys_platform_startswit.try_.except_ImportError_.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 37, "end_line": 68, "span_ids": ["_colorama_workaround", "pytest_addoption"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"general\")\n group._addoption(\n \"--capture\",\n action=\"store\",\n default=\"fd\",\n metavar=\"method\",\n choices=[\"fd\", \"sys\", \"no\", \"tee-sys\"],\n help=\"per-test capturing method: one of fd|sys|no|tee-sys.\",\n )\n group._addoption(\n \"-s\",\n action=\"store_const\",\n const=\"no\",\n dest=\"capture\",\n help=\"shortcut for --capture=no.\",\n )\n\n\ndef _colorama_workaround() -> None:\n \"\"\"Ensure colorama is imported so that it attaches to the correct stdio\n handles on Windows.\n\n colorama uses the terminal on import time. 
So if something does the\n first import of colorama while I/O capture is active, colorama will\n fail in various ways.\n \"\"\"\n if sys.platform.startswith(\"win32\"):\n try:\n import colorama # noqa: F401\n except ImportError:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__CaptureManager_and_Capt_CaptureManager.unset_fixture.self._capture_fixture.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__CaptureManager_and_Capt_CaptureManager.unset_fixture.self._capture_fixture.None", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 646, "end_line": 738, "span_ids": ["_get_multicapture", "CaptureManager.resume", "CaptureManager.unset_fixture", "CaptureManager.start_global_capturing", "CaptureManager", "CaptureManager.suspend_global_capture", "CaptureManager.read_global_capture", "CaptureManager.set_fixture", "CaptureManager.suspend", "CaptureManager.is_globally_capturing", "CaptureManager.is_capturing", "CaptureManager.__repr__", "CaptureManager.stop_global_capturing", "CaptureManager.resume_global_capture"], "tokens": 788}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# CaptureManager and CaptureFixture\n\n\nclass CaptureManager:\n \"\"\"The capture plugin.\n\n Manages that the appropriate capture method is enabled/disabled during\n collection and each test phase (setup, call, teardown). After each of\n those points, the captured output is obtained and attached to the\n collection/runtest report.\n\n There are two levels of capture:\n\n * global: enabled by default and can be suppressed by the ``-s``\n option. This is always enabled/disabled during collection and each test\n phase.\n\n * fixture: when a test function or one of its fixture depend on the\n ``capsys`` or ``capfd`` fixtures. 
In this case special handling is\n      needed to ensure the fixtures take precedence over the global capture.\n    \"\"\"\n\n    def __init__(self, method: \"_CaptureMethod\") -> None:\n        self._method = method\n        self._global_capturing: Optional[MultiCapture[str]] = None\n        self._capture_fixture: Optional[CaptureFixture[Any]] = None\n\n    def __repr__(self) -> str:\n        return \"<CaptureManager _method={!r} _global_capturing={!r} _capture_fixture={!r}>\".format(\n            self._method, self._global_capturing, self._capture_fixture\n        )\n\n    def is_capturing(self) -> Union[str, bool]:\n        if self.is_globally_capturing():\n            return \"global\"\n        if self._capture_fixture:\n            return \"fixture %s\" % self._capture_fixture.request.fixturename\n        return False\n\n    # Global capturing control\n\n    def is_globally_capturing(self) -> bool:\n        return self._method != \"no\"\n\n    def start_global_capturing(self) -> None:\n        assert self._global_capturing is None\n        self._global_capturing = _get_multicapture(self._method)\n        self._global_capturing.start_capturing()\n\n    def stop_global_capturing(self) -> None:\n        if self._global_capturing is not None:\n            self._global_capturing.pop_outerr_to_orig()\n            self._global_capturing.stop_capturing()\n            self._global_capturing = None\n\n    def resume_global_capture(self) -> None:\n        # During teardown of the python process, and on rare occasions, capture\n        # attributes can be `None` while trying to resume global capture.\n        if self._global_capturing is not None:\n            self._global_capturing.resume_capturing()\n\n    def suspend_global_capture(self, in_: bool = False) -> None:\n        if self._global_capturing is not None:\n            self._global_capturing.suspend_capturing(in_=in_)\n\n    def suspend(self, in_: bool = False) -> None:\n        # Need to undo local capsys-et-al if it exists before disabling global capture.\n        self.suspend_fixture()\n        self.suspend_global_capture(in_)\n\n    def resume(self) -> None:\n        self.resume_global_capture()\n        self.resume_fixture()\n\n    def read_global_capture(self) -> CaptureResult[str]:\n        assert self._global_capturing is not None\n        return self._global_capturing.readouterr()\n\n    # Fixture Control\n\n    def set_fixture(self, capture_fixture: \"CaptureFixture[Any]\") -> None:\n        if self._capture_fixture:\n            current_fixture = self._capture_fixture.request.fixturename\n            requested_fixture = capture_fixture.request.fixturename\n            capture_fixture.request.raiseerror(\n                \"cannot use {} and {} at the same time\".format(\n                    requested_fixture, current_fixture\n                )\n            )\n        self._capture_fixture = capture_fixture\n\n    def unset_fixture(self) -> None:\n        self._capture_fixture = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.consider_pluginarg_PytestPluginManager.consider_pluginarg.if_arg_startswith_no_.else_.self_import_plugin_arg_c": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.consider_pluginarg_PytestPluginManager.consider_pluginarg.if_arg_startswith_no_.else_.self_import_plugin_arg_c", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 655, "end_line": 678, "span_ids": ["PytestPluginManager.consider_pluginarg"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date",
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def consider_pluginarg(self, arg: str) -> None:\n if arg.startswith(\"no:\"):\n name = arg[3:]\n if name in essential_plugins:\n raise UsageError(\"plugin %s cannot be disabled\" % name)\n\n # PR #4304: remove stepwise if cacheprovider is blocked.\n if name == \"cacheprovider\":\n self.set_blocked(\"stepwise\")\n self.set_blocked(\"pytest_stepwise\")\n\n self.set_blocked(name)\n if not name.startswith(\"pytest_\"):\n self.set_blocked(\"pytest_\" + name)\n else:\n name = arg\n # Unblock the plugin. None indicates that it has been blocked.\n # There is no interface with pluggy for this.\n if self._name2plugin.get(name, -1) is None:\n del self._name2plugin[name]\n if not name.startswith(\"pytest_\"):\n if self._name2plugin.get(\"pytest_\" + name, -1) is None:\n del self._name2plugin[\"pytest_\" + name]\n self.import_plugin(arg, consider_entry_points=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.consider_conftest_PytestPluginManager._import_plugin_specs.for_import_spec_in_plugin.self_import_plugin_import": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_PytestPluginManager.consider_conftest_PytestPluginManager._import_plugin_specs.for_import_spec_in_plugin.self_import_plugin_import", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 680, "end_line": 694, "span_ids": ["PytestPluginManager._import_plugin_specs", "PytestPluginManager.consider_conftest", "PytestPluginManager.consider_module", "PytestPluginManager.consider_env"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass PytestPluginManager(PluginManager):\n\n def consider_conftest(self, conftestmodule: types.ModuleType) -> None:\n self.register(conftestmodule, name=conftestmodule.__file__)\n\n def consider_env(self) -> None:\n self._import_plugin_specs(os.environ.get(\"PYTEST_PLUGINS\"))\n\n def consider_module(self, mod: types.ModuleType) -> None:\n self._import_plugin_specs(getattr(mod, \"pytest_plugins\", []))\n\n def _import_plugin_specs(\n self, spec: Union[None, types.ModuleType, str, Sequence[str]]\n ) -> None:\n plugins = _get_plugin_specs_as_list(spec)\n for import_spec in plugins:\n self.import_plugin(import_spec)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_os__parse_ini_config.try_.except_iniconfig_ParseErr.raise_UsageError_str_exc_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_os__parse_ini_config.try_.except_iniconfig_ParseErr.raise_UsageError_str_exc_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/findpaths.py", "file_name": "findpaths.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 32, "span_ids": ["impl", "_parse_ini_config", "imports", "imports:16"], "tokens": 186}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nfrom pathlib import Path\nfrom typing import Dict\nfrom typing import Iterable\nfrom typing import List\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport iniconfig\n\nfrom .exceptions import UsageError\nfrom _pytest.outcomes import fail\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pathlib import commonpath\n\nif TYPE_CHECKING:\n from . import Config\n\n\ndef _parse_ini_config(path: Path) -> iniconfig.IniConfig:\n \"\"\"Parse the given generic '.ini' file using legacy IniConfig parser, returning\n the parsed object.\n\n Raise UsageError if the file cannot be parsed.\n \"\"\"\n try:\n return iniconfig.IniConfig(str(path))\n except iniconfig.ParseError as exc:\n raise UsageError(str(exc)) from exc", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_load_config_dict_from_file_load_config_dict_from_file.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_load_config_dict_from_file_load_config_dict_from_file.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/config/findpaths.py", "file_name": "findpaths.py", "file_type": "text/x-python", "category": "implementation", "start_line": 35, "end_line": 81, "span_ids": ["load_config_dict_from_file"], "tokens": 456}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def load_config_dict_from_file(\n filepath: Path,\n) -> Optional[Dict[str, Union[str, List[str]]]]:\n \"\"\"Load pytest configuration from the given file path, if supported.\n\n Return None if the file does not contain valid pytest configuration.\n \"\"\"\n\n # Configuration from ini files are obtained from the [pytest] section, if present.\n if filepath.suffix == \".ini\":\n iniconfig = _parse_ini_config(filepath)\n\n if \"pytest\" in iniconfig:\n return dict(iniconfig[\"pytest\"].items())\n else:\n # \"pytest.ini\" files are always the source of configuration, even if empty.\n if filepath.name == \"pytest.ini\":\n return {}\n\n # '.cfg' files are considered if they contain a \"[tool:pytest]\" section.\n elif filepath.suffix == \".cfg\":\n iniconfig = _parse_ini_config(filepath)\n\n if \"tool:pytest\" in iniconfig.sections:\n return 
dict(iniconfig[\"tool:pytest\"].items())\n elif \"pytest\" in iniconfig.sections:\n # If a setup.cfg contains a \"[pytest]\" section, we raise a failure to indicate users that\n # plain \"[pytest]\" sections in setup.cfg files is no longer supported (#3086).\n fail(CFG_PYTEST_SECTION.format(filename=\"setup.cfg\"), pytrace=False)\n\n # '.toml' files are considered if they contain a [tool.pytest.ini_options] table.\n elif filepath.suffix == \".toml\":\n import toml\n\n config = toml.load(str(filepath))\n\n result = config.get(\"tool\", {}).get(\"pytest\", {}).get(\"ini_options\", None)\n if result is not None:\n # TOML supports richer data types than ini files (strings, arrays, floats, ints, etc),\n # however we need to convert all scalar values to str for compatibility with the rest\n # of the configuration system, which expects strings only.\n def make_scalar(v: object) -> Union[str, List[str]]:\n return v if isinstance(v, list) else str(v)\n\n return {k: make_scalar(v) for k, v in result.items()}\n\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_locate_config_locate_config.return.None_None_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/findpaths.py_locate_config_locate_config.return.None_None_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/findpaths.py", "file_name": "findpaths.py", "file_type": "text/x-python", "category": "implementation", "start_line": 84, "end_line": 107, "span_ids": ["locate_config"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def locate_config(\n args: Iterable[Path],\n) -> Tuple[Optional[Path], Optional[Path], Dict[str, Union[str, List[str]]]]:\n \"\"\"Search in the list of arguments for a valid ini-file for pytest,\n and return a tuple of (rootdir, inifile, cfg-dict).\"\"\"\n config_names = [\n \"pytest.ini\",\n \"pyproject.toml\",\n \"tox.ini\",\n \"setup.cfg\",\n ]\n args = [x for x in args if not str(x).startswith(\"-\")]\n if not args:\n args = [Path.cwd()]\n for arg in args:\n argpath = absolutepath(arg)\n for base in (argpath, *argpath.parents):\n for config_name in config_names:\n p = base / config_name\n if p.is_file():\n ini_config = load_config_dict_from_file(p)\n if ini_config is not None:\n return base, p, ini_config\n return None, None, {}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_pytest_unconfigure_pytest_collect_file.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_pytest_unconfigure_pytest_collect_file.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 117, "end_line": 136, "span_ids": ["pytest_unconfigure", "pytest_collect_file"], "tokens": 
155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_unconfigure() -> None:\n global RUNNER_CLASS\n\n RUNNER_CLASS = None\n\n\ndef pytest_collect_file(\n fspath: Path,\n path: py.path.local,\n parent: Collector,\n) -> Optional[Union[\"DoctestModule\", \"DoctestTextfile\"]]:\n config = parent.config\n if fspath.suffix == \".py\":\n if config.option.doctestmodules and not _is_setup_py(fspath):\n mod: DoctestModule = DoctestModule.from_parent(parent, fspath=path)\n return mod\n elif _is_doctest(config, fspath, parent):\n txt: DoctestTextfile = DoctestTextfile.from_parent(parent, fspath=path)\n return txt\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__is_setup_py_MultipleDoctestFailures.__init__.self.failures.failures": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__is_setup_py_MultipleDoctestFailures.__init__.self.failures.failures", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 139, "end_line": 172, "span_ids": ["_is_setup_py", "ReprFailDoctest", "MultipleDoctestFailures", "ReprFailDoctest.toterminal", "_is_doctest"], "tokens": 278}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _is_setup_py(path: Path) -> bool:\n if path.name != \"setup.py\":\n return False\n contents = path.read_bytes()\n return b\"setuptools\" in contents or b\"distutils\" in contents\n\n\ndef _is_doctest(config: Config, path: Path, parent: Collector) -> bool:\n if path.suffix in (\".txt\", \".rst\") and parent.session.isinitpath(path):\n return True\n globs = config.getoption(\"doctestglob\") or [\"test*.txt\"]\n for glob in globs:\n if fnmatch_ex(glob, path):\n return True\n return False\n\n\nclass ReprFailDoctest(TerminalRepr):\n def __init__(\n self, reprlocation_lines: Sequence[Tuple[ReprFileLocation, Sequence[str]]]\n ) -> None:\n self.reprlocation_lines = reprlocation_lines\n\n def toterminal(self, tw: TerminalWriter) -> None:\n for reprlocation, lines in self.reprlocation_lines:\n for line in lines:\n tw.line(line)\n reprlocation.toterminal(tw)\n\n\nclass MultipleDoctestFailures(Exception):\n def __init__(self, failures: Sequence[\"doctest.DocTestFailure\"]) -> None:\n super().__init__()\n self.failures = failures", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_runner_class__init_runner_class.PytestDoctestRunner.report_failure.if_self_continue_on_failu.else_.raise_failure": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_runner_class__init_runner_class.PytestDoctestRunner.report_failure.if_self_continue_on_failu.else_.raise_failure", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 175, "end_line": 208, "span_ids": ["_init_runner_class.PytestDoctestRunner", "_init_runner_class", "_init_runner_class.PytestDoctestRunner:2"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _init_runner_class() -> Type[\"doctest.DocTestRunner\"]:\n import doctest\n\n class PytestDoctestRunner(doctest.DebugRunner):\n \"\"\"Runner to collect failures.\n\n Note that the out variable in this case is a list instead of a\n stdout-like object.\n \"\"\"\n\n def __init__(\n self,\n checker: Optional[\"doctest.OutputChecker\"] = None,\n verbose: Optional[bool] = None,\n optionflags: int = 0,\n continue_on_failure: bool = True,\n ) -> None:\n doctest.DebugRunner.__init__(\n self, checker=checker, verbose=verbose, optionflags=optionflags\n )\n self.continue_on_failure = continue_on_failure\n\n def report_failure(\n self,\n out,\n test: \"doctest.DocTest\",\n example: \"doctest.Example\",\n got: str,\n ) -> None:\n failure = doctest.DocTestFailure(test, example, got)\n if self.continue_on_failure:\n out.append(failure)\n else:\n raise failure\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_runner_class.PytestDoctestRunner.report_unexpected_exception__init_runner_class.return.PytestDoctestRunner": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__init_runner_class.PytestDoctestRunner.report_unexpected_exception__init_runner_class.return.PytestDoctestRunner", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 210, "end_line": 227, "span_ids": ["_init_runner_class.PytestDoctestRunner:2"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _init_runner_class() -> Type[\"doctest.DocTestRunner\"]:\n\n class PytestDoctestRunner(doctest.DebugRunner):\n\n def report_unexpected_exception(\n self,\n out,\n test: \"doctest.DocTest\",\n example: \"doctest.Example\",\n exc_info: Tuple[Type[BaseException], BaseException, types.TracebackType],\n ) -> None:\n if isinstance(exc_info[1], OutcomeException):\n raise exc_info[1]\n if isinstance(exc_info[1], bdb.BdbQuit):\n outcomes.exit(\"Quitting debugger\")\n failure = doctest.UnexpectedException(test, example, exc_info)\n if self.continue_on_failure:\n out.append(failure)\n else:\n raise failure\n\n return PytestDoctestRunner", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem._disable_output_capturing_for_darwin_DoctestItem._TODO_Type_ignored_b": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem._disable_output_capturing_for_darwin_DoctestItem._TODO_Type_ignored_b", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 299, "end_line": 310, "span_ids": ["DoctestItem._disable_output_capturing_for_darwin"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DoctestItem(pytest.Item):\n\n def _disable_output_capturing_for_darwin(self) -> None:\n \"\"\"Disable output capturing. Otherwise, stdout is lost to doctest (#985).\"\"\"\n if platform.system() != \"Darwin\":\n return\n capman = self.config.pluginmanager.getplugin(\"capturemanager\")\n if capman:\n capman.suspend_global_capture(in_=True)\n out, err = capman.read_global_capture()\n sys.stdout.write(out)\n sys.stderr.write(err)\n\n # TODO: Type ignored -- breaks Liskov Substitution.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_getfixturemarker__Key.Tuple_object_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_getfixturemarker__Key.Tuple_object_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 222, "end_line": 237, "span_ids": ["impl:14", "getfixturemarker"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def getfixturemarker(obj: object) -> Optional[\"FixtureFunctionMarker\"]:\n \"\"\"Return fixturemarker or None if it doesn't exist or raised\n exceptions.\"\"\"\n try:\n fixturemarker: Optional[FixtureFunctionMarker] = getattr(\n obj, \"_pytestfixturefunction\", None\n )\n except TEST_OUTCOME:\n # some objects raise errors like request (from flask import request)\n # we don't expect them to be fixture functions\n return None\n return fixturemarker\n\n\n# Parametrized fixture key, helper alias for code below.\n_Key = Tuple[object, ...]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_get_parametrized_fixture_keys_get_parametrized_fixture_keys.try_.else_.for_argname_param_index_.yield_key": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_get_parametrized_fixture_keys_get_parametrized_fixture_keys.try_.else_.for_argname_param_index_.yield_key", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 240, "end_line": 265, "span_ids": ["get_parametrized_fixture_keys"], "tokens": 296}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_parametrized_fixture_keys(item: nodes.Item, scopenum: int) -> Iterator[_Key]:\n \"\"\"Return list of keys for all parametrized arguments which match\n the specified scope.\"\"\"\n assert scopenum < scopenum_function # function\n try:\n callspec = item.callspec # type: ignore[attr-defined]\n except AttributeError:\n pass\n else:\n cs: CallSpec2 = callspec\n # cs.indices.items() is random order of argnames. Need to\n # sort this so that different calls to\n # get_parametrized_fixture_keys will be deterministic.\n for argname, param_index in sorted(cs.indices.items()):\n if cs._arg2scopenum[argname] != scopenum:\n continue\n if scopenum == 0: # session\n key: _Key = (argname, param_index)\n elif scopenum == 1: # package\n key = (argname, param_index, item.fspath.dirpath())\n elif scopenum == 2: # module\n key = (argname, param_index, item.fspath)\n elif scopenum == 3: # class\n item_cls = item.cls # type: ignore[attr-defined]\n key = (argname, param_index, item.fspath, item_cls)\n yield key", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fix_cache_order_fix_cache_order.for_scopenum_in_range_0_.for_key_in_argkeys_cache_.items_by_argkey_scopenum_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fix_cache_order_fix_cache_order.for_scopenum_in_range_0_.for_key_in_argkeys_cache_.items_by_argkey_scopenum_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 292, "end_line": 299, "span_ids": ["fix_cache_order"], "tokens": 103}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def fix_cache_order(\n item: nodes.Item,\n argkeys_cache: Dict[int, Dict[nodes.Item, Dict[_Key, None]]],\n items_by_argkey: Dict[int, Dict[_Key, \"Deque[nodes.Item]\"]],\n) -> None:\n for scopenum in range(0, scopenum_function):\n for key in argkeys_cache[scopenum].get(item, []):\n items_by_argkey[scopenum][key].appendleft(item)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest.getfixturevalue_FixtureRequest.getfixturevalue.return.fixturedef_cached_result_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest.getfixturevalue_FixtureRequest.getfixturevalue.return.fixturedef_cached_result_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 573, "end_line": 586, "span_ids": ["FixtureRequest.getfixturevalue"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n def getfixturevalue(self, argname: str) -> Any:\n \"\"\"Dynamically run a named fixture function.\n\n Declaring fixtures via function argument is recommended where possible.\n But if you can only decide whether to use another fixture at test\n setup time, you may use this function to retrieve it inside a fixture\n or test function body.\n\n :raises pytest.FixtureLookupError:\n If the given fixture could not be found.\n \"\"\"\n fixturedef = self._get_active_fixturedef(argname)\n assert fixturedef.cached_result is not None\n return fixturedef.cached_result[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._getscopeitem_FixtureRequest.__repr__.return._FixtureRequest_for_r_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._getscopeitem_FixtureRequest.__repr__.return._FixtureRequest_for_r_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 731, "end_line": 750, "span_ids": ["FixtureRequest.__repr__", "FixtureRequest._getscopeitem"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n def _getscopeitem(self, scope: \"_Scope\") -> Union[nodes.Item, nodes.Collector]:\n if scope == \"function\":\n # This might also be a non-function Item despite its attribute name.\n node: Optional[Union[nodes.Item, nodes.Collector]] = self._pyfuncitem\n elif scope == \"package\":\n # FIXME: _fixturedef is not defined on FixtureRequest (this class),\n # but on FixtureRequest (a subclass).\n node = get_scope_package(self._pyfuncitem, self._fixturedef) # type: ignore[attr-defined]\n else:\n node = get_scope_node(self._pyfuncitem, scope)\n if node is None and scope == \"class\":\n # Fallback to function item itself.\n node = self._pyfuncitem\n assert node, 'Could not obtain a node for scope \"{}\" for function {!r}'.format(\n scope, 
self._pyfuncitem\n )\n return node\n\n def __repr__(self) -> str:\n return \"<FixtureRequest for %r>\" % (self.node)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_SubRequest_SubRequest.addfinalizer.self__fixturedef_addfinal": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_SubRequest_SubRequest.addfinalizer.self__fixturedef_addfinal", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 753, "end_line": 787, "span_ids": ["SubRequest.__repr__", "SubRequest", "SubRequest.addfinalizer"], "tokens": 300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass SubRequest(FixtureRequest):\n \"\"\"A sub request for handling getting a fixture from a test function/fixture.\"\"\"\n\n def __init__(\n self,\n request: \"FixtureRequest\",\n scope: \"_Scope\",\n param: Any,\n param_index: int,\n fixturedef: \"FixtureDef[object]\",\n *,\n _ispytest: bool = False,\n ) -> None:\n check_ispytest(_ispytest)\n self._parent_request = request\n self.fixturename = fixturedef.argname\n if param is not NOTSET:\n self.param = param\n self.param_index = param_index\n self.scope = scope\n self._fixturedef = fixturedef\n self._pyfuncitem = request._pyfuncitem\n self._fixture_defs = request._fixture_defs\n self._arg2fixturedefs = request._arg2fixturedefs\n self._arg2index = request._arg2index\n self._fixturemanager = request._fixturemanager\n\n def __repr__(self) -> str:\n return f\"<SubRequest {self.fixturename!r} for {self._pyfuncitem!r}>\"\n\n def addfinalizer(self, finalizer: Callable[[], object]) -> None:\n \"\"\"Add finalizer/teardown function to be called after the last test\n within the requesting test context finished execution.\"\"\"\n self._fixturedef.addfinalizer(finalizer)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fail_fixturefunc_call_fixture_func.return.fixture_result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fail_fixturefunc_call_fixture_func.return.fixture_result", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 916, "end_line": 940, "span_ids": ["call_fixture_func", "fail_fixturefunc"], "tokens": 235}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def fail_fixturefunc(fixturefunc, msg: str) -> \"NoReturn\":\n fs, lineno = getfslineno(fixturefunc)\n location = \"{}:{}\".format(fs, lineno + 1)\n source = _pytest._code.Source(fixturefunc)\n 
fail(msg + \":\\n\\n\" + str(source.indent()) + \"\\n\" + location, pytrace=False)\n\n\ndef call_fixture_func(\n fixturefunc: \"_FixtureFunc[_FixtureValue]\", request: FixtureRequest, kwargs\n) -> _FixtureValue:\n if is_generator(fixturefunc):\n fixturefunc = cast(\n Callable[..., Generator[_FixtureValue, None, None]], fixturefunc\n )\n generator = fixturefunc(**kwargs)\n try:\n fixture_result = next(generator)\n except StopIteration:\n raise ValueError(f\"{request.fixturename} did not yield a value\") from None\n finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, generator)\n request.addfinalizer(finalizer)\n else:\n fixturefunc = cast(Callable[..., _FixtureValue], fixturefunc)\n fixture_result = fixturefunc(**kwargs)\n return fixture_result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__teardown_yield_fixture__eval_scope_callable.return.result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__teardown_yield_fixture__eval_scope_callable.return.result", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 943, "end_line": 977, "span_ids": ["_teardown_yield_fixture", "_eval_scope_callable"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _teardown_yield_fixture(fixturefunc, it) -> None:\n \"\"\"Execute the teardown of a fixture function by advancing the iterator\n after the yield and ensure the iteration ends (if not it means there is\n more than one yield in the function).\"\"\"\n try:\n next(it)\n except StopIteration:\n pass\n else:\n fail_fixturefunc(fixturefunc, \"fixture function has more than one 'yield'\")\n\n\ndef _eval_scope_callable(\n scope_callable: \"Callable[[str, Config], _Scope]\",\n fixture_name: str,\n config: Config,\n) -> \"_Scope\":\n try:\n # Type ignored because there is no typing mechanism to specify\n # keyword arguments, currently.\n result = scope_callable(fixture_name=fixture_name, config=config) # type: ignore[call-arg]\n except Exception as e:\n raise TypeError(\n \"Error evaluating {} while defining fixture '{}'.\\n\"\n \"Expected a function with the signature (*, fixture_name, config)\".format(\n scope_callable, fixture_name\n )\n ) from e\n if not isinstance(result, str):\n fail(\n \"Expected {} to return a 'str' while defining fixture '{}', but it returned:\\n\"\n \"{!r}\".format(scope_callable, fixture_name, result),\n pytrace=False,\n )\n return result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_pytest_fixture_setup_pytest_fixture_setup.return.result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_pytest_fixture_setup_pytest_fixture_setup.return.result", "embedding": null, "metadata": 
{"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1118, "end_line": 1140, "span_ids": ["pytest_fixture_setup"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_fixture_setup(\n fixturedef: FixtureDef[_FixtureValue], request: SubRequest\n) -> _FixtureValue:\n \"\"\"Execution of fixture setup.\"\"\"\n kwargs = {}\n for argname in fixturedef.argnames:\n fixdef = request._get_active_fixturedef(argname)\n assert fixdef.cached_result is not None\n result, arg_cache_key, exc = fixdef.cached_result\n request._check_scope(argname, request.scope, fixdef.scope)\n kwargs[argname] = result\n\n fixturefunc = resolve_fixture_function(fixturedef, request)\n my_cache_key = fixturedef.cache_key(request)\n try:\n result = call_fixture_func(fixturefunc, request, kwargs)\n except TEST_OUTCOME:\n exc_info = sys.exc_info()\n assert exc_info[0] is not None\n fixturedef.cached_result = (None, my_cache_key, exc_info)\n raise\n fixturedef.cached_result = (result, my_cache_key, None)\n return result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__ensure_immutable_ids__params_converter.return.tuple_params_if_params_i": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__ensure_immutable_ids__params_converter.return.tuple_params_if_params_i", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1143, "end_line": 1166, "span_ids": ["_ensure_immutable_ids", "_params_converter"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _ensure_immutable_ids(\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ],\n) -> Optional[\n Union[\n Tuple[Union[None, str, float, int, bool], ...],\n Callable[[Any], Optional[object]],\n ]\n]:\n if ids is None:\n return None\n if callable(ids):\n return ids\n return tuple(ids)\n\n\ndef _params_converter(\n params: Optional[Iterable[object]],\n) -> Optional[Tuple[object, ...]]:\n return tuple(params) if params is not None else None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureFunctionMarker_FixtureFunctionMarker.name.attr_ib_type_Optional_str": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureFunctionMarker_FixtureFunctionMarker.name.attr_ib_type_Optional_str", "embedding": null, 
"metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1193, "end_line": 1207, "span_ids": ["FixtureFunctionMarker"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(frozen=True)\nclass FixtureFunctionMarker:\n scope = attr.ib(type=\"Union[_Scope, Callable[[str, Config], _Scope]]\")\n params = attr.ib(type=Optional[Tuple[object, ...]], converter=_params_converter)\n autouse = attr.ib(type=bool, default=False)\n ids = attr.ib(\n type=Union[\n Tuple[Union[None, str, float, int, bool], ...],\n Callable[[Any], Optional[object]],\n ],\n default=None,\n converter=_ensure_immutable_ids,\n )\n name = attr.ib(type=Optional[str], default=None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureFunctionMarker.__call___FixtureFunctionMarker.__call__.return.function": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureFunctionMarker.__call___FixtureFunctionMarker.__call__.return.function", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1209, "end_line": 1232, "span_ids": ["FixtureFunctionMarker.__call__"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(frozen=True)\nclass FixtureFunctionMarker:\n\n def __call__(self, function: _FixtureFunction) -> _FixtureFunction:\n if inspect.isclass(function):\n raise ValueError(\"class fixtures not supported (maybe in the future)\")\n\n if getattr(function, \"_pytestfixturefunction\", False):\n raise ValueError(\n \"fixture is being applied more than once to the same function\"\n )\n\n function = wrap_function_to_error_out_if_called_directly(function, self)\n\n name = self.name or function.__name__\n if name == \"request\":\n location = getlocation(function)\n fail(\n \"'request' is a reserved word for fixtures, use another name:\\n {}\".format(\n location\n ),\n pytrace=False,\n )\n\n # Type ignored because https://github.com/python/mypy/issues/2087.\n function._pytestfixturefunction = self # type: ignore[attr-defined]\n return function", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_showversion_pytest_cmdline_main.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_showversion_pytest_cmdline_main.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/helpconfig.py", 
"file_name": "helpconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 128, "end_line": 152, "span_ids": ["showversion", "pytest_cmdline_main"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def showversion(config: Config) -> None:\n if config.option.version > 1:\n sys.stderr.write(\n \"This is pytest version {}, imported from {}\\n\".format(\n pytest.__version__, pytest.__file__\n )\n )\n plugininfo = getpluginversioninfo(config)\n if plugininfo:\n for line in plugininfo:\n sys.stderr.write(line + \"\\n\")\n else:\n sys.stderr.write(f\"pytest {pytest.__version__}\\n\")\n\n\ndef pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:\n if config.option.version > 0:\n showversion(config)\n return 0\n elif config.option.help:\n config._do_configure()\n showhelp(config)\n config._ensure_unconfigure()\n return 0\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_terminal_summary_pytest_terminal_summary._Add_a_section_to_termi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_terminal_summary_pytest_terminal_summary._Add_a_section_to_termi", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 764, "end_line": 777, "span_ids": ["pytest_terminal_summary"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_terminal_summary(\n terminalreporter: \"TerminalReporter\",\n exitstatus: \"ExitCode\",\n config: \"Config\",\n) -> None:\n \"\"\"Add a section to terminal summary reporting.\n\n :param _pytest.terminal.TerminalReporter terminalreporter: The internal terminal reporter object.\n :param int exitstatus: The exit status that will be reported back to the OS.\n :param _pytest.config.Config config: The pytest config object.\n\n .. 
versionadded:: 4.2\n The ``config`` parameter.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_warning_captured_pytest_warning_captured._Deprecated_Proce": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_warning_captured_pytest_warning_captured._Deprecated_Proce", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 741, "end_line": 773, "span_ids": ["pytest_warning_captured"], "tokens": 313}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(historic=True, warn_on_impl=WARNING_CAPTURED_HOOK)\ndef pytest_warning_captured(\n warning_message: \"warnings.WarningMessage\",\n when: \"Literal['config', 'collect', 'runtest']\",\n item: Optional[\"Item\"],\n location: Optional[Tuple[str, int, str]],\n) -> None:\n \"\"\"(**Deprecated**) Process a warning captured by the internal pytest warnings plugin.\n\n .. deprecated:: 6.0\n\n This hook is considered deprecated and will be removed in a future pytest version.\n Use :func:`pytest_warning_recorded` instead.\n\n :param warnings.WarningMessage warning_message:\n The captured warning. This is the same object produced by :py:func:`warnings.catch_warnings`, and contains\n the same attributes as the parameters of :py:func:`warnings.showwarning`.\n\n :param str when:\n Indicates when the warning was captured. Possible values:\n\n * ``\"config\"``: during pytest configuration/initialization stage.\n * ``\"collect\"``: during test collection.\n * ``\"runtest\"``: during test execution.\n\n :param pytest.Item|None item:\n The item being executed if ``when`` is ``\"runtest\"``, otherwise ``None``.\n\n :param tuple location:\n When available, holds information about the execution context of the captured\n warning (filename, linenumber, function). 
``function`` evaluates to <module>\n when the execution context is at the module level.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.update_testcase_duration_LogXML.pytest_sessionstart.self.suite_start_time.timing_time_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_LogXML.update_testcase_duration_LogXML.pytest_sessionstart.self.suite_start_time.timing_time_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 624, "end_line": 645, "span_ids": ["LogXML.pytest_collectreport", "LogXML.pytest_internalerror", "LogXML.update_testcase_duration", "LogXML.pytest_sessionstart"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LogXML:\n\n def update_testcase_duration(self, report: TestReport) -> None:\n \"\"\"Accumulate total duration for nodeid from given report and update\n the Junit.testcase with the new total if already created.\"\"\"\n if self.report_duration == \"total\" or report.when == self.report_duration:\n reporter = self.node_reporter(report)\n reporter.duration += getattr(report, \"duration\", 0.0)\n\n def pytest_collectreport(self, report: TestReport) -> None:\n if not report.passed:\n reporter = self._opentestcase(report)\n if report.failed:\n reporter.append_collect_error(report)\n else:\n reporter.append_collect_skipped(report)\n\n def pytest_internalerror(self, excrepr: ExceptionRepr) -> None:\n reporter = self.node_reporter(\"internal\")\n reporter.attrs.update(classname=\"pytest\", name=\"internal\")\n reporter._add_simple(\"error\", \"internal error\", str(excrepr))\n\n def pytest_sessionstart(self) -> None:\n self.suite_start_time = timing.time()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline_PercentStyleMultiline._update_message.return.tmp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline_PercentStyleMultiline._update_message.return.tmp", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 92, "end_line": 109, "span_ids": ["PercentStyleMultiline._update_message", "PercentStyleMultiline"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PercentStyleMultiline(logging.PercentStyle):\n \"\"\"A logging style with special support for multiline messages.\n\n If the message of a record consists of 
multiple lines, this style\n formats the message as if each line were logged separately.\n \"\"\"\n\n def __init__(self, fmt: str, auto_indent: Union[int, str, bool, None]) -> None:\n super().__init__(fmt)\n self._auto_indent = self._get_auto_indent(auto_indent)\n\n @staticmethod\n def _update_message(\n record_dict: Dict[str, object], message: str\n ) -> Dict[str, object]:\n tmp = record_dict.copy()\n tmp[\"message\"] = message\n return tmp", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline._get_auto_indent_PercentStyleMultiline._get_auto_indent.return.0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_PercentStyleMultiline._get_auto_indent_PercentStyleMultiline._get_auto_indent.return.0", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 114, "end_line": 167, "span_ids": ["PercentStyleMultiline._get_auto_indent"], "tokens": 428}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PercentStyleMultiline(logging.PercentStyle):\n\n @staticmethod\n def _get_auto_indent(auto_indent_option: Union[int, str, bool, None]) -> int:\n \"\"\"Determine the current auto indentation setting.\n\n Specify auto indent behavior (on/off/fixed) by passing in\n extra={\"auto_indent\": [value]} to the call to logging.log() or\n using a --log-auto-indent [value] command line or the\n log_auto_indent [value] config option.\n\n Default behavior is auto-indent off.\n\n Using the string \"True\" or \"on\" or the boolean True as the value\n turns auto indent on, using the string \"False\" or \"off\" or the\n boolean False or the int 0 turns it off, and specifying a\n positive integer fixes the indentation position to the value\n specified.\n\n Any other values for the option are invalid, and will silently be\n converted to the default.\n\n :param None|bool|int|str auto_indent_option:\n User specified option for indentation from command line, config\n or extra kwarg. Accepts int, bool or str. 
str option accepts the\n same range of values as boolean config options, as well as\n positive integers represented in str form.\n\n :returns:\n Indentation value, which can be\n -1 (automatically determine indentation) or\n 0 (auto-indent turned off) or\n >0 (explicitly set indentation position).\n \"\"\"\n\n if auto_indent_option is None:\n return 0\n elif isinstance(auto_indent_option, bool):\n if auto_indent_option:\n return -1\n else:\n return 0\n elif isinstance(auto_indent_option, int):\n return int(auto_indent_option)\n elif isinstance(auto_indent_option, str):\n try:\n return int(auto_indent_option)\n except ValueError:\n pass\n try:\n if _strtobool(auto_indent_option):\n return -1\n except ValueError:\n return 0\n\n return 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__HandlerType_catching_logs.__exit__.root_logger_removeHandler": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__HandlerType_catching_logs.__exit__.root_logger_removeHandler", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 281, "end_line": 308, "span_ids": ["catching_logs", "catching_logs.__enter__", "impl:11", "catching_logs.__exit__"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "_HandlerType = TypeVar(\"_HandlerType\", bound=logging.Handler)\n\n\n# Not using @contextmanager for performance reasons.\nclass catching_logs:\n \"\"\"Context manager that prepares the whole logging machinery properly.\"\"\"\n\n __slots__ = (\"handler\", \"level\", \"orig_level\")\n\n def __init__(self, handler: _HandlerType, level: Optional[int] = None) -> None:\n self.handler = handler\n self.level = level\n\n def __enter__(self):\n root_logger = logging.getLogger()\n if self.level is not None:\n self.handler.setLevel(self.level)\n root_logger.addHandler(self.handler)\n if self.level is not None:\n self.orig_level = root_logger.level\n root_logger.setLevel(min(self.orig_level, self.level))\n return self.handler\n\n def __exit__(self, type, value, traceback):\n root_logger = logging.getLogger()\n if self.level is not None:\n root_logger.setLevel(self.orig_level)\n root_logger.removeHandler(self.handler)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.messages_LogCaptureFixture.clear.self_handler_reset_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.messages_LogCaptureFixture.clear.self_handler_reset_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 411, "end_line": 432, "span_ids": ["LogCaptureFixture.clear", "LogCaptureFixture.messages"], "tokens": 200}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass LogCaptureFixture:\n\n @property\n def messages(self) -> List[str]:\n \"\"\"A list of format-interpolated log messages.\n\n Unlike 'records', which contains the format string and parameters for\n interpolation, log messages in this list are all interpolated.\n\n Unlike 'text', which contains the output from the handler, log\n messages in this list are unadorned with levels, timestamps, etc,\n making exact comparisons more reliable.\n\n Note that traceback or stack info (from :func:`logging.exception` or\n the `exc_info` or `stack_info` arguments to the logging functions) is\n not included, as this is added by the formatter in the handler.\n\n .. versionadded:: 3.7\n \"\"\"\n return [r.getMessage() for r in self.records]\n\n def clear(self) -> None:\n \"\"\"Reset the list of log records and the captured log text.\"\"\"\n self.handler.reset()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.set_level_LogCaptureFixture.set_level.self_handler_setLevel_lev": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.set_level_LogCaptureFixture.set_level.self_handler_setLevel_lev", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 434, "end_line": 450, "span_ids": ["LogCaptureFixture.set_level"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass LogCaptureFixture:\n\n def set_level(self, level: Union[int, str], logger: Optional[str] = None) -> None:\n \"\"\"Set the level of a logger for the duration of a test.\n\n .. versionchanged:: 3.4\n The levels of the loggers changed by this function will be\n restored to their initial values at the end of the test.\n\n :param int level: The level.\n :param str logger: The logger to update. 
If not given, the root logger.\n \"\"\"\n logger_obj = logging.getLogger(logger)\n # Save the original log-level to restore it during teardown.\n self._initial_logger_levels.setdefault(logger, logger_obj.level)\n logger_obj.setLevel(level)\n if self._initial_handler_level is None:\n self._initial_handler_level = self.handler.level\n self.handler.setLevel(level)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.at_level_LogCaptureFixture.at_level.try_.finally_.self_handler_setLevel_han": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.at_level_LogCaptureFixture.at_level.try_.finally_.self_handler_setLevel_han", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 452, "end_line": 472, "span_ids": ["LogCaptureFixture.at_level"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass LogCaptureFixture:\n\n @contextmanager\n def at_level(\n self, level: int, logger: Optional[str] = None\n ) -> Generator[None, None, None]:\n \"\"\"Context manager that sets the level for capturing of logs. After\n the end of the 'with' statement the level is restored to its original\n value.\n\n :param int level: The level.\n :param str logger: The logger to update. 
If not given, the root logger.\n \"\"\"\n logger_obj = logging.getLogger(logger)\n orig_level = logger_obj.level\n logger_obj.setLevel(level)\n handler_orig_level = self.handler.level\n self.handler.setLevel(level)\n try:\n yield\n finally:\n logger_obj.setLevel(orig_level)\n self.handler.setLevel(handler_orig_level)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_validate_basetemp_validate_basetemp.return.path": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_validate_basetemp_validate_basetemp.return.path", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 231, "end_line": 256, "span_ids": ["validate_basetemp"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def validate_basetemp(path: str) -> str:\n # GH 7119\n msg = \"basetemp must not be empty, the current working directory or any parent directory of it\"\n\n # empty path\n if not path:\n raise argparse.ArgumentTypeError(msg)\n\n def is_ancestor(base: Path, query: Path) -> bool:\n \"\"\"Return whether query is an ancestor of base.\"\"\"\n if base == query:\n return True\n for parent in base.parents:\n if parent == query:\n return True\n return False\n\n # check if path is an ancestor of cwd\n if is_ancestor(Path.cwd(), Path(path).absolute()):\n raise argparse.ArgumentTypeError(msg)\n\n # check symlinks for ancestors\n if is_ancestor(Path.cwd().resolve(), Path(path).resolve()):\n raise argparse.ArgumentTypeError(msg)\n\n return path", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_collection_modifyitems_pytest_collection_modifyitems.if_deselected_.items_remaining": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_collection_modifyitems_pytest_collection_modifyitems.if_deselected_.items_remaining", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 375, "end_line": 390, "span_ids": ["pytest_collection_modifyitems"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_collection_modifyitems(items: List[nodes.Item], config: Config) -> None:\n deselect_prefixes = tuple(config.getoption(\"deselect\") or [])\n if not deselect_prefixes:\n return\n\n remaining = []\n deselected = []\n for colitem in items:\n if colitem.nodeid.startswith(deselect_prefixes):\n deselected.append(colitem)\n else:\n 
remaining.append(colitem)\n\n if deselected:\n config.hook.pytest_deselected(items=deselected)\n items[:] = remaining", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_cmdline_main_pytest_cmdline_main.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py_pytest_cmdline_main_pytest_cmdline_main.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 106, "end_line": 123, "span_ids": ["pytest_cmdline_main"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(tryfirst=True)\ndef pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:\n import _pytest.config\n\n if config.option.markers:\n config._do_configure()\n tw = _pytest.config.create_terminal_writer(config)\n for line in config.getini(\"markers\"):\n parts = line.split(\":\", 1)\n name = parts[0]\n rest = parts[1] if len(parts) == 2 else \"\"\n tw.write(\"@pytest.mark.%s:\" % name, bold=True)\n tw.line(rest)\n tw.line()\n config._ensure_unconfigure()\n return 0\n\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py__True_False_and_None_ar_MatcherAdapter.__len__.raise_NotImplementedError": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py__True_False_and_None_ar_MatcherAdapter.__len__.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/expression.py", "file_name": "expression.py", "file_type": "text/x-python", "category": "implementation", "start_line": 130, "end_line": 187, "span_ids": ["MatcherAdapter.__len__", "not_expr", "expr", "MatcherAdapter", "and_expr", "MatcherAdapter.__iter__", "Scanner.reject", "expression", "MatcherAdapter.__getitem__", "impl:4"], "tokens": 409}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# True, False and None are legal match expression identifiers,\n# but illegal as Python identifiers. 
To fix this, this prefix\n# is added to identifiers in the conversion to Python AST.\nIDENT_PREFIX = \"$\"\n\n\ndef expression(s: Scanner) -> ast.Expression:\n if s.accept(TokenType.EOF):\n ret: ast.expr = ast.NameConstant(False)\n else:\n ret = expr(s)\n s.accept(TokenType.EOF, reject=True)\n return ast.fix_missing_locations(ast.Expression(ret))\n\n\ndef expr(s: Scanner) -> ast.expr:\n ret = and_expr(s)\n while s.accept(TokenType.OR):\n rhs = and_expr(s)\n ret = ast.BoolOp(ast.Or(), [ret, rhs])\n return ret\n\n\ndef and_expr(s: Scanner) -> ast.expr:\n ret = not_expr(s)\n while s.accept(TokenType.AND):\n rhs = not_expr(s)\n ret = ast.BoolOp(ast.And(), [ret, rhs])\n return ret\n\n\ndef not_expr(s: Scanner) -> ast.expr:\n if s.accept(TokenType.NOT):\n return ast.UnaryOp(ast.Not(), not_expr(s))\n if s.accept(TokenType.LPAREN):\n ret = expr(s)\n s.accept(TokenType.RPAREN, reject=True)\n return ret\n ident = s.accept(TokenType.IDENT)\n if ident:\n return ast.Name(IDENT_PREFIX + ident.value, ast.Load())\n s.reject((TokenType.NOT, TokenType.LPAREN, TokenType.IDENT))\n\n\nclass MatcherAdapter(Mapping[str, bool]):\n \"\"\"Adapts a matcher function to a locals mapping as required by eval().\"\"\"\n\n def __init__(self, matcher: Callable[[str], bool]) -> None:\n self.matcher = matcher\n\n def __getitem__(self, key: str) -> bool:\n return self.matcher(key[len(IDENT_PREFIX) :])\n\n def __iter__(self) -> Iterator[str]:\n raise NotImplementedError()\n\n def __len__(self) -> int:\n raise NotImplementedError()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_collections.abc_istestfunc.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_collections.abc_istestfunc.return._", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 46, "span_ids": ["impl", "impl:2", "istestfunc", "imports", "imports:32"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import collections.abc\nimport inspect\nimport warnings\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Collection\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Mapping\nfrom typing import MutableMapping\nfrom typing import NamedTuple\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\n\nimport attr\n\nfrom .._code import getfslineno\nfrom ..compat import ascii_escaped\nfrom ..compat import final\nfrom ..compat import NOTSET\nfrom ..compat import NotSetType\nfrom _pytest.config import Config\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.outcomes import fail\nfrom _pytest.warning_types import PytestUnknownMarkWarning\n\nif TYPE_CHECKING:\n from ..nodes import 
Node\n\n\nEMPTY_PARAMETERSET_OPTION = \"empty_parameter_set_mark\"\n\n\ndef istestfunc(func) -> bool:\n return (\n hasattr(func, \"__call__\")\n and getattr(func, \"__name__\", \"<lambda>\") != \"<lambda>\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet_ParameterSet.param.return.cls_values_marks_id_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet_ParameterSet.param.return.cls_values_marks_id_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 78, "end_line": 106, "span_ids": ["ParameterSet.param", "ParameterSet"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ParameterSet(\n NamedTuple(\n \"ParameterSet\",\n [\n (\"values\", Sequence[Union[object, NotSetType]]),\n (\"marks\", Collection[Union[\"MarkDecorator\", \"Mark\"]]),\n (\"id\", Optional[str]),\n ],\n )\n):\n @classmethod\n def param(\n cls,\n *values: object,\n marks: Union[\"MarkDecorator\", Collection[Union[\"MarkDecorator\", \"Mark\"]]] = (),\n id: Optional[str] = None,\n ) -> \"ParameterSet\":\n if isinstance(marks, MarkDecorator):\n marks = (marks,)\n else:\n assert isinstance(marks, collections.abc.Collection)\n\n if id is not None:\n if not isinstance(id, str):\n raise TypeError(\n \"Expected id to be a string, got {}: {!r}\".format(type(id), id)\n )\n id = ascii_escaped(id)\n return cls(values, marks, id)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet.extract_from_ParameterSet.extract_from.if_force_tuple_.else_._type_ignore_arg_type_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_ParameterSet.extract_from_ParameterSet.extract_from.if_force_tuple_.else_._type_ignore_arg_type_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 108, "end_line": 135, "span_ids": ["ParameterSet.extract_from"], "tokens": 279}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ParameterSet(\n NamedTuple(\n \"ParameterSet\",\n [\n (\"values\", Sequence[Union[object, NotSetType]]),\n (\"marks\", Collection[Union[\"MarkDecorator\", \"Mark\"]]),\n (\"id\", Optional[str]),\n ],\n )\n):\n\n @classmethod\n def extract_from(\n cls,\n parameterset: Union[\"ParameterSet\", Sequence[object], object],\n force_tuple: bool = False,\n ) -> 
\"ParameterSet\":\n \"\"\"Extract from an object or objects.\n\n :param parameterset:\n A legacy style parameterset that may or may not be a tuple,\n and may or may not be wrapped into a mess of mark objects.\n\n :param force_tuple:\n Enforce tuple wrapping so single argument tuple values\n don't get decomposed and break tests.\n \"\"\"\n\n if isinstance(parameterset, cls):\n return parameterset\n if force_tuple:\n return cls.param(parameterset)\n else:\n # TODO: Refactor to fix this type-ignore. Currently the following\n # passes type-checking but crashes:\n #\n # @pytest.mark.parametrize(('x', 'y'), [1, 2])\n # def test_foo(x, y): pass\n return cls(parameterset, marks=[], id=None) # type: ignore[arg-type]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkDecorator_MarkDecorator.__call___8.return.self_with_args_args_k": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkDecorator_MarkDecorator.__call___8.return.self_with_args_args_k", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 274, "end_line": 368, "span_ids": ["MarkDecorator.__call__", "MarkDecorator.args", "MarkDecorator.__repr__", "MarkDecorator.__call___7", "MarkDecorator.kwargs", "MarkDecorator", "MarkDecorator.with_args", "MarkDecorator.markname", "MarkDecorator.__call___8", "MarkDecorator.name"], "tokens": 786}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(init=False, auto_attribs=True)\nclass MarkDecorator:\n \"\"\"A decorator for applying a mark on test functions and classes.\n\n ``MarkDecorators`` are created with ``pytest.mark``::\n\n mark1 = pytest.mark.NAME # Simple MarkDecorator\n mark2 = pytest.mark.NAME(name1=value) # Parametrized MarkDecorator\n\n and can then be applied as decorators to test functions::\n\n @mark2\n def test_function():\n pass\n\n When a ``MarkDecorator`` is called, it does the following:\n\n 1. If called with a single class as its only positional argument and no\n additional keyword arguments, it attaches the mark to the class so it\n gets applied automatically to all test cases found in that class.\n\n 2. If called with a single function as its only positional argument and\n no additional keyword arguments, it attaches the mark to the function,\n containing all the arguments already stored internally in the\n ``MarkDecorator``.\n\n 3. When called in any other case, it returns a new ``MarkDecorator``\n instance with the original ``MarkDecorator``'s content updated with\n the arguments passed to this call.\n\n Note: The rules above prevent a ``MarkDecorator`` from storing only a\n single function or class reference as its positional argument with no\n additional keyword or positional arguments. 
You can work around this by\n using `with_args()`.\n \"\"\"\n\n mark: Mark\n\n def __init__(self, mark: Mark, *, _ispytest: bool = False) -> None:\n \"\"\":meta private:\"\"\"\n check_ispytest(_ispytest)\n self.mark = mark\n\n @property\n def name(self) -> str:\n \"\"\"Alias for mark.name.\"\"\"\n return self.mark.name\n\n @property\n def args(self) -> Tuple[Any, ...]:\n \"\"\"Alias for mark.args.\"\"\"\n return self.mark.args\n\n @property\n def kwargs(self) -> Mapping[str, Any]:\n \"\"\"Alias for mark.kwargs.\"\"\"\n return self.mark.kwargs\n\n @property\n def markname(self) -> str:\n \"\"\":meta private:\"\"\"\n return self.name # for backward-compat (2.4.1 had this attr)\n\n def __repr__(self) -> str:\n return f\"<MarkDecorator {self.mark!r}>\"\n\n def with_args(self, *args: object, **kwargs: object) -> \"MarkDecorator\":\n \"\"\"Return a MarkDecorator with extra arguments added.\n\n Unlike calling the MarkDecorator, with_args() can be used even\n if the sole argument is a callable/class.\n \"\"\"\n mark = Mark(self.name, args, kwargs, _ispytest=True)\n return MarkDecorator(self.mark.combined_with(mark), _ispytest=True)\n\n # Type ignored because the overloads overlap with an incompatible\n # return type. Not much we can do about that. Thankfully mypy picks\n # the first match so it works out even if we break the rules.\n @overload\n def __call__(self, arg: Markable) -> Markable: # type: ignore[misc]\n pass\n\n @overload\n def __call__(self, *args: object, **kwargs: object) -> \"MarkDecorator\":\n pass\n\n def __call__(self, *args: object, **kwargs: object):\n \"\"\"Call the MarkDecorator.\"\"\"\n if args and not kwargs:\n func = args[0]\n is_class = inspect.isclass(func)\n if len(args) == 1 and (istestfunc(func) or is_class):\n store_mark(func, self.mark)\n return func\n return self.with_args(*args, **kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_monkeypatch_monkeypatch.mpatch_undo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_monkeypatch_monkeypatch.mpatch_undo_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 28, "end_line": 50, "span_ids": ["monkeypatch"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef monkeypatch() -> Generator[\"MonkeyPatch\", None, None]:\n \"\"\"A convenient fixture for monkey-patching.\n\n The fixture provides these methods to modify objects, dictionaries or\n os.environ::\n\n monkeypatch.setattr(obj, name, value, raising=True)\n monkeypatch.delattr(obj, name, raising=True)\n monkeypatch.setitem(mapping, name, value)\n monkeypatch.delitem(obj, name, raising=True)\n monkeypatch.setenv(name, value, prepend=False)\n monkeypatch.delenv(name, raising=True)\n monkeypatch.syspath_prepend(path)\n monkeypatch.chdir(path)\n\n All modifications will be undone after the requesting test function or\n fixture has finished. 
The ``raising`` parameter determines if a KeyError\n or AttributeError will be raised if the set/deletion operation has no target.\n \"\"\"\n mpatch = MonkeyPatch()\n yield mpatch\n mpatch.undo()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setattr_3_MonkeyPatch.setattr_3.setattr_target_name_val": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setattr_3_MonkeyPatch.setattr_3.setattr_target_name_val", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 178, "end_line": 224, "span_ids": ["MonkeyPatch.setattr_3"], "tokens": 383}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n\n def setattr(\n self,\n target: Union[str, object],\n name: Union[object, str],\n value: object = notset,\n raising: bool = True,\n ) -> None:\n \"\"\"Set attribute value on target, memorizing the old value.\n\n For convenience you can specify a string as ``target`` which\n will be interpreted as a dotted import path, with the last part\n being the attribute name. For example,\n ``monkeypatch.setattr(\"os.getcwd\", lambda: \"/\")``\n would set the ``getcwd`` function of the ``os`` module.\n\n Raises AttributeError if the attribute does not exist, unless\n ``raising`` is set to False.\n \"\"\"\n __tracebackhide__ = True\n import inspect\n\n if isinstance(value, Notset):\n if not isinstance(target, str):\n raise TypeError(\n \"use setattr(target, name, value) or \"\n \"setattr(target, value) with target being a dotted \"\n \"import string\"\n )\n value = name\n name, target = derive_importpath(target, raising)\n else:\n if not isinstance(name, str):\n raise TypeError(\n \"use setattr(target, name, value) with name being a string or \"\n \"setattr(target, value) with target being a dotted \"\n \"import string\"\n )\n\n oldval = getattr(target, name, notset)\n if raising and oldval is notset:\n raise AttributeError(f\"{target!r} has no attribute {name!r}\")\n\n # avoid class descriptors like staticmethod/classmethod\n if inspect.isclass(target):\n oldval = target.__dict__.get(name, notset)\n self._setattr.append((target, name, oldval))\n setattr(target, name, value)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.iter_markers_Node.iter_markers_with_node.for_node_in_reversed_self.for_mark_in_node_own_mark.if_name_is_None_or_getatt.yield_node_mark": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.iter_markers_Node.iter_markers_with_node.for_node_in_reversed_self.for_mark_in_node_own_mark.if_name_is_None_or_getatt.yield_node_mark", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": 
"text/x-python", "category": "implementation", "start_line": 289, "end_line": 307, "span_ids": ["Node.iter_markers", "Node.iter_markers_with_node"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n\n def iter_markers(self, name: Optional[str] = None) -> Iterator[Mark]:\n \"\"\"Iterate over all markers of the node.\n\n :param name: If given, filter the results by the name attribute.\n \"\"\"\n return (x[1] for x in self.iter_markers_with_node(name=name))\n\n def iter_markers_with_node(\n self, name: Optional[str] = None\n ) -> Iterator[Tuple[\"Node\", Mark]]:\n \"\"\"Iterate over all markers of the node.\n\n :param name: If given, filter the results by the name attribute.\n :returns: An iterator of (node, mark) tuples.\n \"\"\"\n for node in reversed(self.listchain()):\n for mark in node.own_markers:\n if name is None or getattr(mark, \"name\", None) == name:\n yield node, mark", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.get_closest_marker_Node._prunetraceback.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node.get_closest_marker_Node._prunetraceback.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 309, "end_line": 356, "span_ids": ["Node.listnames", "Node.get_closest_marker", "Node.get_closest_marker_14", "Node.addfinalizer", "Node.getparent", "Node.listextrakeywords", "Node.get_closest_marker_13", "Node._prunetraceback"], "tokens": 412}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n\n @overload\n def get_closest_marker(self, name: str) -> Optional[Mark]:\n ...\n\n @overload\n def get_closest_marker(self, name: str, default: Mark) -> Mark:\n ...\n\n def get_closest_marker(\n self, name: str, default: Optional[Mark] = None\n ) -> Optional[Mark]:\n \"\"\"Return the first marker matching the name, from closest (for\n example function) to farther level (for example module level).\n\n :param default: Fallback return value if no marker was found.\n :param name: Name to filter by.\n \"\"\"\n return next(self.iter_markers(name=name), default)\n\n def listextrakeywords(self) -> Set[str]:\n \"\"\"Return a set of all extra keywords in self and any parents.\"\"\"\n extra_keywords: Set[str] = set()\n for item in self.listchain():\n extra_keywords.update(item.extra_keyword_matches)\n return extra_keywords\n\n def listnames(self) -> List[str]:\n return [x.name for x in self.listchain()]\n\n def addfinalizer(self, fin: Callable[[], object]) -> None:\n \"\"\"Register a function to be called when this node is finalized.\n\n This method can only be called when this node is 
active\n in a setup chain, for example during self.setup().\n \"\"\"\n self.session._setupstate.addfinalizer(fin, self)\n\n def getparent(self, cls: Type[_NodeType]) -> Optional[_NodeType]:\n \"\"\"Get the next parent node (including self) which is an instance of\n the given class.\"\"\"\n current: Optional[Node] = self\n while current and not isinstance(current, cls):\n current = current.parent\n assert current is None or isinstance(current, cls)\n return current\n\n def _prunetraceback(self, excinfo: ExceptionInfo[BaseException]) -> None:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_get_fslocation_from_item_get_fslocation_from_item.return.getattr_node_fspath_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_get_fslocation_from_item_get_fslocation_from_item.return.getattr_node_fspath_", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 425, "end_line": 441, "span_ids": ["get_fslocation_from_item"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_fslocation_from_item(node: \"Node\") -> Tuple[Union[str, Path], Optional[int]]:\n \"\"\"Try to extract the actual location from a node, depending on available attributes:\n\n * \"location\": a pair (path, lineno)\n * \"obj\": a Python object that the node wraps.\n * \"fspath\": just a path\n\n :rtype: A tuple of (str|py.path.local, int) with filename and line number.\n \"\"\"\n # See Item.location.\n location: Optional[Tuple[str, Optional[int], str]] = getattr(node, \"location\", None)\n if location is not None:\n return location[:2]\n obj = getattr(node, \"obj\", None)\n if obj is not None:\n return getfslineno(obj)\n return getattr(node, \"fspath\", \"unknown location\"), -1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_ensure_extended_length_path_ensure_extended_length_path.return.path": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_ensure_extended_length_path_ensure_extended_length_path.return.path", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 112, "end_line": 127, "span_ids": ["ensure_extended_length_path"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def ensure_extended_length_path(path: Path) -> Path:\n \"\"\"Get the extended-length version of a path (Windows).\n\n On Windows, 
by default, the maximum length of a path (MAX_PATH) is 260\n characters, and operations on paths longer than that fail. But it is possible\n to overcome this by converting the path to \"extended-length\" form before\n performing the operation:\n https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation\n\n On Windows, this function returns the extended-length absolute version of path.\n On other platforms it returns path unchanged.\n \"\"\"\n if sys.platform.startswith(\"win32\"):\n path = path.resolve()\n path = Path(get_extended_length_path_str(str(path)))\n return path", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_get_extended_length_path_str_parse_num.try_.except_ValueError_.return._1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_get_extended_length_path_str_parse_num.try_.except_ValueError_.return._1", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 141, "end_line": 190, "span_ids": ["find_prefixed", "find_suffixes", "get_extended_length_path_str", "extract_suffixes", "parse_num", "rm_rf"], "tokens": 380}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_extended_length_path_str(path: str) -> str:\n \"\"\"Convert a path to a Windows extended length path.\"\"\"\n long_path_prefix = \"\\\\\\\\?\\\\\"\n unc_long_path_prefix = \"\\\\\\\\?\\\\UNC\\\\\"\n if path.startswith((long_path_prefix, unc_long_path_prefix)):\n return path\n # UNC\n if path.startswith(\"\\\\\\\\\"):\n return unc_long_path_prefix + path[2:]\n return long_path_prefix + path\n\n\ndef rm_rf(path: Path) -> None:\n \"\"\"Remove the path contents recursively, even if some elements\n are read-only.\"\"\"\n path = ensure_extended_length_path(path)\n onerror = partial(on_rm_rf_error, start_path=path)\n shutil.rmtree(str(path), onerror=onerror)\n\n\ndef find_prefixed(root: Path, prefix: str) -> Iterator[Path]:\n \"\"\"Find all elements in root that begin with the prefix, case insensitive.\"\"\"\n l_prefix = prefix.lower()\n for x in root.iterdir():\n if x.name.lower().startswith(l_prefix):\n yield x\n\n\ndef extract_suffixes(iter: Iterable[PurePath], prefix: str) -> Iterator[str]:\n \"\"\"Return the parts of the paths following the prefix.\n\n :param iter: Iterator over path names.\n :param prefix: Expected prefix of the path names.\n \"\"\"\n p_len = len(prefix)\n for p in iter:\n yield p.name[p_len:]\n\n\ndef find_suffixes(root: Path, prefix: str) -> Iterator[str]:\n \"\"\"Combine find_prefixes and extract_suffixes.\"\"\"\n return extract_suffixes(find_prefixed(root, prefix), prefix)\n\n\ndef parse_num(maybe_num) -> int:\n \"\"\"Parse number path suffixes, returns -1 on error.\"\"\"\n try:\n return int(maybe_num)\n except ValueError:\n return -1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, 
"__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_try_cleanup_cleanup_candidates.for_path_number_in_zip_p.if_number_max_delete_.yield_path": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_try_cleanup_cleanup_candidates.for_path_number_in_zip_p.if_number_max_delete_.yield_path", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 326, "end_line": 341, "span_ids": ["cleanup_candidates", "try_cleanup"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def try_cleanup(path: Path, consider_lock_dead_if_created_before: float) -> None:\n \"\"\"Try to cleanup a folder if we can ensure it's deletable.\"\"\"\n if ensure_deletable(path, consider_lock_dead_if_created_before):\n maybe_delete_a_numbered_dir(path)\n\n\ndef cleanup_candidates(root: Path, prefix: str, keep: int) -> Iterator[Path]:\n \"\"\"List candidates for numbered directories to be removed - follows py.path.\"\"\"\n max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1)\n max_delete = max_existing - keep\n paths = find_prefixed(root, prefix)\n paths, paths2 = itertools.tee(paths)\n numbers = map(parse_num, extract_suffixes(paths2, prefix))\n for path, number in zip(paths, numbers):\n if number <= max_delete:\n yield path", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_fnmatch_ex_fnmatch_ex.return.fnmatch_fnmatch_name_pat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_fnmatch_ex_fnmatch_ex.return.fnmatch_fnmatch_name_pat", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 390, "end_line": 423, "span_ids": ["fnmatch_ex"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def fnmatch_ex(pattern: str, path: Union[str, \"os.PathLike[str]\"]) -> bool:\n \"\"\"A port of FNMatcher from py.path.common which works with PurePath() instances.\n\n The difference between this algorithm and PurePath.match() is that the\n latter matches \"**\" glob expressions for each part of the path, while\n this algorithm uses the whole path instead.\n\n For example:\n \"tests/foo/bar/doc/test_foo.py\" matches pattern \"tests/**/doc/test*.py\"\n with this algorithm, but not with PurePath.match().\n\n This algorithm was ported to keep backward-compatibility with existing\n settings which assume paths match according this logic.\n\n References:\n * https://bugs.python.org/issue29249\n * https://bugs.python.org/issue34731\n \"\"\"\n path = PurePath(path)\n iswin32 = 
sys.platform.startswith(\"win\")\n\n if iswin32 and sep not in pattern and posix_sep in pattern:\n # Running on Windows, the pattern has no Windows path separators,\n # and the pattern has one or more Posix path separators. Replace\n # the Posix path separators with the Windows path separator.\n pattern = pattern.replace(posix_sep, sep)\n\n if sep not in pattern:\n name = path.name\n else:\n name = str(path)\n if path.is_absolute() and not os.path.isabs(pattern):\n pattern = f\"*{os.sep}{pattern}\"\n return fnmatch.fnmatch(name, pattern)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_parts_ImportPathMismatchError._Raised_on_import_path_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_parts_ImportPathMismatchError._Raised_on_import_path_", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 426, "end_line": 453, "span_ids": ["symlink_or_skip", "ImportMode", "ImportPathMismatchError", "parts"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def parts(s: str) -> Set[str]:\n parts = s.split(sep)\n return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))}\n\n\ndef symlink_or_skip(src, dst, **kwargs):\n \"\"\"Make a symlink, or skip the test in case symlinks are not supported.\"\"\"\n try:\n os.symlink(str(src), str(dst), **kwargs)\n except OSError as e:\n skip(f\"symlinks not supported: {e}\")\n\n\nclass ImportMode(Enum):\n \"\"\"Possible values for `mode` parameter of `import_path`.\"\"\"\n\n prepend = \"prepend\"\n append = \"append\"\n importlib = \"importlib\"\n\n\nclass ImportPathMismatchError(ImportError):\n \"\"\"Raised on import_path() if there is a mismatch of __file__'s.\n\n This can happen when `import_path` is called multiple times with different filenames that has\n the same basename but reside in packages\n (for example \"/tests1/test_foo.py\" and \"/tests2/test_foo.py\").\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_addoption_pytest_addoption.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_addoption_pytest_addoption.None_5", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 75, "end_line": 118, "span_ids": ["pytest_addoption"], "tokens": 306}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def 
pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"general\")\n group.addoption(\n \"--fixtures\",\n \"--funcargs\",\n action=\"store_true\",\n dest=\"showfixtures\",\n default=False,\n help=\"show available fixtures, sorted by plugin appearance \"\n \"(fixtures with leading '_' are only shown with '-v')\",\n )\n group.addoption(\n \"--fixtures-per-test\",\n action=\"store_true\",\n dest=\"show_fixtures_per_test\",\n default=False,\n help=\"show fixtures per test\",\n )\n parser.addini(\n \"python_files\",\n type=\"args\",\n # NOTE: default is also used in AssertionRewritingHook.\n default=[\"test_*.py\", \"*_test.py\"],\n help=\"glob-style file patterns for Python test module discovery\",\n )\n parser.addini(\n \"python_classes\",\n type=\"args\",\n default=[\"Test\"],\n help=\"prefixes or glob names for Python test class discovery\",\n )\n parser.addini(\n \"python_functions\",\n type=\"args\",\n default=[\"test\"],\n help=\"prefixes or glob names for Python test function and method discovery\",\n )\n parser.addini(\n \"disable_test_id_escaping_and_forfeit_all_rights_to_community_support\",\n type=\"bool\",\n default=False,\n help=\"disable string escape non-ascii characters, might cause unwanted \"\n \"side effects(use at your own risk)\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_collect_file_pytest_collect_file.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_collect_file_pytest_collect_file.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 191, "end_line": 205, "span_ids": ["pytest_collect_file"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_collect_file(\n fspath: Path, path: py.path.local, parent: nodes.Collector\n) -> Optional[\"Module\"]:\n if fspath.suffix == \".py\":\n if not parent.session.isinitpath(fspath):\n if not path_matches_patterns(\n fspath, parent.config.getini(\"python_files\") + [\"__init__.py\"]\n ):\n return None\n ihook = parent.session.gethookproxy(fspath)\n module: Module = ihook.pytest_pycollect_makemodule(\n fspath=fspath, path=path, parent=parent\n )\n return module\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_path_matches_patterns_pytest_pycollect_makemodule.return.mod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_path_matches_patterns_pytest_pycollect_makemodule.return.mod", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 208, "end_line": 218, "span_ids": ["pytest_pycollect_makemodule", "path_matches_patterns"], "tokens": 120}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def path_matches_patterns(path: Path, patterns: Iterable[str]) -> bool:\n \"\"\"Return whether path matches any of the patterns in the list of globs given.\"\"\"\n return any(fnmatch_ex(pattern, path) for pattern in patterns)\n\n\ndef pytest_pycollect_makemodule(fspath: Path, path: py.path.local, parent) -> \"Module\":\n if fspath.name == \"__init__.py\":\n pkg: Package = Package.from_parent(parent, fspath=path)\n return pkg\n mod: Module = Module.from_parent(parent, fspath=path)\n return mod", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector.collect_PyCollector.collect.return.values": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyCollector.collect_PyCollector.collect.return.values", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 412, "end_line": 448, "span_ids": ["PyCollector.collect"], "tokens": 308}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PyCollector(PyobjMixin, nodes.Collector):\n\n def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:\n if not getattr(self.obj, \"__test__\", True):\n return []\n\n # NB. 
we avoid random getattrs and peek in the __dict__ instead\n # (XXX originally introduced from a PyPy need, still true?)\n dicts = [getattr(self.obj, \"__dict__\", {})]\n for basecls in self.obj.__class__.__mro__:\n dicts.append(basecls.__dict__)\n seen: Set[str] = set()\n values: List[Union[nodes.Item, nodes.Collector]] = []\n ihook = self.ihook\n for dic in dicts:\n # Note: seems like the dict can change during iteration -\n # be careful not to remove the list() without consideration.\n for name, obj in list(dic.items()):\n if name in IGNORED_ATTRIBUTES:\n continue\n if name in seen:\n continue\n seen.add(name)\n res = ihook.pytest_pycollect_makeitem(\n collector=self, name=name, obj=obj\n )\n if res is None:\n continue\n elif isinstance(res, list):\n values.extend(res)\n else:\n values.append(res)\n\n def sort_key(item):\n fspath, lineno, _ = item.reportinfo()\n return (str(fspath), lineno)\n\n values.sort(key=sort_key)\n return values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc.parametrize_Metafunc.parametrize.if_request_in_argnames_.fail_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc.parametrize_Metafunc.parametrize.if_request_in_argnames_.fail_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 994, "end_line": 1076, "span_ids": ["Metafunc.parametrize"], "tokens": 741}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Metafunc:\n\n def parametrize(\n self,\n argnames: Union[str, List[str], Tuple[str, ...]],\n argvalues: Iterable[Union[ParameterSet, Sequence[object], object]],\n indirect: Union[bool, Sequence[str]] = False,\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ] = None,\n scope: \"Optional[_Scope]\" = None,\n *,\n _param_mark: Optional[Mark] = None,\n ) -> None:\n \"\"\"Add new invocations to the underlying test function using the list\n of argvalues for the given argnames. Parametrization is performed\n during the collection phase. If you need to setup expensive resources\n see about setting indirect to do it rather at test setup time.\n\n :param argnames:\n A comma-separated string denoting one or more argument names, or\n a list/tuple of argument strings.\n\n :param argvalues:\n The list of argvalues determines how often a test is invoked with\n different argument values.\n\n If only one argname was specified argvalues is a list of values.\n If N argnames were specified, argvalues must be a list of\n N-tuples, where each tuple-element specifies a value for its\n respective argname.\n\n :param indirect:\n A list of arguments' names (subset of argnames) or a boolean.\n If True the list contains all names from the argnames. 
Each\n argvalue corresponding to an argname in this list will\n be passed as request.param to its respective argname fixture\n function so that it can perform more expensive setups during the\n setup phase of a test rather than at collection time.\n\n :param ids:\n Sequence of (or generator for) ids for ``argvalues``,\n or a callable to return part of the id for each argvalue.\n\n With sequences (and generators like ``itertools.count()``) the\n returned ids should be of type ``string``, ``int``, ``float``,\n ``bool``, or ``None``.\n They are mapped to the corresponding index in ``argvalues``.\n ``None`` means to use the auto-generated id.\n\n If it is a callable it will be called for each entry in\n ``argvalues``, and the return value is used as part of the\n auto-generated id for the whole set (where parts are joined with\n dashes (\"-\")).\n This is useful to provide more specific ids for certain items, e.g.\n dates. Returning ``None`` will use an auto-generated id.\n\n If no ids are provided they will be generated automatically from\n the argvalues.\n\n :param scope:\n If specified it denotes the scope of the parameters.\n The scope is used for grouping tests by parameter instances.\n It will also override any fixture-function defined scope, allowing\n to set a dynamic scope using test context or configuration.\n \"\"\"\n from _pytest.fixtures import scope2index\n\n argnames, parameters = ParameterSet._for_parametrize(\n argnames,\n argvalues,\n self.function,\n self.config,\n nodeid=self.definition.nodeid,\n )\n del argvalues\n\n if \"request\" in argnames:\n fail(\n \"'request' is a reserved name and cannot be used in @pytest.mark.parametrize\",\n pytrace=False,\n )\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc.parametrize.if_scope_is_None__Metafunc.parametrize.self._calls.newcalls": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc.parametrize.if_scope_is_None__Metafunc.parametrize.self._calls.newcalls", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1078, "end_line": 1120, "span_ids": ["Metafunc.parametrize"], "tokens": 508}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Metafunc:\n\n def parametrize(\n self,\n argnames: Union[str, List[str], Tuple[str, ...]],\n argvalues: Iterable[Union[ParameterSet, Sequence[object], object]],\n indirect: Union[bool, Sequence[str]] = False,\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ] = None,\n scope: \"Optional[_Scope]\" = None,\n *,\n _param_mark: Optional[Mark] = None,\n ) -> None:\n # ... 
other code\n\n if scope is None:\n scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)\n\n self._validate_if_using_arg_names(argnames, indirect)\n\n arg_values_types = self._resolve_arg_value_types(argnames, indirect)\n\n # Use any already (possibly) generated ids with parametrize Marks.\n if _param_mark and _param_mark._param_ids_from:\n generated_ids = _param_mark._param_ids_from._param_ids_generated\n if generated_ids is not None:\n ids = generated_ids\n\n ids = self._resolve_arg_ids(\n argnames, ids, parameters, nodeid=self.definition.nodeid\n )\n\n # Store used (possibly generated) ids with parametrize Marks.\n if _param_mark and _param_mark._param_ids_from and generated_ids is None:\n object.__setattr__(_param_mark._param_ids_from, \"_param_ids_generated\", ids)\n\n scopenum = scope2index(\n scope, descr=f\"parametrize() call in {self.function.__name__}\"\n )\n\n # Create the new calls: if we are parametrize() multiple times (by applying the decorator\n # more than once) then we accumulate those calls generating the cartesian product\n # of all calls.\n newcalls = []\n for callspec in self._calls or [CallSpec2(self)]:\n for param_index, (param_id, param_set) in enumerate(zip(ids, parameters)):\n newcallspec = callspec.copy()\n newcallspec.setmulti2(\n arg_values_types,\n argnames,\n param_set.values,\n param_id,\n param_set.marks,\n scopenum,\n param_index,\n )\n newcalls.append(newcallspec)\n self._calls = newcalls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__This_doesn_t_work_with__RaisesContext.__enter__.return.self_excinfo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__This_doesn_t_work_with__RaisesContext.__enter__.return.self_excinfo", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 748, "end_line": 767, "span_ids": ["raises_7", "RaisesContext", "RaisesContext.__enter__", "impl:4"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# This doesn't work with mypy for now. 
Use fail.Exception instead.\nraises.Exception = fail.Exception # type: ignore\n\n\n@final\nclass RaisesContext(Generic[_E]):\n def __init__(\n self,\n expected_exception: Union[Type[_E], Tuple[Type[_E], ...]],\n message: str,\n match_expr: Optional[Union[str, Pattern[str]]] = None,\n ) -> None:\n self.expected_exception = expected_exception\n self.message = message\n self.match_expr = match_expr\n self.excinfo: Optional[_pytest._code.ExceptionInfo[_E]] = None\n\n def __enter__(self) -> _pytest._code.ExceptionInfo[_E]:\n self.excinfo = _pytest._code.ExceptionInfo.for_later()\n return self.excinfo", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_from_io_import_StringIO__R.TypeVar__R_bound_Base": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_from_io_import_StringIO__R.TypeVar__R_bound_Base", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 57, "span_ids": ["impl", "imports:35", "impl:2", "getworkerinfoline", "imports"], "tokens": 385}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from io import StringIO\nfrom pathlib import Path\nfrom pprint import pprint\nfrom typing import Any\nfrom typing import cast\nfrom typing import Dict\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\n\nimport attr\nimport py\n\nfrom _pytest._code.code import ExceptionChainRepr\nfrom _pytest._code.code import ExceptionInfo\nfrom _pytest._code.code import ExceptionRepr\nfrom _pytest._code.code import ReprEntry\nfrom _pytest._code.code import ReprEntryNative\nfrom _pytest._code.code import ReprExceptionInfo\nfrom _pytest._code.code import ReprFileLocation\nfrom _pytest._code.code import ReprFuncArgs\nfrom _pytest._code.code import ReprLocals\nfrom _pytest._code.code import ReprTraceback\nfrom _pytest._code.code import TerminalRepr\nfrom _pytest._io import TerminalWriter\nfrom _pytest.compat import final\nfrom _pytest.config import Config\nfrom _pytest.nodes import Collector\nfrom _pytest.nodes import Item\nfrom _pytest.outcomes import skip\n\nif TYPE_CHECKING:\n from typing import NoReturn\n from typing_extensions import Literal\n\n from _pytest.runner import CallInfo\n\n\ndef getworkerinfoline(node):\n try:\n return node._workerinfocache\n except AttributeError:\n d = node.workerinfo\n ver = \"%s.%s.%s\" % d[\"version_info\"][:3]\n node._workerinfocache = s = \"[{}] {} -- Python {} {}\".format(\n d[\"id\"], d[\"sysplatform\"], ver, d[\"executable\"]\n )\n return s\n\n\n_R = TypeVar(\"_R\", bound=\"BaseReport\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.head_line_BaseReport.head_line.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.head_line_BaseReport.head_line.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 173, "end_line": 192, "span_ids": ["BaseReport.head_line"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseReport:\n\n @property\n def head_line(self) -> Optional[str]:\n \"\"\"**Experimental** The head line shown with longrepr output for this\n report, more commonly during traceback representation during\n failures::\n\n ________ Test.foo ________\n\n\n In the example above, the head_line is \"Test.foo\".\n\n .. note::\n\n This function is considered **experimental**, so beware that it is subject to changes\n even in patch releases.\n \"\"\"\n if self.location is not None:\n fspath, lineno, domain = self.location\n return domain\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_CollectErrorRepr_pytest_report_from_serializable.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_CollectErrorRepr_pytest_report_from_serializable.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 404, "end_line": 434, "span_ids": ["pytest_report_to_serializable", "pytest_report_from_serializable", "CollectErrorRepr.toterminal", "CollectErrorRepr"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CollectErrorRepr(TerminalRepr):\n def __init__(self, msg: str) -> None:\n self.longrepr = msg\n\n def toterminal(self, out: TerminalWriter) -> None:\n out.line(self.longrepr, red=True)\n\n\ndef pytest_report_to_serializable(\n report: Union[CollectReport, TestReport]\n) -> Optional[Dict[str, Any]]:\n if isinstance(report, (TestReport, CollectReport)):\n data = report._to_json()\n data[\"$report_type\"] = report.__class__.__name__\n return data\n # TODO: Check if this is actually reachable.\n return None # type: ignore[unreachable]\n\n\ndef pytest_report_from_serializable(\n data: Dict[str, Any],\n) -> Optional[Union[CollectReport, TestReport]]:\n if \"$report_type\" in data:\n if data[\"$report_type\"] == \"TestReport\":\n return TestReport._from_json(data)\n elif data[\"$report_type\"] == \"CollectReport\":\n return CollectReport._from_json(data)\n assert False, \"Unknown report_type unserialize data: {}\".format(\n data[\"$report_type\"]\n )\n return None", "start_char_idx": 
null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_sessionstart_pytest_runtest_protocol.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_sessionstart_pytest_runtest_protocol.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 102, "end_line": 115, "span_ids": ["pytest_runtest_protocol", "pytest_sessionfinish", "pytest_sessionstart"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_sessionstart(session: \"Session\") -> None:\n session._setupstate = SetupState()\n\n\ndef pytest_sessionfinish(session: \"Session\") -> None:\n session._setupstate.teardown_exact(None)\n\n\ndef pytest_runtest_protocol(item: Item, nextitem: Optional[Item]) -> bool:\n ihook = item.ihook\n ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location)\n runtestprotocol(item, nextitem=nextitem)\n ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location)\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_runtestprotocol_runtestprotocol.return.reports": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_runtestprotocol_runtestprotocol.return.reports", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 118, "end_line": 139, "span_ids": ["runtestprotocol"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def runtestprotocol(\n item: Item, log: bool = True, nextitem: Optional[Item] = None\n) -> List[TestReport]:\n hasrequest = hasattr(item, \"_request\")\n if hasrequest and not item._request: # type: ignore[attr-defined]\n # This only happens if the item is re-run, as is done by\n # pytest-rerunfailures.\n item._initrequest() # type: ignore[attr-defined]\n rep = call_and_report(item, \"setup\", log)\n reports = [rep]\n if rep.passed:\n if item.config.getoption(\"setupshow\", False):\n show_test_item(item)\n if not item.config.getoption(\"setuponly\", False):\n reports.append(call_and_report(item, \"call\", log))\n reports.append(call_and_report(item, \"teardown\", log, nextitem=nextitem))\n # After all teardown hooks have been called\n # want funcargs and request info to go away.\n if hasrequest:\n item._request = False # type: ignore[attr-defined]\n item.funcargs = None # type: ignore[attr-defined]\n return reports", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_show_test_item_pytest_runtest_setup.item_session__setupstate_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_show_test_item_pytest_runtest_setup.item_session__setupstate_", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 142, "end_line": 156, "span_ids": ["show_test_item", "pytest_runtest_setup"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def show_test_item(item: Item) -> None:\n \"\"\"Show test function, parameters and the fixtures of the test item.\"\"\"\n tw = item.config.get_terminal_writer()\n tw.line()\n tw.write(\" \" * 8)\n tw.write(item.nodeid)\n used_fixtures = sorted(getattr(item, \"fixturenames\", []))\n if used_fixtures:\n tw.write(\" (fixtures used: {})\".format(\", \".join(used_fixtures)))\n tw.flush()\n\n\ndef pytest_runtest_setup(item: Item) -> None:\n _update_current_test_var(item, \"setup\")\n item.session._setupstate.setup(item)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_runtest_call_pytest_runtest_teardown.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_runtest_call_pytest_runtest_teardown.None_2", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 159, "end_line": 182, "span_ids": ["pytest_runtest_call", "pytest_runtest_teardown"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_runtest_call(item: Item) -> None:\n _update_current_test_var(item, \"call\")\n try:\n del sys.last_type\n del sys.last_value\n del sys.last_traceback\n except AttributeError:\n pass\n try:\n item.runtest()\n except Exception as e:\n # Store trace info to allow postmortem debugging\n sys.last_type = type(e)\n sys.last_value = e\n assert e.__traceback__ is not None\n # Skip *this* frame\n sys.last_traceback = e.__traceback__.tb_next\n raise e\n\n\ndef pytest_runtest_teardown(item: Item, nextitem: Optional[Item]) -> None:\n _update_current_test_var(item, \"teardown\")\n item.session._setupstate.teardown_exact(nextitem)\n _update_current_test_var(item, None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py_from_typing_import_Genera_pytest_addoption.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py_from_typing_import_Genera_pytest_addoption.None_1", "embedding": null, "metadata": {"file_path": "src/_pytest/setuponly.py", "file_name": "setuponly.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 27, "span_ids": ["imports", "pytest_addoption"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Generator\nfrom typing import Optional\nfrom typing import Union\n\nimport pytest\nfrom _pytest._io.saferepr import saferepr\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.fixtures import FixtureDef\nfrom _pytest.fixtures import SubRequest\n\n\ndef pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"debugconfig\")\n group.addoption(\n \"--setuponly\",\n \"--setup-only\",\n action=\"store_true\",\n help=\"only setup fixtures, do not execute tests.\",\n )\n group.addoption(\n \"--setupshow\",\n \"--setup-show\",\n action=\"store_true\",\n help=\"show setup of fixtures while executing tests.\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py_pytest_fixture_setup_pytest_fixture_post_finalizer.if_fixturedef_cached_resu.if_config_option_setupsho.if_hasattr_fixturedef_c._type_ignore_attr_defin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setuponly.py_pytest_fixture_setup_pytest_fixture_post_finalizer.if_fixturedef_cached_resu.if_config_option_setupsho.if_hasattr_fixturedef_c._type_ignore_attr_defin", "embedding": null, "metadata": {"file_path": "src/_pytest/setuponly.py", "file_name": "setuponly.py", "file_type": "text/x-python", "category": "implementation", "start_line": 30, "end_line": 56, "span_ids": ["pytest_fixture_setup", "pytest_fixture_post_finalizer"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.hookimpl(hookwrapper=True)\ndef pytest_fixture_setup(\n fixturedef: FixtureDef[object], request: SubRequest\n) -> Generator[None, None, None]:\n yield\n if request.config.option.setupshow:\n if hasattr(request, \"param\"):\n # Save the fixture parameter so ._show_fixture_action() can\n # display it now and during the teardown (in .finish()).\n if fixturedef.ids:\n if callable(fixturedef.ids):\n param = fixturedef.ids(request.param)\n else:\n param = fixturedef.ids[request.param_index]\n else:\n param = request.param\n fixturedef.cached_param = param # type: ignore[attr-defined]\n _show_fixture_action(fixturedef, \"SETUP\")\n\n\ndef pytest_fixture_post_finalizer(fixturedef: FixtureDef[object]) -> None:\n if 
fixturedef.cached_result is not None:\n config = fixturedef._fixturemanager.config\n if config.option.setupshow:\n _show_fixture_action(fixturedef, \"TEARDOWN\")\n if hasattr(fixturedef, \"cached_param\"):\n del fixturedef.cached_param # type: ignore[attr-defined]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setupplan.py_from_typing_import_Option_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/setupplan.py_from_typing_import_Option_", "embedding": null, "metadata": {"file_path": "src/_pytest/setupplan.py", "file_name": "setupplan.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 41, "span_ids": ["pytest_cmdline_main", "imports", "pytest_fixture_setup", "pytest_addoption"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional\nfrom typing import Union\n\nimport pytest\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.fixtures import FixtureDef\nfrom _pytest.fixtures import SubRequest\n\n\ndef pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"debugconfig\")\n group.addoption(\n \"--setupplan\",\n \"--setup-plan\",\n action=\"store_true\",\n help=\"show what fixtures and tests would be executed but \"\n \"don't execute anything.\",\n )\n\n\n@pytest.hookimpl(tryfirst=True)\ndef pytest_fixture_setup(\n fixturedef: FixtureDef[object], request: SubRequest\n) -> Optional[object]:\n # Will return a dummy fixture if the setuponly option is provided.\n if request.config.option.setupplan:\n my_cache_key = fixturedef.cache_key(request)\n fixturedef.cached_result = (None, my_cache_key, None)\n return fixturedef.cached_result\n return None\n\n\n@pytest.hookimpl(tryfirst=True)\ndef pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:\n if config.option.setupplan:\n config.option.setuponly = True\n config.option.setupshow = True\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_addoption_pytest_addoption.parser_addini_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_addoption_pytest_addoption.parser_addini_", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 24, "end_line": 40, "span_ids": ["pytest_addoption"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: 
Parser) -> None:\n group = parser.getgroup(\"general\")\n group.addoption(\n \"--runxfail\",\n action=\"store_true\",\n dest=\"runxfail\",\n default=False,\n help=\"report the results of xfail tests as if they were not marked\",\n )\n\n parser.addini(\n \"xfail_strict\",\n \"default for the strict parameter of xfail \"\n \"markers when not given explicitly (default: False)\",\n default=False,\n type=\"bool\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._determine_show_progress_info_TerminalReporter._determine_show_progress_info.if_cfg_progress_.else_.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._determine_show_progress_info_TerminalReporter._determine_show_progress_info.if_cfg_progress_.else_.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 337, "end_line": 351, "span_ids": ["TerminalReporter._determine_show_progress_info"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _determine_show_progress_info(self) -> \"Literal['progress', 'count', False]\":\n \"\"\"Return whether we should display progress information based on the current config.\"\"\"\n # do not show progress if we are not capturing output (#3038)\n if self.config.getoption(\"capture\", \"no\") == \"no\":\n return False\n # do not show progress if we are showing fixture setup/teardown\n if self.config.getoption(\"setupshow\", False):\n return False\n cfg: str = self.config.getini(\"console_output_style\")\n if cfg == \"progress\":\n return \"progress\"\n elif cfg == \"count\":\n return \"count\"\n else:\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_sep_TerminalReporter.pytest_runtest_logstart.if_self_showlongtestinfo_.elif_self_showfspath_.self_flush_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.write_sep_TerminalReporter.pytest_runtest_logstart.if_self_showlongtestinfo_.elif_self_showfspath_.self_flush_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 443, "end_line": 507, "span_ids": ["TerminalReporter.section", "TerminalReporter.pytest_deselected", "TerminalReporter._add_stats", "TerminalReporter.pytest_internalerror", "TerminalReporter.pytest_plugin_registered", "TerminalReporter.pytest_warning_recorded", "TerminalReporter.write_sep", "TerminalReporter.pytest_runtest_logstart", "TerminalReporter.line"], "tokens": 554}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def write_sep(\n self,\n sep: str,\n title: Optional[str] = None,\n fullwidth: Optional[int] = None,\n **markup: bool,\n ) -> None:\n self.ensure_newline()\n self._tw.sep(sep, title, fullwidth, **markup)\n\n def section(self, title: str, sep: str = \"=\", **kw: bool) -> None:\n self._tw.sep(sep, title, **kw)\n\n def line(self, msg: str, **kw: bool) -> None:\n self._tw.line(msg, **kw)\n\n def _add_stats(self, category: str, items: Sequence[Any]) -> None:\n set_main_color = category not in self.stats\n self.stats.setdefault(category, []).extend(items)\n if set_main_color:\n self._set_main_color()\n\n def pytest_internalerror(self, excrepr: ExceptionRepr) -> bool:\n for line in str(excrepr).split(\"\\n\"):\n self.write_line(\"INTERNALERROR> \" + line)\n return True\n\n def pytest_warning_recorded(\n self,\n warning_message: warnings.WarningMessage,\n nodeid: str,\n ) -> None:\n from _pytest.warnings import warning_record_to_str\n\n fslocation = warning_message.filename, warning_message.lineno\n message = warning_record_to_str(warning_message)\n\n warning_report = WarningReport(\n fslocation=fslocation, message=message, nodeid=nodeid\n )\n self._add_stats(\"warnings\", [warning_report])\n\n def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None:\n if self.config.option.traceconfig:\n msg = f\"PLUGIN registered: {plugin}\"\n # XXX This event may happen during setup/teardown time\n # which unfortunately captures our output here\n # which garbles our output if we use self.write_line.\n self.write_line(msg)\n\n def pytest_deselected(self, items: Sequence[Item]) -> None:\n self._add_stats(\"deselected\", items)\n\n def pytest_runtest_logstart(\n self, nodeid: str, location: Tuple[str, Optional[int], str]\n ) -> None:\n # Ensure that the path is printed before the\n # 1st test of a module starts running.\n if self.showlongtestinfo:\n line = self._locationline(nodeid, *location)\n self.write_ensure_prefix(line, \"\")\n self.flush()\n elif self.showfspath:\n self.write_fspath_result(nodeid, \"\")\n self.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_sessionstart_TerminalReporter.pytest_sessionstart.if_not_self_no_header_.self__write_report_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_sessionstart_TerminalReporter.pytest_sessionstart.if_not_self_no_header_.self__write_report_lines_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 683, "end_line": 710, "span_ids": ["TerminalReporter.pytest_sessionstart"], "tokens": 291}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass 
TerminalReporter:\n\n @hookimpl(trylast=True)\n def pytest_sessionstart(self, session: \"Session\") -> None:\n self._session = session\n self._sessionstarttime = timing.time()\n if not self.showheader:\n return\n self.write_sep(\"=\", \"test session starts\", bold=True)\n verinfo = platform.python_version()\n if not self.no_header:\n msg = f\"platform {sys.platform} -- Python {verinfo}\"\n pypy_version_info = getattr(sys, \"pypy_version_info\", None)\n if pypy_version_info:\n verinfo = \".\".join(map(str, pypy_version_info[:3]))\n msg += \"[pypy-{}-{}]\".format(verinfo, pypy_version_info[3])\n msg += \", pytest-{}, py-{}, pluggy-{}\".format(\n _pytest._version.version, py.__version__, pluggy.__version__\n )\n if (\n self.verbosity > 0\n or self.config.option.debug\n or getattr(self.config.option, \"pastebin\", None)\n ):\n msg += \" -- \" + str(sys.executable)\n self.write_line(msg)\n lines = self.config.hook.pytest_report_header(\n config=self.config, startpath=self.startpath, startdir=self.startdir\n )\n self._write_report_lines_from_hooks(lines)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_terminal_summary_TerminalReporter.pytest_unconfigure.if_self__keyboardinterrup.self__report_keyboardinte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.pytest_terminal_summary_TerminalReporter.pytest_unconfigure.if_self__keyboardinterrup.self__report_keyboardinte", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 823, "end_line": 839, "span_ids": ["TerminalReporter.pytest_terminal_summary", "TerminalReporter.pytest_unconfigure", "TerminalReporter.pytest_keyboard_interrupt"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n @hookimpl(hookwrapper=True)\n def pytest_terminal_summary(self) -> Generator[None, None, None]:\n self.summary_errors()\n self.summary_failures()\n self.summary_warnings()\n self.summary_passes()\n yield\n self.short_test_summary()\n # Display any extra warnings from teardown here (if any).\n self.summary_warnings()\n\n def pytest_keyboard_interrupt(self, excinfo: ExceptionInfo[BaseException]) -> None:\n self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)\n\n def pytest_unconfigure(self) -> None:\n if self._keyboardinterrupt_memo is not None:\n self._report_keyboardinterrupt()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_passes_TerminalReporter.summary_passes.if_self_config_option_tbs.if_self_hasopt_P_.for_rep_in_reports_.self__handle_teardown_sec": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.summary_passes_TerminalReporter.summary_passes.if_self_config_option_tbs.if_self_hasopt_P_.for_rep_in_reports_.self__handle_teardown_sec", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 956, "end_line": 968, "span_ids": ["TerminalReporter.summary_passes"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def summary_passes(self) -> None:\n if self.config.option.tbstyle != \"no\":\n if self.hasopt(\"P\"):\n reports: List[TestReport] = self.getreports(\"passed\")\n if not reports:\n return\n self.write_sep(\"=\", \"PASSES\")\n for rep in reports:\n if rep.sections:\n msg = self._getfailureheadline(rep)\n self.write_sep(\"_\", msg, green=True, bold=True)\n self._outrep_summary(rep)\n self._handle_teardown_sections(rep.nodeid)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_teardown_reports_TerminalReporter.print_teardown_sections.for_secname_content_in_r.if_teardown_in_secname_.self__tw_line_content_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_teardown_reports_TerminalReporter.print_teardown_sections.for_secname_content_in_r.if_teardown_in_secname_.self__tw_line_content_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 970, "end_line": 993, "span_ids": ["TerminalReporter.print_teardown_sections", "TerminalReporter._handle_teardown_sections", "TerminalReporter._get_teardown_reports"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _get_teardown_reports(self, nodeid: str) -> List[TestReport]:\n reports = self.getreports(\"\")\n return [\n report\n for report in reports\n if report.when == \"teardown\" and report.nodeid == nodeid\n ]\n\n def _handle_teardown_sections(self, nodeid: str) -> None:\n for report in self._get_teardown_reports(nodeid):\n self.print_teardown_sections(report)\n\n def print_teardown_sections(self, rep: TestReport) -> None:\n showcapture = self.config.option.showcapture\n if showcapture == \"no\":\n return\n for secname, content in rep.sections:\n if showcapture != \"all\" and showcapture not in secname:\n continue\n if \"teardown\" in secname:\n self._tw.sep(\"-\", secname)\n if content[-1:] == \"\\n\":\n content = content[:-1]\n self._tw.line(content)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/timing.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/timing.py__", "embedding": null, "metadata": {"file_path": "src/_pytest/timing.py", "file_name": "timing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 13, "span_ids": ["impl", "docstring", "imports"], "tokens": 81}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Indirection for time functions.\n\nWe intentionally grab some \"time\" functions internally to avoid tests mocking \"time\" to affect\npytest runtime information (issue #185).\n\nFixture \"mock_timing\" also interacts with this module for pytest's own tests.\n\"\"\"\nfrom time import perf_counter\nfrom time import sleep\nfrom time import time\n\n__all__ = [\"perf_counter\", \"sleep\", \"time\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_pytest_pycollect_makeitem_pytest_pycollect_makeitem.return.item": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_pytest_pycollect_makeitem_pytest_pycollect_makeitem.return.item", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 45, "end_line": 58, "span_ids": ["pytest_pycollect_makeitem"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_pycollect_makeitem(\n collector: PyCollector, name: str, obj: object\n) -> Optional[\"UnitTestCase\"]:\n # Has unittest been imported and is obj a subclass of its TestCase?\n try:\n ut = sys.modules[\"unittest\"]\n # Type ignored because `ut` is an opaque module.\n if not issubclass(obj, ut.TestCase): # type: ignore\n return None\n except Exception:\n return None\n # Yes, so let's collect it.\n item: UnitTestCase = UnitTestCase.from_parent(collector, name=name, obj=obj)\n return item", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_UnitTestCase._inject_setup_teardown_fixtures_UnitTestCase._inject_setup_teardown_fixtures.if_method_fixture_._type_ignore_attr_defin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_UnitTestCase._inject_setup_teardown_fixtures_UnitTestCase._inject_setup_teardown_fixtures.if_method_fixture_._type_ignore_attr_defin", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": 
"text/x-python", "category": "implementation", "start_line": 97, "end_line": 120, "span_ids": ["UnitTestCase._inject_setup_teardown_fixtures"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UnitTestCase(Class):\n # Marker for fixturemanger.getfixtureinfo()\n\n def _inject_setup_teardown_fixtures(self, cls: type) -> None:\n \"\"\"Injects a hidden auto-use fixture to invoke setUpClass/setup_method and corresponding\n teardown functions (#517).\"\"\"\n class_fixture = _make_xunit_fixture(\n cls,\n \"setUpClass\",\n \"tearDownClass\",\n \"doClassCleanups\",\n scope=\"class\",\n pass_self=False,\n )\n if class_fixture:\n cls.__pytest_class_setup = class_fixture # type: ignore[attr-defined]\n\n method_fixture = _make_xunit_fixture(\n cls,\n \"setup_method\",\n \"teardown_method\",\n None,\n scope=\"function\",\n pass_self=True,\n )\n if method_fixture:\n cls.__pytest_method_setup = method_fixture # type: ignore[attr-defined]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_pytest_runtest_makereport_pytest_runtest_makereport.if_.call.excinfo.call2_excinfo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_pytest_runtest_makereport_pytest_runtest_makereport.if_.call.excinfo.call2_excinfo", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 334, "end_line": 358, "span_ids": ["pytest_runtest_makereport"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(tryfirst=True)\ndef pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None:\n if isinstance(item, TestCaseFunction):\n if item._excinfo:\n call.excinfo = item._excinfo.pop(0)\n try:\n del call.result\n except AttributeError:\n pass\n\n # Convert unittest.SkipTest to pytest.skip.\n # This is actually only needed for nose, which reuses unittest.SkipTest for\n # its own nose.SkipTest. 
For unittest TestCases, SkipTest is already\n # handled internally, and doesn't reach here.\n unittest = sys.modules.get(\"unittest\")\n if (\n unittest\n and call.excinfo\n and isinstance(call.excinfo.value, unittest.SkipTest) # type: ignore[attr-defined]\n ):\n excinfo = call.excinfo\n call2 = CallInfo[None].from_call(\n lambda: pytest.skip(str(excinfo.value)), call.when\n )\n call.excinfo = call2.excinfo", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_catch_warnings_for_item_catch_warnings_for_item.with_warnings_catch_warni.for_warning_message_in_lo.ihook_pytest_warning_reco": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_catch_warnings_for_item_catch_warnings_for_item.with_warnings_catch_warni.for_warning_message_in_lo.ihook_pytest_warning_reco", "embedding": null, "metadata": {"file_path": "src/_pytest/warnings.py", "file_name": "warnings.py", "file_type": "text/x-python", "category": "implementation", "start_line": 28, "end_line": 79, "span_ids": ["catch_warnings_for_item"], "tokens": 387}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@contextmanager\ndef catch_warnings_for_item(\n config: Config,\n ihook,\n when: \"Literal['config', 'collect', 'runtest']\",\n item: Optional[Item],\n) -> Generator[None, None, None]:\n \"\"\"Context manager that catches warnings generated in the contained execution block.\n\n ``item`` can be None if we are not in the context of an item execution.\n\n Each warning captured triggers the ``pytest_warning_recorded`` hook.\n \"\"\"\n config_filters = config.getini(\"filterwarnings\")\n cmdline_filters = config.known_args_namespace.pythonwarnings or []\n with warnings.catch_warnings(record=True) as log:\n # mypy can't infer that record=True means log is not None; help it.\n assert log is not None\n\n if not sys.warnoptions:\n # If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908).\n warnings.filterwarnings(\"always\", category=DeprecationWarning)\n warnings.filterwarnings(\"always\", category=PendingDeprecationWarning)\n\n apply_warning_filters(config_filters, cmdline_filters)\n\n # apply filters from \"filterwarnings\" marks\n nodeid = \"\" if item is None else item.nodeid\n if item is not None:\n for mark in item.iter_markers(name=\"filterwarnings\"):\n for arg in mark.args:\n warnings.filterwarnings(*parse_warning_filter(arg, escape=False))\n\n yield\n\n for warning_message in log:\n ihook.pytest_warning_captured.call_historic(\n kwargs=dict(\n warning_message=warning_message,\n when=when,\n item=item,\n location=None,\n )\n )\n ihook.pytest_warning_recorded.call_historic(\n kwargs=dict(\n warning_message=warning_message,\n nodeid=nodeid,\n when=when,\n location=None,\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_assertion_rewrite_TestGeneralUsage.test_assertion_rewrite.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_assertion_rewrite_TestGeneralUsage.test_assertion_rewrite.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 152, "end_line": 163, "span_ids": ["TestGeneralUsage.test_assertion_rewrite"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n @pytest.mark.parametrize(\"import_mode\", [\"prepend\", \"append\", \"importlib\"])\n def test_assertion_rewrite(self, pytester: Pytester, import_mode) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test_this():\n x = 0\n assert x\n \"\"\"\n )\n result = pytester.runpytest(p, f\"--import-mode={import_mode}\")\n result.stdout.fnmatch_lines([\"> assert x\", \"E assert 0\"])\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_nested_import_error_TestGeneralUsage.test_nested_import_error.assert_result_ret_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_nested_import_error_TestGeneralUsage.test_nested_import_error.assert_result_ret_2", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 165, "end_line": 181, "span_ids": ["TestGeneralUsage.test_nested_import_error"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_nested_import_error(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import import_fails\n def test_this():\n assert import_fails.a == 1\n \"\"\"\n )\n pytester.makepyfile(import_fails=\"import does_not_work\")\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\n \"ImportError while importing test module*\",\n \"*No module named *does_not_work*\",\n ]\n )\n assert result.ret == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_namespace_package_TestInvocationVariants.test_cmdline_python_namespace_package.None_5": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_cmdline_python_namespace_package_TestInvocationVariants.test_cmdline_python_namespace_package.None_5", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 674, "end_line": 741, "span_ids": ["TestInvocationVariants.test_cmdline_python_namespace_package"], "tokens": 583}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n\n def test_cmdline_python_namespace_package(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n \"\"\"Test --pyargs option with namespace packages (#1567).\n\n Ref: https://packaging.python.org/guides/packaging-namespace-packages/\n \"\"\"\n monkeypatch.delenv(\"PYTHONDONTWRITEBYTECODE\", raising=False)\n\n search_path = []\n for dirname in \"hello\", \"world\":\n d = pytester.mkdir(dirname)\n search_path.append(d)\n ns = d.joinpath(\"ns_pkg\")\n ns.mkdir()\n ns.joinpath(\"__init__.py\").write_text(\n \"__import__('pkg_resources').declare_namespace(__name__)\"\n )\n lib = ns.joinpath(dirname)\n lib.mkdir()\n lib.joinpath(\"__init__.py\").touch()\n lib.joinpath(f\"test_{dirname}.py\").write_text(\n f\"def test_{dirname}(): pass\\ndef test_other():pass\"\n )\n\n # The structure of the test directory is now:\n # .\n # \u251c\u2500\u2500 hello\n # \u2502 \u2514\u2500\u2500 ns_pkg\n # \u2502 \u251c\u2500\u2500 __init__.py\n # \u2502 \u2514\u2500\u2500 hello\n # \u2502 \u251c\u2500\u2500 __init__.py\n # \u2502 \u2514\u2500\u2500 test_hello.py\n # \u2514\u2500\u2500 world\n # \u2514\u2500\u2500 ns_pkg\n # \u251c\u2500\u2500 __init__.py\n # \u2514\u2500\u2500 world\n # \u251c\u2500\u2500 __init__.py\n # \u2514\u2500\u2500 test_world.py\n\n # NOTE: the different/reversed ordering is intentional here.\n monkeypatch.setenv(\"PYTHONPATH\", prepend_pythonpath(*search_path))\n for p in search_path:\n monkeypatch.syspath_prepend(p)\n\n # mixed module and filenames:\n monkeypatch.chdir(\"world\")\n result = pytester.runpytest(\"--pyargs\", \"-v\", \"ns_pkg.hello\", \"ns_pkg/world\")\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"test_hello.py::test_hello*PASSED*\",\n \"test_hello.py::test_other*PASSED*\",\n \"ns_pkg/world/test_world.py::test_world*PASSED*\",\n \"ns_pkg/world/test_world.py::test_other*PASSED*\",\n \"*4 passed in*\",\n ]\n )\n\n # specify tests within a module\n pytester.chdir()\n result = pytester.runpytest(\n \"--pyargs\", \"-v\", \"ns_pkg.world.test_world::test_other\"\n )\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\"*test_world.py::test_other*PASSED*\", \"*1 passed*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_invoke_test_and_doctestmodules_TestInvocationVariants.test_cmdline_python_package_symlink.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_invoke_test_and_doctestmodules_TestInvocationVariants.test_cmdline_python_package_symlink.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 743, "end_line": 810, "span_ids": ["TestInvocationVariants.test_invoke_test_and_doctestmodules", "TestInvocationVariants.test_cmdline_python_package_symlink"], "tokens": 578}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n\n def test_invoke_test_and_doctestmodules(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test():\n pass\n \"\"\"\n )\n result = pytester.runpytest(str(p) + \"::test\", \"--doctest-modules\")\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_cmdline_python_package_symlink(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n \"\"\"\n --pyargs with packages with path containing symlink can have conftest.py in\n their package (#2985)\n \"\"\"\n monkeypatch.delenv(\"PYTHONDONTWRITEBYTECODE\", raising=False)\n\n dirname = \"lib\"\n d = pytester.mkdir(dirname)\n foo = d.joinpath(\"foo\")\n foo.mkdir()\n foo.joinpath(\"__init__.py\").touch()\n lib = foo.joinpath(\"bar\")\n lib.mkdir()\n lib.joinpath(\"__init__.py\").touch()\n lib.joinpath(\"test_bar.py\").write_text(\n \"def test_bar(): pass\\ndef test_other(a_fixture):pass\"\n )\n lib.joinpath(\"conftest.py\").write_text(\n \"import pytest\\n@pytest.fixture\\ndef a_fixture():pass\"\n )\n\n d_local = pytester.mkdir(\"symlink_root\")\n symlink_location = d_local / \"lib\"\n symlink_or_skip(d, symlink_location, target_is_directory=True)\n\n # The structure of the test directory is now:\n # .\n # \u251c\u2500\u2500 symlink_root\n # \u2502 \u2514\u2500\u2500 lib -> ../lib\n # \u2514\u2500\u2500 lib\n # \u2514\u2500\u2500 foo\n # \u251c\u2500\u2500 __init__.py\n # \u2514\u2500\u2500 bar\n # \u251c\u2500\u2500 __init__.py\n # \u251c\u2500\u2500 conftest.py\n # \u2514\u2500\u2500 test_bar.py\n\n # NOTE: the different/reversed ordering is intentional here.\n search_path = [\"lib\", os.path.join(\"symlink_root\", \"lib\")]\n monkeypatch.setenv(\"PYTHONPATH\", prepend_pythonpath(*search_path))\n for p in search_path:\n monkeypatch.syspath_prepend(p)\n\n # module picked up in symlink-ed directory:\n # It picks up symlink_root/lib/foo/bar (symlink) via sys.path.\n result = pytester.runpytest(\"--pyargs\", \"-v\", \"foo.bar\")\n pytester.chdir()\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"symlink_root/lib/foo/bar/test_bar.py::test_bar PASSED*\",\n \"symlink_root/lib/foo/bar/test_bar.py::test_other PASSED*\",\n \"*2 passed*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_exconly_test_excinfo_repr_str.assert_str_excinfo2_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_exconly_test_excinfo_repr_str.assert_str_excinfo2_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 316, "end_line": 340, "span_ids": ["test_excinfo_exconly", "test_excinfo_repr_str", "test_excinfo_repr_str.CustomException", "test_excinfo_repr_str.CustomException.__repr__"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_excinfo_exconly():\n excinfo = pytest.raises(ValueError, h)\n assert excinfo.exconly().startswith(\"ValueError\")\n with pytest.raises(ValueError) as excinfo:\n raise ValueError(\"hello\\nworld\")\n msg = excinfo.exconly(tryshort=True)\n assert msg.startswith(\"ValueError\")\n assert msg.endswith(\"world\")\n\n\ndef test_excinfo_repr_str() -> None:\n excinfo1 = pytest.raises(ValueError, h)\n assert repr(excinfo1) == \"\"\n assert str(excinfo1) == \"\"\n\n class CustomException(Exception):\n def __repr__(self):\n return \"custom_repr\"\n\n def raises() -> None:\n raise CustomException()\n\n excinfo2 = pytest.raises(CustomException, raises)\n assert repr(excinfo2) == \"\"\n assert str(excinfo2) == \"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_truncated_TestFormattedExcinfo.test_repr_local_truncated.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_local_truncated_TestFormattedExcinfo.test_repr_local_truncated.None_5", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 575, "end_line": 587, "span_ids": ["TestFormattedExcinfo.test_repr_local_truncated"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_local_truncated(self) -> None:\n loc = {\"l\": [i for i in range(10)]}\n p = FormattedExcinfo(showlocals=True)\n truncated_reprlocals = p.repr_locals(loc)\n assert truncated_reprlocals is not None\n assert truncated_reprlocals.lines\n assert truncated_reprlocals.lines[0] == \"l = [0, 1, 2, 3, 4, 5, ...]\"\n\n q = FormattedExcinfo(showlocals=True, truncate_locals=False)\n full_reprlocals = q.repr_locals(loc)\n assert full_reprlocals is not None\n assert full_reprlocals.lines\n assert full_reprlocals.lines[0] == \"l = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_short_TestFormattedExcinfo.test_repr_tracebackentry_short.None_8": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_tracebackentry_short_TestFormattedExcinfo.test_repr_tracebackentry_short.None_8", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 675, "end_line": 702, "span_ids": ["TestFormattedExcinfo.test_repr_tracebackentry_short"], "tokens": 275}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_tracebackentry_short(self, importasmod) -> None:\n mod = importasmod(\n \"\"\"\n def func1():\n raise ValueError(\"hello\")\n def entry():\n func1()\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n p = FormattedExcinfo(style=\"short\")\n reprtb = p.repr_traceback_entry(excinfo.traceback[-2])\n lines = reprtb.lines\n basename = Path(mod.__file__).name\n assert lines[0] == \" func1()\"\n assert reprtb.reprfileloc is not None\n assert basename in str(reprtb.reprfileloc.path)\n assert reprtb.reprfileloc.lineno == 5\n\n # test last entry\n p = FormattedExcinfo(style=\"short\")\n reprtb = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)\n lines = reprtb.lines\n assert lines[0] == ' raise ValueError(\"hello\")'\n assert lines[1] == \"E ValueError: hello\"\n assert reprtb.reprfileloc is not None\n assert basename in str(reprtb.reprfileloc.path)\n assert reprtb.reprfileloc.lineno == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_and_excinfo_TestFormattedExcinfo.test_repr_traceback_and_excinfo.for_style_in_styles_.assert_repr_reprcrash_mes": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_traceback_and_excinfo_TestFormattedExcinfo.test_repr_traceback_and_excinfo.for_style_in_styles_.assert_repr_reprcrash_mes", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 765, "end_line": 791, "span_ids": ["TestFormattedExcinfo.test_repr_traceback_and_excinfo"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_traceback_and_excinfo(self, importasmod) -> None:\n mod = importasmod(\n \"\"\"\n def f(x):\n raise ValueError(x)\n def entry():\n f(0)\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.entry)\n\n styles: Tuple[_TracebackStyle, ...] 
= (\"long\", \"short\")\n for style in styles:\n p = FormattedExcinfo(style=style)\n reprtb = p.repr_traceback(excinfo)\n assert len(reprtb.reprentries) == 2\n assert reprtb.style == style\n assert not reprtb.extraline\n repr = p.repr_excinfo(excinfo)\n assert repr.reprtraceback\n assert len(repr.reprtraceback.reprentries) == len(reprtb.reprentries)\n\n assert repr.chain[0][0]\n assert len(repr.chain[0][0].reprentries) == len(reprtb.reprentries)\n assert repr.reprcrash is not None\n assert repr.reprcrash.path.endswith(\"mod.py\")\n assert repr.reprcrash.message == \"ValueError: 0\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_mock_timing_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_mock_timing_", "embedding": null, "metadata": {"file_path": "testing/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 181, "end_line": 217, "span_ids": ["mock_timing.MockTiming:2", "mock_timing", "mock_timing.MockTiming"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef mock_timing(monkeypatch: MonkeyPatch):\n \"\"\"Mocks _pytest.timing with a known object that can be used to control timing in tests\n deterministically.\n\n pytest itself should always use functions from `_pytest.timing` instead of `time` directly.\n\n This then allows us more control over time during testing, if testing code also\n uses `_pytest.timing` functions.\n\n Time is static, and only advances through `sleep` calls, thus tests might sleep over large\n numbers and obtain accurate time() calls at the end, making tests reliable and instant.\n \"\"\"\n import attr\n\n @attr.s\n class MockTiming:\n\n _current_time = attr.ib(default=1590150050.0)\n\n def sleep(self, seconds):\n self._current_time += seconds\n\n def time(self):\n return self._current_time\n\n def patch(self):\n from _pytest import timing\n\n monkeypatch.setattr(timing, \"sleep\", self.sleep)\n monkeypatch.setattr(timing, \"time\", self.time)\n monkeypatch.setattr(timing, \"perf_counter\", self.time)\n\n result = MockTiming()\n result.patch()\n return result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py_from_dataclasses_import_d_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py_from_dataclasses_import_d_", "embedding": null, "metadata": {"file_path": "testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py", "file_name": "test_compare_recursive_dataclasses.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 45, "span_ids": ["S", "C", "test_recursive_dataclasses", "imports", "C2", "C3"], "tokens": 197}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from dataclasses import dataclass\n\n\n@dataclass\nclass S:\n a: int\n b: str\n\n\n@dataclass\nclass C:\n c: S\n d: S\n\n\n@dataclass\nclass C2:\n e: C\n f: S\n\n\n@dataclass\nclass C3:\n g: S\n h: C2\n i: str\n j: str\n\n\ndef test_recursive_dataclasses():\n left = C3(\n S(10, \"ten\"),\n C2(C(S(1, \"one\"), S(2, \"two\")), S(2, \"three\")),\n \"equal\",\n \"left\",\n )\n right = C3(\n S(20, \"xxx\"),\n C2(C(S(1, \"one\"), S(2, \"yyy\")), S(3, \"three\")),\n \"equal\",\n \"right\",\n )\n\n assert left == right", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py_from_dataclasses_import_d_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py_from_dataclasses_import_d_", "embedding": null, "metadata": {"file_path": "testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py", "file_name": "test_compare_two_different_dataclasses.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 20, "span_ids": ["test_comparing_two_different_data_classes.SimpleDataObjectTwo", "test_comparing_two_different_data_classes.SimpleDataObjectOne", "test_comparing_two_different_data_classes.SimpleDataObjectOne:2", "imports", "test_comparing_two_different_data_classes.SimpleDataObjectTwo:2", "test_comparing_two_different_data_classes"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from dataclasses import dataclass\nfrom dataclasses import field\n\n\ndef test_comparing_two_different_data_classes() -> None:\n @dataclass\n class SimpleDataObjectOne:\n field_a: int = field()\n field_b: str = field()\n\n @dataclass\n class SimpleDataObjectTwo:\n field_a: int = field()\n field_b: str = field()\n\n left = SimpleDataObjectOne(1, \"b\")\n right = SimpleDataObjectTwo(1, \"c\")\n\n assert left != right # type: ignore[comparison-overlap]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_asyncio.py_from_typing_import_List_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_asyncio.py_from_typing_import_List_", "embedding": null, "metadata": {"file_path": "testing/example_scripts/unittest/test_unittest_asyncio.py", "file_name": "test_unittest_asyncio.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 26, "span_ids": ["impl", "AsyncArguments.asyncTearDown", "AsyncArguments", 
"AsyncArguments.test_something_async_fails", "AsyncArguments.test_teardowns", "AsyncArguments.test_something_async", "imports"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import List\nfrom unittest import IsolatedAsyncioTestCase\n\n\nteardowns: List[None] = []\n\n\nclass AsyncArguments(IsolatedAsyncioTestCase):\n async def asyncTearDown(self):\n teardowns.append(None)\n\n async def test_something_async(self):\n async def addition(x, y):\n return x + y\n\n self.assertEqual(await addition(2, 2), 4)\n\n async def test_something_async_fails(self):\n async def addition(x, y):\n return x + y\n\n self.assertEqual(await addition(2, 2), 3)\n\n def test_teardowns(self):\n assert len(teardowns) == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_wcwidth.py_pytest_test_wcwidth.assert_wcwidth_c_expe": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_wcwidth.py_pytest_test_wcwidth.assert_wcwidth_c_expe", "embedding": null, "metadata": {"file_path": "testing/io/test_wcwidth.py", "file_name": "test_wcwidth.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 22, "span_ids": ["test_wcwidth", "imports"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom _pytest._io.wcwidth import wcswidth\nfrom _pytest._io.wcwidth import wcwidth\n\n\n@pytest.mark.parametrize(\n (\"c\", \"expected\"),\n [\n (\"\\0\", 0),\n (\"\\n\", -1),\n (\"a\", 1),\n (\"1\", 1),\n (\"\u05d0\", 1),\n (\"\\u200B\", 0),\n (\"\\u1ABE\", 0),\n (\"\\u0591\", 0),\n (\"\ud83c\ude50\", 2),\n (\"\uff04\", 2),\n ],\n)\ndef test_wcwidth(c: str, expected: int) -> None:\n assert wcwidth(c) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_wcwidth.py_test_wcswidth_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_wcwidth.py_test_wcswidth_", "embedding": null, "metadata": {"file_path": "testing/io/test_wcwidth.py", "file_name": "test_wcwidth.py", "file_type": "text/x-python", "category": "test", "start_line": 25, "end_line": 39, "span_ids": ["test_wcswidth"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"s\", \"expected\"),\n [\n (\"\", 0),\n (\"hello, world!\", 13),\n (\"hello, world!\\n\", -1),\n 
(\"0123456789\", 10),\n (\"\u05e9\u05dc\u05d5\u05dd, \u05e2\u05d5\u05dc\u05dd!\", 11),\n (\"\u05e9\u05b0\u05d1\u05bb\u05e2\u05b8\u05d9\u05d9\u05dd\", 6),\n (\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\", 6),\n ],\n)\ndef test_wcswidth(s: str, expected: int) -> None:\n assert wcswidth(s) == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_captures_for_all_stages_test_caplog_captures_for_all_stages.assert_set_caplog__item__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_captures_for_all_stages_test_caplog_captures_for_all_stages.assert_set_caplog__item__", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 131, "end_line": 140, "span_ids": ["test_caplog_captures_for_all_stages"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_caplog_captures_for_all_stages(caplog, logging_during_setup_and_teardown):\n assert not caplog.records\n assert not caplog.get_records(\"call\")\n logger.info(\"a_call_log\")\n assert [x.message for x in caplog.get_records(\"call\")] == [\"a_call_log\"]\n\n assert [x.message for x in caplog.get_records(\"setup\")] == [\"a_setup_log\"]\n\n # This reaches into private API, don't use this type of thing in real tests!\n assert set(caplog._item._store[caplog_records_key]) == {\"setup\", \"call\"}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_ini_controls_global_log_level_test_ini_controls_global_log_level.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_ini_controls_global_log_level_test_ini_controls_global_log_level.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 175, "end_line": 199, "span_ids": ["test_ini_controls_global_log_level"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_ini_controls_global_log_level(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_level_override(request, caplog):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_level == logging.ERROR\n logger = logging.getLogger('catchlog')\n logger.warning(\"WARNING message won't be shown\")\n logger.error(\"ERROR message will be shown\")\n assert 'WARNING' not in caplog.text\n 
assert 'ERROR' in caplog.text\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_level=ERROR\n \"\"\"\n )\n\n result = pytester.runpytest()\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_can_override_global_log_level_test_caplog_can_override_global_log_level.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_can_override_global_log_level_test_caplog_can_override_global_log_level.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 202, "end_line": 238, "span_ids": ["test_caplog_can_override_global_log_level"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_caplog_can_override_global_log_level(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_level_override(request, caplog):\n logger = logging.getLogger('catchlog')\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_level == logging.WARNING\n\n logger.info(\"INFO message won't be shown\")\n\n caplog.set_level(logging.INFO, logger.name)\n\n with caplog.at_level(logging.DEBUG, logger.name):\n logger.debug(\"DEBUG message will be shown\")\n\n logger.debug(\"DEBUG message won't be shown\")\n\n with caplog.at_level(logging.CRITICAL, logger.name):\n logger.warning(\"WARNING message won't be shown\")\n\n logger.debug(\"DEBUG message won't be shown\")\n logger.info(\"INFO message will be shown\")\n\n assert \"message won't be shown\" not in caplog.text\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_level=WARNING\n \"\"\"\n )\n\n result = pytester.runpytest()\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_live_logging_suspends_capture_test_live_logging_suspends_capture.assert_cast_io_StringIO_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_reporting.py_test_live_logging_suspends_capture_test_live_logging_suspends_capture.assert_cast_io_StringIO_", "embedding": null, "metadata": {"file_path": "testing/logging/test_reporting.py", "file_name": "test_reporting.py", "file_type": "text/x-python", "category": "test", "start_line": 821, "end_line": 867, "span_ids": ["test_live_logging_suspends_capture.MockCaptureManager", "test_live_logging_suspends_capture.DummyTerminal.section", "test_live_logging_suspends_capture.DummyTerminal", "test_live_logging_suspends_capture.MockCaptureManager:2", "test_live_logging_suspends_capture"], "tokens": 363}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"has_capture_manager\", [True, False])\ndef test_live_logging_suspends_capture(\n has_capture_manager: bool, request: FixtureRequest\n) -> None:\n \"\"\"Test that capture manager is suspended when we emitting messages for live logging.\n\n This tests the implementation calls instead of behavior because it is difficult/impossible to do it using\n ``pytester`` facilities because they do their own capturing.\n\n We parametrize the test to also make sure _LiveLoggingStreamHandler works correctly if no capture manager plugin\n is installed.\n \"\"\"\n import logging\n import contextlib\n from functools import partial\n from _pytest.logging import _LiveLoggingStreamHandler\n\n class MockCaptureManager:\n calls = []\n\n @contextlib.contextmanager\n def global_and_fixture_disabled(self):\n self.calls.append(\"enter disabled\")\n yield\n self.calls.append(\"exit disabled\")\n\n class DummyTerminal(io.StringIO):\n def section(self, *args, **kwargs):\n pass\n\n out_file = cast(TerminalReporter, DummyTerminal())\n capture_manager = (\n cast(CaptureManager, MockCaptureManager()) if has_capture_manager else None\n )\n handler = _LiveLoggingStreamHandler(out_file, capture_manager)\n handler.set_when(\"call\")\n\n logger = logging.getLogger(__name__ + \".test_live_logging_suspends_capture\")\n logger.addHandler(handler)\n request.addfinalizer(partial(logger.removeHandler, handler))\n\n logger.critical(\"some message\")\n if has_capture_manager:\n assert MockCaptureManager.calls == [\"enter disabled\", \"exit disabled\"]\n else:\n assert MockCaptureManager.calls == []\n assert cast(io.StringIO, out_file).getvalue() == \"\\nsome message\\n\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_show_traceback_import_error_TestModule.test_show_traceback_import_error.if_verbose_2_.else_.assert__pytest_not_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_show_traceback_import_error_TestModule.test_show_traceback_import_error.if_verbose_2_.else_.assert__pytest_not_in_s", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 91, "end_line": 126, "span_ids": ["TestModule.test_show_traceback_import_error"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestModule:\n\n @pytest.mark.parametrize(\"verbose\", [0, 1, 2])\n def test_show_traceback_import_error(\n self, pytester: Pytester, verbose: int\n ) -> None:\n \"\"\"Import errors when collecting modules should display the traceback (#1976).\n\n With low verbosity we omit pytest and internal modules, otherwise show all traceback entries.\n 
\"\"\"\n pytester.makepyfile(\n foo_traceback_import_error=\"\"\"\n from bar_traceback_import_error import NOT_AVAILABLE\n \"\"\",\n bar_traceback_import_error=\"\",\n )\n pytester.makepyfile(\n \"\"\"\n import foo_traceback_import_error\n \"\"\"\n )\n args = (\"-v\",) * verbose\n result = pytester.runpytest(*args)\n result.stdout.fnmatch_lines(\n [\n \"ImportError while importing test module*\",\n \"Traceback:\",\n \"*from bar_traceback_import_error import NOT_AVAILABLE\",\n \"*cannot import name *NOT_AVAILABLE*\",\n ]\n )\n assert result.ret == 2\n\n stdout = result.stdout.str()\n if verbose == 2:\n assert \"_pytest\" in stdout\n else:\n assert \"_pytest\" not in stdout", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_with_native_fixtures_TestScopeOrdering.test_func_closure_with_native_fixtures.assert_FIXTURE_ORDER_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestScopeOrdering.test_func_closure_with_native_fixtures_TestScopeOrdering.test_func_closure_with_native_fixtures.assert_FIXTURE_ORDER_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3975, "end_line": 4028, "span_ids": ["TestScopeOrdering.test_func_closure_with_native_fixtures"], "tokens": 437}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScopeOrdering:\n\n def test_func_closure_with_native_fixtures(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n \"\"\"Sanity check that verifies the order returned by the closures and the actual fixture execution order:\n The execution order may differ because of fixture inter-dependencies.\n \"\"\"\n monkeypatch.setattr(pytest, \"FIXTURE_ORDER\", [], raising=False)\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n FIXTURE_ORDER = pytest.FIXTURE_ORDER\n\n @pytest.fixture(scope=\"session\")\n def s1():\n FIXTURE_ORDER.append('s1')\n\n @pytest.fixture(scope=\"package\")\n def p1():\n FIXTURE_ORDER.append('p1')\n\n @pytest.fixture(scope=\"module\")\n def m1():\n FIXTURE_ORDER.append('m1')\n\n @pytest.fixture(scope='session')\n def my_tmpdir_factory():\n FIXTURE_ORDER.append('my_tmpdir_factory')\n\n @pytest.fixture\n def my_tmpdir(my_tmpdir_factory):\n FIXTURE_ORDER.append('my_tmpdir')\n\n @pytest.fixture\n def f1(my_tmpdir):\n FIXTURE_ORDER.append('f1')\n\n @pytest.fixture\n def f2():\n FIXTURE_ORDER.append('f2')\n\n def test_foo(f1, p1, m1, f2, s1): pass\n \"\"\"\n )\n items, _ = pytester.inline_genitems()\n request = FixtureRequest(items[0], _ispytest=True)\n # order of fixtures based on their scope and position in the parameter list\n assert (\n request.fixturenames == \"s1 my_tmpdir_factory p1 m1 f1 f2 my_tmpdir\".split()\n )\n pytester.runpytest()\n # actual fixture execution differs: dependent fixtures must be created first (\"my_tmpdir\")\n FIXTURE_ORDER = pytest.FIXTURE_ORDER # type: ignore[attr-defined]\n assert FIXTURE_ORDER == \"s1 
my_tmpdir_factory p1 m1 my_tmpdir f1 f2\".split()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_from_typing_import_Any_TestOEJSKITSpecials.test_funcarg_non_pycollectobj.assert_clscol_funcargs_a": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_from_typing_import_Any_TestOEJSKITSpecials.test_funcarg_non_pycollectobj.assert_clscol_funcargs_a", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 42, "span_ids": ["TestOEJSKITSpecials.test_funcarg_non_pycollectobj", "imports", "TestOEJSKITSpecials"], "tokens": 296}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Any\n\nimport pytest\nfrom _pytest import runner\nfrom _pytest._code import getfslineno\nfrom _pytest.fixtures import getfixturemarker\nfrom _pytest.pytester import Pytester\n\n\nclass TestOEJSKITSpecials:\n def test_funcarg_non_pycollectobj(\n self, pytester: Pytester, recwarn\n ) -> None: # rough jstests usage\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_pycollect_makeitem(collector, name, obj):\n if name == \"MyClass\":\n return MyCollector.from_parent(collector, name=name)\n class MyCollector(pytest.Collector):\n def reportinfo(self):\n return self.fspath, 3, \"xyz\"\n \"\"\"\n )\n modcol = pytester.getmodulecol(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg1(request):\n return 42\n class MyClass(object):\n pass\n \"\"\"\n )\n # this hook finds funcarg factories\n rep = runner.collect_one_node(collector=modcol)\n # TODO: Don't treat as Any.\n clscol: Any = rep.result[0]\n clscol.obj = lambda arg1: None\n clscol.funcargs = {}\n pytest._fillfuncargs(clscol)\n assert clscol.funcargs[\"arg1\"] == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestOEJSKITSpecials.test_autouse_fixture_TestOEJSKITSpecials.test_autouse_fixture.assert_not_clscol_funcarg": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_TestOEJSKITSpecials.test_autouse_fixture_TestOEJSKITSpecials.test_autouse_fixture.assert_not_clscol_funcarg", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 44, "end_line": 78, "span_ids": ["TestOEJSKITSpecials.test_autouse_fixture"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestOEJSKITSpecials:\n\n def test_autouse_fixture(\n self, pytester: Pytester, recwarn\n ) -> None: # rough jstests usage\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_pycollect_makeitem(collector, name, obj):\n if name == \"MyClass\":\n return MyCollector.from_parent(collector, name=name)\n class MyCollector(pytest.Collector):\n def reportinfo(self):\n return self.fspath, 3, \"xyz\"\n \"\"\"\n )\n modcol = pytester.getmodulecol(\n \"\"\"\n import pytest\n @pytest.fixture(autouse=True)\n def hello():\n pass\n @pytest.fixture\n def arg1(request):\n return 42\n class MyClass(object):\n pass\n \"\"\"\n )\n # this hook finds funcarg factories\n rep = runner.collect_one_node(modcol)\n # TODO: Don't treat as Any.\n clscol: Any = rep.result[0]\n clscol.obj = lambda: None\n clscol.funcargs = {}\n pytest._fillfuncargs(clscol)\n assert not clscol.funcargs", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_test_wrapped_getfslineno_test_wrapped_getfslineno.assert_lineno_lineno2_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/integration.py_test_wrapped_getfslineno_test_wrapped_getfslineno.assert_lineno_lineno2_", "embedding": null, "metadata": {"file_path": "testing/python/integration.py", "file_name": "integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 81, "end_line": 96, "span_ids": ["test_wrapped_getfslineno"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_wrapped_getfslineno() -> None:\n def func():\n pass\n\n def wrap(f):\n func.__wrapped__ = f # type: ignore\n func.patchings = [\"qwe\"] # type: ignore\n return func\n\n @wrap\n def wrapped_func(x, y, z):\n pass\n\n fs, lineno = getfslineno(wrapped_func)\n fs2, lineno2 = getfslineno(wrap)\n assert lineno > lineno2, \"getfslineno does not unwrap correctly\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_iterable_full_diff_TestAssert_reprcompare.test_iterable_full_diff.assert_n_join_verbose_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_iterable_full_diff_TestAssert_reprcompare.test_iterable_full_diff.assert_n_join_verbose_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 389, "end_line": 441, "span_ids": ["TestAssert_reprcompare.test_iterable_full_diff"], "tokens": 349}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "class TestAssert_reprcompare:\n\n @pytest.mark.parametrize(\n [\"left\", \"right\", \"expected\"],\n [\n pytest.param(\n [0, 1],\n [0, 2],\n \"\"\"\n Full diff:\n - [0, 2]\n ? ^\n + [0, 1]\n ? ^\n \"\"\",\n id=\"lists\",\n ),\n pytest.param(\n {0: 1},\n {0: 2},\n \"\"\"\n Full diff:\n - {0: 2}\n ? ^\n + {0: 1}\n ? ^\n \"\"\",\n id=\"dicts\",\n ),\n pytest.param(\n {0, 1},\n {0, 2},\n \"\"\"\n Full diff:\n - {0, 2}\n ? ^\n + {0, 1}\n ? ^\n \"\"\",\n id=\"sets\",\n ),\n ],\n )\n def test_iterable_full_diff(self, left, right, expected) -> None:\n \"\"\"Test the full diff assertion failure explanation.\n\n When verbose is False, then just a -v notice to get the diff is rendered,\n when verbose is True, then ndiff of the pprint is returned.\n \"\"\"\n expl = callequal(left, right, verbose=0)\n assert expl is not None\n assert expl[-1] == \"Use -v to get the full diff\"\n verbose_expl = callequal(left, right, verbose=1)\n assert verbose_expl is not None\n assert \"\\n\".join(verbose_expl).endswith(textwrap.dedent(expected).strip())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_tuples_TestAssert_reprcompare.test_repr_verbose.None_7": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare.test_list_tuples_TestAssert_reprcompare.test_repr_verbose.None_7", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 666, "end_line": 698, "span_ids": ["TestAssert_reprcompare.test_repr_verbose.Nums", "TestAssert_reprcompare.test_list_tuples", "TestAssert_reprcompare.test_repr_verbose", "TestAssert_reprcompare.test_repr_verbose.Nums.__init__"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare:\n\n def test_list_tuples(self) -> None:\n expl = callequal([], [(1, 2)])\n assert expl is not None\n assert len(expl) > 1\n expl = callequal([(1, 2)], [])\n assert expl is not None\n assert len(expl) > 1\n\n def test_repr_verbose(self) -> None:\n class Nums:\n def __init__(self, nums):\n self.nums = nums\n\n def __repr__(self):\n return str(self.nums)\n\n list_x = list(range(5000))\n list_y = list(range(5000))\n list_y[len(list_y) // 2] = 3\n nums_x = Nums(list_x)\n nums_y = Nums(list_y)\n\n assert callequal(nums_x, nums_y) is None\n\n expl = callequal(nums_x, nums_y, verbose=1)\n assert expl is not None\n assert \"+\" + repr(nums_x) in expl\n assert \"-\" + repr(nums_y) in expl\n\n expl = callequal(nums_x, nums_y, verbose=2)\n assert expl is not None\n assert \"+\" + repr(nums_x) in expl\n assert \"-\" + repr(nums_y) in expl", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_TestAssert_reprcompare_dataclass.test_recursive_dataclasses.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_TestAssert_reprcompare_dataclass.test_recursive_dataclasses.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 798, "end_line": 815, "span_ids": ["TestAssert_reprcompare_dataclass.test_recursive_dataclasses"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_dataclass:\n\n @pytest.mark.skipif(sys.version_info < (3, 7), reason=\"Dataclasses in Python3.7+\")\n def test_recursive_dataclasses(self, pytester: Pytester) -> None:\n p = pytester.copy_example(\"dataclasses/test_compare_recursive_dataclasses.py\")\n result = pytester.runpytest(p)\n result.assert_outcomes(failed=1, passed=0)\n result.stdout.fnmatch_lines(\n [\n \"E Omitting 1 identical items, use -vv to show\",\n \"E Differing attributes:\",\n \"E ['g', 'h', 'j']\",\n \"E \",\n \"E Drill down into differing attribute g:\",\n \"E g: S(a=10, b='ten') != S(a=20, b='xxx')...\",\n \"E \",\n \"E ...Full output truncated (52 lines hidden), use '-vv' to show\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_verbose_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_verbose.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_verbose_TestAssert_reprcompare_dataclass.test_recursive_dataclasses_verbose.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 817, "end_line": 848, "span_ids": ["TestAssert_reprcompare_dataclass.test_recursive_dataclasses_verbose"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_dataclass:\n\n @pytest.mark.skipif(sys.version_info < (3, 7), reason=\"Dataclasses in Python3.7+\")\n def test_recursive_dataclasses_verbose(self, pytester: Pytester) -> None:\n p = pytester.copy_example(\"dataclasses/test_compare_recursive_dataclasses.py\")\n result = pytester.runpytest(p, \"-vv\")\n result.assert_outcomes(failed=1, passed=0)\n result.stdout.fnmatch_lines(\n [\n \"E Matching attributes:\",\n 
\"E ['i']\",\n \"E Differing attributes:\",\n \"E ['g', 'h', 'j']\",\n \"E \",\n \"E Drill down into differing attribute g:\",\n \"E g: S(a=10, b='ten') != S(a=20, b='xxx')\",\n \"E \",\n \"E Differing attributes:\",\n \"E ['a', 'b']\",\n \"E \",\n \"E Drill down into differing attribute a:\",\n \"E a: 10 != 20\",\n \"E +10\",\n \"E -20\",\n \"E \",\n \"E Drill down into differing attribute b:\",\n \"E b: 'ten' != 'xxx'\",\n \"E - xxx\",\n \"E + ten\",\n \"E \",\n \"E Drill down into differing attribute h:\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_recursive_TestAssert_reprcompare_attrsclass.test_attrs_recursive.for_line_in_lines_1_.assert_field_c_not_in_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_recursive_TestAssert_reprcompare_attrsclass.test_attrs_recursive.for_line_in_lines_1_.assert_field_c_not_in_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 876, "end_line": 895, "span_ids": ["TestAssert_reprcompare_attrsclass.test_attrs_recursive.OtherDataObject:2", "TestAssert_reprcompare_attrsclass.test_attrs_recursive.OtherDataObject", "TestAssert_reprcompare_attrsclass.test_attrs_recursive", "TestAssert_reprcompare_attrsclass.test_attrs_recursive.SimpleDataObject:2", "TestAssert_reprcompare_attrsclass.test_attrs_recursive.SimpleDataObject"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_attrsclass:\n\n def test_attrs_recursive(self) -> None:\n @attr.s\n class OtherDataObject:\n field_c = attr.ib()\n field_d = attr.ib()\n\n @attr.s\n class SimpleDataObject:\n field_a = attr.ib()\n field_b = attr.ib()\n\n left = SimpleDataObject(OtherDataObject(1, \"a\"), \"b\")\n right = SimpleDataObject(OtherDataObject(1, \"b\"), \"b\")\n\n lines = callequal(left, right)\n assert lines is not None\n assert \"Matching attributes\" not in lines\n for line in lines[1:]:\n assert \"field_b:\" not in line\n assert \"field_c:\" not in line", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose_TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose.assert_field_d_a_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose_TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose.assert_field_d_a_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 914, "end_line": 931, "span_ids": 
["TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose.SimpleDataObject", "TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose.SimpleDataObject:2", "TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose.OtherDataObject", "TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose", "TestAssert_reprcompare_attrsclass.test_attrs_recursive_verbose.OtherDataObject:2"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_attrsclass:\n\n def test_attrs_recursive_verbose(self) -> None:\n @attr.s\n class OtherDataObject:\n field_c = attr.ib()\n field_d = attr.ib()\n\n @attr.s\n class SimpleDataObject:\n field_a = attr.ib()\n field_b = attr.ib()\n\n left = SimpleDataObject(OtherDataObject(1, \"a\"), \"b\")\n right = SimpleDataObject(OtherDataObject(1, \"b\"), \"b\")\n\n lines = callequal(left, right)\n assert lines is not None\n # indentation in output because of nested object structure\n assert \" field_d: 'a' != 'b'\" in lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_getmsg_getmsg.try_.else_.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_getmsg_getmsg.try_.else_.return.None", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 40, "end_line": 64, "span_ids": ["getmsg"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def getmsg(\n f, extra_ns: Optional[Mapping[str, object]] = None, *, must_pass: bool = False\n) -> Optional[str]:\n \"\"\"Rewrite the assertions in f, run it, and get the failure message.\"\"\"\n src = \"\\n\".join(_pytest._code.Code.from_function(f).source().lines)\n mod = rewrite(src)\n code = compile(mod, \"\", \"exec\")\n ns: Dict[str, object] = {}\n if extra_ns is not None:\n ns.update(extra_ns)\n exec(code, ns)\n func = ns[f.__name__]\n try:\n func() # type: ignore[operator]\n except AssertionError:\n if must_pass:\n pytest.fail(\"shouldn't have raised\")\n s = str(sys.exc_info()[1])\n if not s.startswith(\"assert\"):\n return \"AssertionError: \" + s\n return s\n else:\n if not must_pass:\n pytest.fail(\"function didn't raise at all\")\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_name_TestAssertionRewrite.test_name.if_verbose_1_.else_.assert_lines_assert_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_name_TestAssertionRewrite.test_name.if_verbose_1_.else_.assert_lines_assert_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 159, "end_line": 212, "span_ids": ["TestAssertionRewrite.test_name", "TestAssertionRewrite.test_name.X:2", "TestAssertionRewrite.test_name.X"], "tokens": 394}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_name(self, request) -> None:\n def f1() -> None:\n assert False\n\n assert getmsg(f1) == \"assert False\"\n\n def f2() -> None:\n f = False\n assert f\n\n assert getmsg(f2) == \"assert False\"\n\n def f3() -> None:\n assert a_global # type: ignore[name-defined] # noqa\n\n assert getmsg(f3, {\"a_global\": False}) == \"assert False\"\n\n def f4() -> None:\n assert sys == 42 # type: ignore[comparison-overlap]\n\n verbose = request.config.getoption(\"verbose\")\n msg = getmsg(f4, {\"sys\": sys})\n if verbose > 0:\n assert msg == (\n \"assert == 42\\n\"\n \" +\\n\"\n \" -42\"\n )\n else:\n assert msg == \"assert sys == 42\"\n\n def f5() -> None:\n assert cls == 42 # type: ignore[name-defined] # noqa: F821\n\n class X:\n pass\n\n msg = getmsg(f5, {\"cls\": X})\n assert msg is not None\n lines = msg.splitlines()\n if verbose > 1:\n assert lines == [\n f\"assert {X!r} == 42\",\n f\" +{X!r}\",\n \" -42\",\n ]\n elif verbose > 0:\n assert lines == [\n \"assert .X'> == 42\",\n f\" +{X!r}\",\n \" -42\",\n ]\n else:\n assert lines == [\"assert cls == 42\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assertrepr_compare_same_width_TestAssertionRewrite.test_assertrepr_compare_same_width.if_request_config_getopti.else_.assert_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assertrepr_compare_same_width_TestAssertionRewrite.test_assertrepr_compare_same_width.if_request_config_getopti.else_.assert_line_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 214, "end_line": 232, "span_ids": ["TestAssertionRewrite.test_assertrepr_compare_same_width"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_assertrepr_compare_same_width(self, request) -> None:\n \"\"\"Should use same width/truncation with same initial width.\"\"\"\n\n def f() -> None:\n assert \"1234567890\" * 5 + \"A\" == \"1234567890\" * 5 + \"B\"\n\n msg = getmsg(f)\n assert msg is not 
None\n line = msg.splitlines()[0]\n if request.config.getoption(\"verbose\") > 1:\n assert line == (\n \"assert '12345678901234567890123456789012345678901234567890A' \"\n \"== '12345678901234567890123456789012345678901234567890B'\"\n )\n else:\n assert line == (\n \"assert '123456789012...901234567890A' \"\n \"== '123456789012...901234567890B'\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails_TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails.if_request_config_getopti.else_.assert_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails_TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails.if_request_config_getopti.else_.assert_lines_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 234, "end_line": 261, "span_ids": ["TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails.Y:2", "TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails", "TestAssertionRewrite.test_dont_rewrite_if_hasattr_fails.Y"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_dont_rewrite_if_hasattr_fails(self, request) -> None:\n class Y:\n \"\"\"A class whose getattr fails, but not with `AttributeError`.\"\"\"\n\n def __getattr__(self, attribute_name):\n raise KeyError()\n\n def __repr__(self) -> str:\n return \"Y\"\n\n def __init__(self) -> None:\n self.foo = 3\n\n def f() -> None:\n assert cls().foo == 2 # type: ignore[name-defined] # noqa: F821\n\n # XXX: looks like the \"where\" should also be there in verbose mode?!\n msg = getmsg(f, {\"cls\": Y})\n assert msg is not None\n lines = msg.splitlines()\n if request.config.getoption(\"verbose\") > 0:\n assert lines == [\"assert 3 == 2\", \" +3\", \" -2\"]\n else:\n assert lines == [\n \"assert 3 == 2\",\n \" + where 3 = Y.foo\",\n \" + where Y = cls()\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_boolop_TestAssertionRewrite.test_boolop.getmsg_f11_must_pass_Tru": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_boolop_TestAssertionRewrite.test_boolop.getmsg_f11_must_pass_Tru", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 338, "end_line": 415, "span_ids": ["TestAssertionRewrite.test_boolop"], "tokens": 463}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_boolop(self) -> None:\n def f1() -> None:\n f = g = False\n assert f and g\n\n assert getmsg(f1) == \"assert (False)\"\n\n def f2() -> None:\n f = True\n g = False\n assert f and g\n\n assert getmsg(f2) == \"assert (True and False)\"\n\n def f3() -> None:\n f = False\n g = True\n assert f and g\n\n assert getmsg(f3) == \"assert (False)\"\n\n def f4() -> None:\n f = g = False\n assert f or g\n\n assert getmsg(f4) == \"assert (False or False)\"\n\n def f5() -> None:\n f = g = False\n assert not f and not g\n\n getmsg(f5, must_pass=True)\n\n def x() -> bool:\n return False\n\n def f6() -> None:\n assert x() and x()\n\n assert (\n getmsg(f6, {\"x\": x})\n == \"\"\"assert (False)\n + where False = x()\"\"\"\n )\n\n def f7() -> None:\n assert False or x()\n\n assert (\n getmsg(f7, {\"x\": x})\n == \"\"\"assert (False or False)\n + where False = x()\"\"\"\n )\n\n def f8() -> None:\n assert 1 in {} and 2 in {}\n\n assert getmsg(f8) == \"assert (1 in {})\"\n\n def f9() -> None:\n x = 1\n y = 2\n assert x in {1: None} and y in {}\n\n assert getmsg(f9) == \"assert (1 in {1: None} and 2 in {})\"\n\n def f10() -> None:\n f = True\n g = False\n assert f or g\n\n getmsg(f10, must_pass=True)\n\n def f11() -> None:\n f = g = h = lambda: True\n assert f() and g() and h()\n\n getmsg(f11, must_pass=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_short_circuit_evaluation_TestAssertionRewrite.test_unary_op.assert_getmsg_f4_ass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_short_circuit_evaluation_TestAssertionRewrite.test_unary_op.assert_getmsg_f4_ass", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 417, "end_line": 452, "span_ids": ["TestAssertionRewrite.test_short_circuit_evaluation", "TestAssertionRewrite.test_unary_op"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_short_circuit_evaluation(self) -> None:\n def f1() -> None:\n assert True or explode # type: ignore[name-defined,unreachable] # noqa: F821\n\n getmsg(f1, must_pass=True)\n\n def f2() -> None:\n x = 1\n assert x == 1 or x == 2\n\n getmsg(f2, must_pass=True)\n\n def test_unary_op(self) -> None:\n def f1() -> None:\n x = True\n assert not x\n\n assert getmsg(f1) == \"assert not True\"\n\n def f2() -> None:\n x = 0\n assert ~x + 1\n\n assert getmsg(f2) == \"assert (~0 + 1)\"\n\n def f3() -> None:\n x = 3\n assert -x + x\n\n assert getmsg(f3) == \"assert (-3 + 3)\"\n\n def f4() -> None:\n x = 0\n assert +x + x\n\n assert getmsg(f4) == \"assert (+0 + 0)\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_attribute_TestAssertionRewrite.test_attribute.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_attribute_TestAssertionRewrite.test_attribute.None_1", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 575, "end_line": 598, "span_ids": ["TestAssertionRewrite.test_attribute.X:2", "TestAssertionRewrite.test_attribute.X", "TestAssertionRewrite.test_attribute"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_attribute(self) -> None:\n class X:\n g = 3\n\n ns = {\"x\": X}\n\n def f1() -> None:\n assert not x.g # type: ignore[name-defined] # noqa: F821\n\n assert (\n getmsg(f1, ns)\n == \"\"\"assert not 3\n + where 3 = x.g\"\"\"\n )\n\n def f2() -> None:\n x.a = False # type: ignore[name-defined] # noqa: F821\n assert x.a # type: ignore[name-defined] # noqa: F821\n\n assert (\n getmsg(f2, ns)\n == \"\"\"assert False\n + where False = x.a\"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_comparisons_TestAssertionRewrite.test_comparisons.getmsg_f5_must_pass_True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_comparisons_TestAssertionRewrite.test_comparisons.getmsg_f5_must_pass_True", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 600, "end_line": 630, "span_ids": ["TestAssertionRewrite.test_comparisons"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_comparisons(self) -> None:\n def f1() -> None:\n a, b = range(2)\n assert b < a\n\n assert getmsg(f1) == \"\"\"assert 1 < 0\"\"\"\n\n def f2() -> None:\n a, b, c = range(3)\n assert a > b > c\n\n assert getmsg(f2) == \"\"\"assert 0 > 1\"\"\"\n\n def f3() -> None:\n a, b, c = range(3)\n assert a < b > c\n\n assert getmsg(f3) == \"\"\"assert 1 > 2\"\"\"\n\n def f4() -> None:\n a, b, c = range(3)\n assert a < b <= c\n\n getmsg(f4, must_pass=True)\n\n def f5() -> None:\n a, b, c = range(3)\n assert a < b\n assert b < c\n\n getmsg(f5, must_pass=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_custom_reprcompare_TestAssertionRewrite.test_custom_reprcompare.assert_getmsg_f2_ass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_custom_reprcompare_TestAssertionRewrite.test_custom_reprcompare.assert_getmsg_f2_ass", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 643, "end_line": 662, "span_ids": ["TestAssertionRewrite.test_custom_reprcompare"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_custom_reprcompare(self, monkeypatch) -> None:\n def my_reprcompare1(op, left, right) -> str:\n return \"42\"\n\n monkeypatch.setattr(util, \"_reprcompare\", my_reprcompare1)\n\n def f1() -> None:\n assert 42 < 3\n\n assert getmsg(f1) == \"assert 42\"\n\n def my_reprcompare2(op, left, right) -> str:\n return f\"{left} {op} {right}\"\n\n monkeypatch.setattr(util, \"_reprcompare\", my_reprcompare2)\n\n def f2() -> None:\n assert 1 < 3 < 5 <= 4 < 7\n\n assert getmsg(f2) == \"assert 5 <= 4\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assert_raising__bool__in_comparison_TestAssertionRewrite.test_formatchar.assert_msg_startswith_as": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_assert_raising__bool__in_comparison_TestAssertionRewrite.test_formatchar.assert_msg_startswith_as", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 664, "end_line": 691, "span_ids": ["TestAssertionRewrite.test_formatchar", "TestAssertionRewrite.test_assert_raising__bool__in_comparison", "TestAssertionRewrite.test_assert_raising__bool__in_comparison.f.A.__bool__", "TestAssertionRewrite.test_assert_raising__bool__in_comparison.f.A"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_assert_raising__bool__in_comparison(self) -> None:\n def f() -> None:\n class A:\n def __bool__(self):\n raise ValueError(42)\n\n def __lt__(self, other):\n return A()\n\n def __repr__(self):\n return \"\"\n\n def myany(x) -> bool:\n return False\n\n assert myany(A() < 0)\n\n msg = getmsg(f)\n assert msg is not None\n assert \" < 0\" in msg\n\n def test_formatchar(self) -> None:\n def f() -> None:\n assert \"%test\" == \"test\" # type: ignore[comparison-overlap]\n\n msg = getmsg(f)\n assert msg is not None\n assert 
msg.startswith(\"assert '%test' == 'test'\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_fscollector_from_parent_test_fscollector_from_parent.assert_collector_x_10": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_fscollector_from_parent_test_fscollector_from_parent.assert_collector_x_10", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1342, "end_line": 1360, "span_ids": ["test_fscollector_from_parent.MyCollector", "test_fscollector_from_parent", "test_fscollector_from_parent.MyCollector.__init__"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fscollector_from_parent(pytester: Pytester, request: FixtureRequest) -> None:\n \"\"\"Ensure File.from_parent can forward custom arguments to the constructor.\n\n Context: https://github.com/pytest-dev/pytest-cpp/pull/47\n \"\"\"\n\n class MyCollector(pytest.File):\n def __init__(self, fspath, parent, x):\n super().__init__(fspath, parent)\n self.x = x\n\n @classmethod\n def from_parent(cls, parent, *, fspath, x):\n return super().from_parent(parent=parent, fspath=fspath, x=x)\n\n collector = MyCollector.from_parent(\n parent=request.session, fspath=py.path.local(pytester.path) / \"foo\", x=10\n )\n assert collector.x == 10", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib_TestImportModeImportlib.test_collect_duplicate_names.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib_TestImportModeImportlib.test_collect_duplicate_names.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1363, "end_line": 1379, "span_ids": ["TestImportModeImportlib", "TestImportModeImportlib.test_collect_duplicate_names"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportModeImportlib:\n def test_collect_duplicate_names(self, pytester: Pytester) -> None:\n \"\"\"--import-mode=importlib can import modules with same names that are not in packages.\"\"\"\n pytester.makepyfile(\n **{\n \"tests_a/test_foo.py\": \"def test_foo1(): pass\",\n \"tests_b/test_foo.py\": \"def test_foo2(): pass\",\n }\n )\n result = pytester.runpytest(\"-v\", \"--import-mode=importlib\")\n result.stdout.fnmatch_lines(\n [\n 
\"tests_a/test_foo.py::test_foo1 *\",\n \"tests_b/test_foo.py::test_foo2 *\",\n \"* 2 passed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.test_conftest_TestImportModeImportlib.test_conftest.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.test_conftest_TestImportModeImportlib.test_conftest.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1381, "end_line": 1398, "span_ids": ["TestImportModeImportlib.test_conftest"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportModeImportlib:\n\n def test_conftest(self, pytester: Pytester) -> None:\n \"\"\"Directory containing conftest modules are not put in sys.path as a side-effect of\n importing them.\"\"\"\n tests_dir = pytester.path.joinpath(\"tests\")\n pytester.makepyfile(\n **{\n \"tests/conftest.py\": \"\",\n \"tests/test_foo.py\": \"\"\"\n import sys\n def test_check():\n assert r\"{tests_dir}\" not in sys.path\n \"\"\".format(\n tests_dir=tests_dir\n ),\n }\n )\n result = pytester.runpytest(\"-v\", \"--import-mode=importlib\")\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.setup_conftest_and_foo_TestImportModeImportlib.test_modules_importable_as_side_effect.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.setup_conftest_and_foo_TestImportModeImportlib.test_modules_importable_as_side_effect.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1400, "end_line": 1422, "span_ids": ["TestImportModeImportlib.setup_conftest_and_foo", "TestImportModeImportlib.test_modules_importable_as_side_effect"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportModeImportlib:\n\n def setup_conftest_and_foo(self, pytester: Pytester) -> None:\n \"\"\"Setup a tests folder to be used to test if modules in that folder can be imported\n due to side-effects of --import-mode or not.\"\"\"\n pytester.makepyfile(\n **{\n \"tests/conftest.py\": \"\",\n \"tests/foo.py\": \"\"\"\n def foo(): return 42\n \"\"\",\n 
\"tests/test_foo.py\": \"\"\"\n def test_check():\n from foo import foo\n assert foo() == 42\n \"\"\",\n }\n )\n\n def test_modules_importable_as_side_effect(self, pytester: Pytester) -> None:\n \"\"\"In import-modes `prepend` and `append`, we are able to import modules from folders\n containing conftest.py files due to the side effect of changing sys.path.\"\"\"\n self.setup_conftest_and_foo(pytester)\n result = pytester.runpytest(\"-v\", \"--import-mode=prepend\")\n result.stdout.fnmatch_lines([\"* 1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_enum_test_is_generator.assert_not_is_generator_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_enum_test_is_generator.assert_not_is_generator_f", "embedding": null, "metadata": {"file_path": "testing/test_compat.py", "file_name": "test_compat.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 30, "span_ids": ["test_is_generator", "impl", "imports", "imports:16"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import enum\nfrom functools import partial\nfrom functools import wraps\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport pytest\nfrom _pytest.compat import _PytestWrapper\nfrom _pytest.compat import assert_never\nfrom _pytest.compat import cached_property\nfrom _pytest.compat import get_real_func\nfrom _pytest.compat import is_generator\nfrom _pytest.compat import safe_getattr\nfrom _pytest.compat import safe_isclass\nfrom _pytest.outcomes import OutcomeException\nfrom _pytest.pytester import Pytester\n\nif TYPE_CHECKING:\n from typing_extensions import Literal\n\n\ndef test_is_generator() -> None:\n def zap():\n yield # pragma: no cover\n\n def foo():\n pass # pragma: no cover\n\n assert is_generator(zap)\n assert not is_generator(foo)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_getconftest_pathlist_TestConfigAPI.test_getconftest_pathlist.assert_pl_1_somepath": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_getconftest_pathlist_TestConfigAPI.test_getconftest_pathlist.assert_pl_1_somepath", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 577, "end_line": 587, "span_ids": ["TestConfigAPI.test_getconftest_pathlist"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n def 
test_getconftest_pathlist(self, pytester: Pytester, tmp_path: Path) -> None:\n somepath = tmp_path.joinpath(\"x\", \"y\", \"z\")\n p = tmp_path.joinpath(\"conftest.py\")\n p.write_text(f\"pathlist = ['.', {str(somepath)!r}]\")\n config = pytester.parseconfigure(p)\n assert config._getconftest_pathlist(\"notexist\", path=tmp_path) is None\n pl = config._getconftest_pathlist(\"pathlist\", path=tmp_path) or []\n print(pl)\n assert len(pl) == 2\n assert pl[0] == tmp_path\n assert pl[1] == somepath", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_linelist_pprojecttoml_TestConfigAPI.check_config_linelist.assert_values_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_linelist_pprojecttoml_TestConfigAPI.check_config_linelist.assert_values_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 706, "end_line": 722, "span_ids": ["TestConfigAPI.check_config_linelist", "TestConfigAPI.test_addini_linelist_pprojecttoml"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n def test_addini_linelist_pprojecttoml(self, pytester: Pytester) -> None:\n self.make_conftest_for_linelist(pytester)\n pytester.makepyprojecttoml(\n \"\"\"\n [tool.pytest.ini_options]\n xy = [\"123 345\", \"second line\"]\n \"\"\"\n )\n self.check_config_linelist(pytester)\n\n def check_config_linelist(self, pytester: Pytester) -> None:\n config = pytester.parseconfig()\n values = config.getini(\"xy\")\n assert len(values) == 2\n assert values == [\"123 345\", \"second line\"]\n values = config.getini(\"a2\")\n assert values == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_options_on_small_file_do_not_blow_up_test_options_on_small_file_do_not_blow_up.runfiletest_v_v_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_options_on_small_file_do_not_blow_up_test_options_on_small_file_do_not_blow_up.runfiletest_v_v_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 888, "end_line": 913, "span_ids": ["test_options_on_small_file_do_not_blow_up"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_options_on_small_file_do_not_blow_up(pytester: Pytester) -> None:\n def runfiletest(opts: Sequence[str]) -> None:\n reprec = 
pytester.inline_run(*opts)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 2\n assert skipped == passed == 0\n\n path = str(\n pytester.makepyfile(\n \"\"\"\n def test_f1(): assert 0\n def test_f2(): assert 0\n \"\"\"\n )\n )\n\n runfiletest([path])\n runfiletest([\"-l\", path])\n runfiletest([\"-s\", path])\n runfiletest([\"--tb=no\", path])\n runfiletest([\"--tb=short\", path])\n runfiletest([\"--tb=long\", path])\n runfiletest([\"--fulltrace\", path])\n runfiletest([\"--traceconfig\", path])\n runfiletest([\"-v\", path])\n runfiletest([\"-v\", \"-v\", path])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_ini_TestRootdir.test_with_ini.assert_ini_config_x_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_ini_TestRootdir.test_with_ini.assert_ini_config_x_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1306, "end_line": 1332, "span_ids": ["TestRootdir.test_with_ini"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRootdir:\n\n @pytest.mark.parametrize(\n \"name, contents\",\n [\n pytest.param(\"pytest.ini\", \"[pytest]\\nx=10\", id=\"pytest.ini\"),\n pytest.param(\n \"pyproject.toml\", \"[tool.pytest.ini_options]\\nx=10\", id=\"pyproject.toml\"\n ),\n pytest.param(\"tox.ini\", \"[pytest]\\nx=10\", id=\"tox.ini\"),\n pytest.param(\"setup.cfg\", \"[tool:pytest]\\nx=10\", id=\"setup.cfg\"),\n ],\n )\n def test_with_ini(self, tmp_path: Path, name: str, contents: str) -> None:\n inipath = tmp_path / name\n inipath.write_text(contents, \"utf-8\")\n\n a = tmp_path / \"a\"\n a.mkdir()\n b = a / \"b\"\n b.mkdir()\n for args in ([str(tmp_path)], [str(a)], [str(b)]):\n rootpath, parsed_inipath, _ = determine_setup(None, args)\n assert rootpath == tmp_path\n assert parsed_inipath == inipath\n rootpath, parsed_inipath, ini_config = determine_setup(None, [str(b), str(a)])\n assert rootpath == tmp_path\n assert parsed_inipath == inipath\n assert ini_config == {\"x\": \"10\"}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_specific_inifile_TestRootdir.test_with_specific_inifile.assert_ini_config_x_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_specific_inifile_TestRootdir.test_with_specific_inifile.assert_ini_config_x_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1362, "end_line": 1382, "span_ids": ["TestRootdir.test_with_specific_inifile"], "tokens": 219}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRootdir:\n\n @pytest.mark.parametrize(\n \"name, contents\",\n [\n # pytest.param(\"pytest.ini\", \"[pytest]\\nx=10\", id=\"pytest.ini\"),\n pytest.param(\n \"pyproject.toml\", \"[tool.pytest.ini_options]\\nx=10\", id=\"pyproject.toml\"\n ),\n # pytest.param(\"tox.ini\", \"[pytest]\\nx=10\", id=\"tox.ini\"),\n # pytest.param(\"setup.cfg\", \"[tool:pytest]\\nx=10\", id=\"setup.cfg\"),\n ],\n )\n def test_with_specific_inifile(\n self, tmp_path: Path, name: str, contents: str\n ) -> None:\n p = tmp_path / name\n p.touch()\n p.write_text(contents, \"utf-8\")\n rootpath, inipath, ini_config = determine_setup(str(p), [str(tmp_path)])\n assert rootpath == tmp_path\n assert inipath == p\n assert ini_config == {\"x\": \"10\"}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_invocation_args_test_invocation_args.with_pytest_raises_TypeEr._type_ignore_arg_type_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_invocation_args_test_invocation_args.with_pytest_raises_TypeEr._type_ignore_arg_type_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1746, "end_line": 1770, "span_ids": ["test_invocation_args.DummyPlugin", "test_invocation_args.DummyPlugin:2", "test_invocation_args"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_invocation_args(pytester: Pytester) -> None:\n \"\"\"Ensure that Config.invocation_* arguments are correctly defined\"\"\"\n\n class DummyPlugin:\n pass\n\n p = pytester.makepyfile(\"def test(): pass\")\n plugin = DummyPlugin()\n rec = pytester.inline_run(p, \"-v\", plugins=[plugin])\n calls = rec.getcalls(\"pytest_runtest_protocol\")\n assert len(calls) == 1\n call = calls[0]\n config = call.item.config\n\n assert config.invocation_params.args == (str(p), \"-v\")\n assert config.invocation_params.dir == pytester.path\n\n plugins = config.invocation_params.plugins\n assert len(plugins) == 2\n assert plugins[0] is plugin\n assert type(plugins[1]).__name__ == \"Collect\" # installed by pytester.inline_run()\n\n # args cannot be None\n with pytest.raises(TypeError):\n Config.InvocationParams(args=None, plugins=None, dir=Path()) # type: ignore[arg-type]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_value_access_not_existing_TestConftestValueAccessGlobal.test_value_access_by_path.assert_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_value_access_not_existing_TestConftestValueAccessGlobal.test_value_access_by_path.assert_", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 75, "end_line": 86, "span_ids": ["TestConftestValueAccessGlobal.test_value_access_by_path", "TestConftestValueAccessGlobal.test_value_access_not_existing"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"_sys_snapshot\")\nclass TestConftestValueAccessGlobal:\n\n def test_value_access_not_existing(self, basedir: Path) -> None:\n conftest = ConftestWithSetinitial(basedir)\n with pytest.raises(KeyError):\n conftest._rget_with_confmod(\"a\", basedir, importmode=\"prepend\")\n\n def test_value_access_by_path(self, basedir: Path) -> None:\n conftest = ConftestWithSetinitial(basedir)\n adir = basedir / \"adir\"\n assert conftest._rget_with_confmod(\"a\", adir, importmode=\"prepend\")[1] == 1\n assert (\n conftest._rget_with_confmod(\"a\", adir / \"b\", importmode=\"prepend\")[1] == 1.5\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_symlink_test_conftest_symlink.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_symlink_test_conftest_symlink.None_1", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 217, "end_line": 259, "span_ids": ["test_conftest_symlink"], "tokens": 355}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_symlink(pytester: Pytester) -> None:\n \"\"\"`conftest.py` discovery follows normal path resolution and does not resolve symlinks.\"\"\"\n # Structure:\n # /real\n # /real/conftest.py\n # /real/app\n # /real/app/tests\n # /real/app/tests/test_foo.py\n\n # Links:\n # /symlinktests -> /real/app/tests (running at symlinktests should fail)\n # /symlink -> /real (running at /symlink should work)\n\n real = pytester.mkdir(\"real\")\n realtests = real.joinpath(\"app/tests\")\n realtests.mkdir(parents=True)\n symlink_or_skip(realtests, pytester.path.joinpath(\"symlinktests\"))\n symlink_or_skip(real, pytester.path.joinpath(\"symlink\"))\n pytester.makepyfile(\n **{\n \"real/app/tests/test_foo.py\": \"def test1(fixture): pass\",\n \"real/conftest.py\": textwrap.dedent(\n \"\"\"\n import pytest\n\n print(\"conftest_loaded\")\n\n @pytest.fixture\n def fixture():\n print(\"fixture_used\")\n \"\"\"\n ),\n }\n )\n\n # Should fail because conftest cannot be found from the link structure.\n result = 
pytester.runpytest(\"-vs\", \"symlinktests\")\n result.stdout.fnmatch_lines([\"*fixture 'fixture' not found*\"])\n assert result.ret == ExitCode.TESTS_FAILED\n\n # Should not cause \"ValueError: Plugin already registered\" (#4174).\n result = pytester.runpytest(\"-vs\", \"symlink\")\n assert result.ret == ExitCode.OK", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_custom_debugger_hook_custom_debugger_hook.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_custom_debugger_hook_custom_debugger_hook.None_2", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 57, "end_line": 78, "span_ids": ["custom_debugger_hook._CustomDebugger.__init__", "custom_debugger_hook._CustomDebugger", "custom_debugger_hook"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef custom_debugger_hook():\n called = []\n\n # install dummy debugger class and track which methods were called on it\n class _CustomDebugger:\n def __init__(self, *args, **kwargs):\n called.append(\"init\")\n\n def reset(self):\n called.append(\"reset\")\n\n def interaction(self, *args):\n called.append(\"interaction\")\n\n def set_trace(self, frame):\n print(\"**CustomDebugger**\")\n called.append(\"set_trace\")\n\n _pytest._CustomDebugger = _CustomDebugger # type: ignore\n yield called\n del _pytest._CustomDebugger # type: ignore", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_TestLoadConfigDictFromFile.test_valid_toml_file_TestLoadConfigDictFromFile.test_valid_toml_file.assert_load_config_dict_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_TestLoadConfigDictFromFile.test_valid_toml_file_TestLoadConfigDictFromFile.test_valid_toml_file.assert_load_config_dict_f", "embedding": null, "metadata": {"file_path": "testing/test_findpaths.py", "file_name": "test_findpaths.py", "file_type": "text/x-python", "category": "test", "start_line": 68, "end_line": 89, "span_ids": ["TestLoadConfigDictFromFile.test_valid_toml_file"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadConfigDictFromFile:\n\n def test_valid_toml_file(self, tmp_path: Path) -> None:\n \"\"\".toml files with [tool.pytest.ini_options] are read correctly, including changing\n data types to str/list for compatibility with other configuration options.\"\"\"\n fn = tmp_path / \"myconfig.toml\"\n fn.write_text(\n dedent(\n \"\"\"\n 
[tool.pytest.ini_options]\n x = 1\n y = 20.0\n values = [\"tests\", \"integration\"]\n name = \"foo\"\n \"\"\"\n ),\n encoding=\"utf-8\",\n )\n assert load_config_dict_from_file(fn) == {\n \"x\": \"1\",\n \"y\": \"20.0\",\n \"values\": [\"tests\", \"integration\"],\n \"name\": \"foo\",\n }", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_timing_function_TestPython.test_timing_function.assert_float_val_7_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_timing_function_TestPython.test_timing_function.assert_float_val_7_0", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 228, "end_line": 246, "span_ids": ["TestPython.test_timing_function"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n def test_timing_function(\n self, pytester: Pytester, run_and_parse: RunAndParse, mock_timing\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n from _pytest import timing\n def setup_module():\n timing.sleep(1)\n def teardown_module():\n timing.sleep(2)\n def test_sleep():\n timing.sleep(4)\n \"\"\"\n )\n result, dom = run_and_parse()\n node = dom.find_first_by_tag(\"testsuite\")\n tnode = node.find_first_by_tag(\"testcase\")\n val = tnode[\"time\"]\n assert float(val) == 7.0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_mangle_test_address_test_dont_configure_on_workers.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_mangle_test_address_test_dont_configure_on_workers.None_1", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 936, "end_line": 971, "span_ids": ["test_dont_configure_on_workers.FakeConfig:2", "test_mangle_test_address", "test_dont_configure_on_workers", "test_dont_configure_on_workers.FakeConfig"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_mangle_test_address() -> None:\n from _pytest.junitxml import mangle_test_address\n\n address = \"::\".join([\"a/my.py.thing.py\", \"Class\", \"()\", \"method\", \"[a-1-::]\"])\n newnames = mangle_test_address(address)\n assert newnames == [\"a.my.py.thing\", \"Class\", \"method\", \"[a-1-::]\"]\n\n\ndef test_dont_configure_on_workers(tmp_path: Path) -> None:\n gotten: List[object] = []\n\n class FakeConfig:\n if 
TYPE_CHECKING:\n workerinput = None\n\n def __init__(self):\n self.pluginmanager = self\n self.option = self\n self._store = Store()\n\n def getini(self, name):\n return \"pytest\"\n\n junitprefix = None\n # XXX: shouldn't need tmp_path ?\n xmlpath = str(tmp_path.joinpath(\"junix.xml\"))\n register = gotten.append\n\n fake_config = cast(Config, FakeConfig())\n from _pytest import junitxml\n\n junitxml.pytest_configure(fake_config)\n assert len(gotten) == 1\n FakeConfig.workerinput = None\n junitxml.pytest_configure(fake_config)\n assert len(gotten) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_link_resolve.py_os.path_subst_path_linux.try_.finally_.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_link_resolve.py_os.path_subst_path_linux.try_.finally_.pass", "embedding": null, "metadata": {"file_path": "testing/test_link_resolve.py", "file_name": "test_link_resolve.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 48, "span_ids": ["subst_path_windows", "subst_path_linux", "imports"], "tokens": 273}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os.path\nimport subprocess\nimport sys\nimport textwrap\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom string import ascii_lowercase\n\nfrom _pytest.pytester import Pytester\n\n\n@contextmanager\ndef subst_path_windows(filepath: Path):\n for c in ascii_lowercase[7:]: # Create a subst drive from H-Z.\n c += \":\"\n if not os.path.exists(c):\n drive = c\n break\n else:\n raise AssertionError(\"Unable to find suitable drive letter for subst.\")\n\n directory = filepath.parent\n basename = filepath.name\n\n args = [\"subst\", drive, str(directory)]\n subprocess.check_call(args)\n assert os.path.exists(drive)\n try:\n filename = Path(drive, os.sep, basename)\n yield filename\n finally:\n args = [\"subst\", \"/D\", drive]\n subprocess.check_call(args)\n\n\n@contextmanager\ndef subst_path_linux(filepath: Path):\n directory = filepath.parent\n basename = filepath.name\n\n target = directory / \"..\" / \"sub2\"\n os.symlink(str(directory), str(target), target_is_directory=True)\n try:\n filename = target / basename\n yield filename\n finally:\n # We don't need to unlink (it's all in the tempdir).\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_link_resolve.py_test_link_resolve_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_link_resolve.py_test_link_resolve_", "embedding": null, "metadata": {"file_path": "testing/test_link_resolve.py", "file_name": "test_link_resolve.py", "file_type": "text/x-python", "category": "test", "start_line": 51, "end_line": 81, "span_ids": ["test_link_resolve"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_link_resolve(pytester: Pytester) -> None:\n \"\"\"See: https://github.com/pytest-dev/pytest/issues/5965.\"\"\"\n sub1 = pytester.mkpydir(\"sub1\")\n p = sub1.joinpath(\"test_foo.py\")\n p.write_text(\n textwrap.dedent(\n \"\"\"\n import pytest\n def test_foo():\n raise AssertionError()\n \"\"\"\n )\n )\n\n subst = subst_path_linux\n if sys.platform == \"win32\":\n subst = subst_path_windows\n\n with subst(p) as subst_p:\n result = pytester.runpytest(str(subst_p), \"-v\")\n # i.e.: Make sure that the error is reported as a relative path, not as a\n # resolved path.\n # See: https://github.com/pytest-dev/pytest/issues/5965\n stdout = result.stdout.str()\n assert \"sub1/test_foo.py\" not in stdout\n\n # i.e.: Expect drive on windows because we just have drive:filename, whereas\n # we expect a relative path on Linux.\n expect = f\"*{subst_p}*\" if sys.platform == \"win32\" else \"*sub2/test_foo.py*\"\n result.stdout.fnmatch_lines([expect])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_argparse_test_wrap_session_notify_exception.if_returncode_is_False_.else_.assert_result_stderr_line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_argparse_test_wrap_session_notify_exception.if_returncode_is_False_.else_.assert_result_stderr_line", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 62, "span_ids": ["imports", "test_wrap_session_notify_exception"], "tokens": 462}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import argparse\nimport os\nimport re\nfrom pathlib import Path\nfrom typing import Optional\n\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.config import UsageError\nfrom _pytest.main import resolve_collection_argument\nfrom _pytest.main import validate_basetemp\nfrom _pytest.pytester import Pytester\n\n\n@pytest.mark.parametrize(\n \"ret_exc\",\n (\n pytest.param((None, ValueError)),\n pytest.param((42, SystemExit)),\n pytest.param((False, SystemExit)),\n ),\n)\ndef test_wrap_session_notify_exception(ret_exc, pytester: Pytester) -> None:\n returncode, exc = ret_exc\n c1 = pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_sessionstart():\n raise {exc}(\"boom\")\n\n def pytest_internalerror(excrepr, excinfo):\n returncode = {returncode!r}\n if returncode is not False:\n pytest.exit(\"exiting after %s...\" % excinfo.typename, returncode={returncode!r})\n \"\"\".format(\n returncode=returncode, exc=exc.__name__\n )\n )\n result = pytester.runpytest()\n if returncode:\n assert result.ret == returncode\n else:\n assert result.ret == ExitCode.INTERNAL_ERROR\n assert result.stdout.lines[0] == \"INTERNALERROR> Traceback (most recent call last):\"\n\n if exc == SystemExit:\n assert 
result.stdout.lines[-3:] == [\n f'INTERNALERROR> File \"{c1}\", line 4, in pytest_sessionstart',\n 'INTERNALERROR> raise SystemExit(\"boom\")',\n \"INTERNALERROR> SystemExit: boom\",\n ]\n else:\n assert result.stdout.lines[-3:] == [\n f'INTERNALERROR> File \"{c1}\", line 4, in pytest_sessionstart',\n 'INTERNALERROR> raise ValueError(\"boom\")',\n \"INTERNALERROR> ValueError: boom\",\n ]\n if returncode is False:\n assert result.stderr.lines == [\"mainloop: caught unexpected SystemExit!\"]\n else:\n assert result.stderr.lines == [f\"Exit: exiting after {exc.__name__}...\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_wrap_session_exit_sessionfinish_test_wrap_session_exit_sessionfinish.assert_result_stderr_line": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_wrap_session_exit_sessionfinish_test_wrap_session_exit_sessionfinish.assert_result_stderr_line", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 65, "end_line": 84, "span_ids": ["test_wrap_session_exit_sessionfinish"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"returncode\", (None, 42))\ndef test_wrap_session_exit_sessionfinish(\n returncode: Optional[int], pytester: Pytester\n) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_sessionfinish():\n pytest.exit(msg=\"exit_pytest_sessionfinish\", returncode={returncode})\n \"\"\".format(\n returncode=returncode\n )\n )\n result = pytester.runpytest()\n if returncode:\n assert result.ret == returncode\n else:\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n assert result.stdout.lines[-1] == \"collected 0 items\"\n assert result.stderr.lines == [\"Exit: exit_pytest_sessionfinish\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_os_test_setattr.with_pytest_raises_TypeEr._type_ignore_call_overl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_os_test_setattr.with_pytest_raises_TypeEr._type_ignore_call_overl", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 48, "span_ids": ["test_setattr.A", "test_setattr.A:2", "test_setattr", "imports", "mp"], "tokens": 309}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport re\nimport sys\nimport textwrap\nfrom pathlib import 
Path\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import Type\n\nimport pytest\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\n\n@pytest.fixture\ndef mp() -> Generator[MonkeyPatch, None, None]:\n cwd = os.getcwd()\n sys_path = list(sys.path)\n yield MonkeyPatch()\n sys.path[:] = sys_path\n os.chdir(cwd)\n\n\ndef test_setattr() -> None:\n class A:\n x = 1\n\n monkeypatch = MonkeyPatch()\n pytest.raises(AttributeError, monkeypatch.setattr, A, \"notexists\", 2)\n monkeypatch.setattr(A, \"y\", 2, raising=False)\n assert A.y == 2 # type: ignore\n monkeypatch.undo()\n assert not hasattr(A, \"y\")\n\n monkeypatch = MonkeyPatch()\n monkeypatch.setattr(A, \"x\", 2)\n assert A.x == 2\n monkeypatch.setattr(A, \"x\", 3)\n assert A.x == 3\n monkeypatch.undo()\n assert A.x == 1\n\n A.x = 5\n monkeypatch.undo() # double-undo makes no modification\n assert A.x == 5\n\n with pytest.raises(TypeError):\n monkeypatch.setattr(A, \"y\") # type: ignore[call-overload]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_TestEnvironWarnings_TestEnvironWarnings.test_setenv_non_str_warning.with_pytest_warns_pytest_._type_ignore_arg_type_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_TestEnvironWarnings_TestEnvironWarnings.test_setenv_non_str_warning.with_pytest_warns_pytest_._type_ignore_arg_type_", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 209, "end_line": 225, "span_ids": ["TestEnvironWarnings.test_setenv_non_str_warning", "TestEnvironWarnings"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEnvironWarnings:\n \"\"\"\n os.environ keys and values should be native strings, otherwise it will cause problems with other modules (notably\n subprocess). 
On Python 2 os.environ accepts anything without complaining, while Python 3 does the right thing\n and raises an error.\n \"\"\"\n\n VAR_NAME = \"PYTEST_INTERNAL_MY_VAR\"\n\n def test_setenv_non_str_warning(self, monkeypatch: MonkeyPatch) -> None:\n value = 2\n msg = (\n \"Value of environment variable PYTEST_INTERNAL_MY_VAR type should be str, \"\n \"but got 2 (type: int); converted to str implicitly\"\n )\n with pytest.warns(pytest.PytestWarning, match=re.escape(msg)):\n monkeypatch.setenv(str(self.VAR_NAME), value) # type: ignore[arg-type]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setenv_prepend_test_issue185_time_breaks.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_setenv_prepend_test_issue185_time_breaks.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 224, "end_line": 310, "span_ids": ["test_chdir_with_str", "test_monkeypatch_plugin", "test_syspath_prepend_double_undo", "test_chdir_with_path_local", "test_issue185_time_breaks", "test_chdir_double_undo", "test_chdir_undo", "test_setenv_prepend", "test_syspath_prepend"], "tokens": 573}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setenv_prepend() -> None:\n import os\n\n monkeypatch = MonkeyPatch()\n monkeypatch.setenv(\"XYZ123\", \"2\", prepend=\"-\")\n monkeypatch.setenv(\"XYZ123\", \"3\", prepend=\"-\")\n assert os.environ[\"XYZ123\"] == \"3-2\"\n monkeypatch.undo()\n assert \"XYZ123\" not in os.environ\n\n\ndef test_monkeypatch_plugin(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n def test_method(monkeypatch):\n assert monkeypatch.__class__.__name__ == \"MonkeyPatch\"\n \"\"\"\n )\n res = reprec.countoutcomes()\n assert tuple(res) == (1, 0, 0), res\n\n\ndef test_syspath_prepend(mp: MonkeyPatch) -> None:\n old = list(sys.path)\n mp.syspath_prepend(\"world\")\n mp.syspath_prepend(\"hello\")\n assert sys.path[0] == \"hello\"\n assert sys.path[1] == \"world\"\n mp.undo()\n assert sys.path == old\n mp.undo()\n assert sys.path == old\n\n\ndef test_syspath_prepend_double_undo(mp: MonkeyPatch) -> None:\n old_syspath = sys.path[:]\n try:\n mp.syspath_prepend(\"hello world\")\n mp.undo()\n sys.path.append(\"more hello world\")\n mp.undo()\n assert sys.path[-1] == \"more hello world\"\n finally:\n sys.path[:] = old_syspath\n\n\ndef test_chdir_with_path_local(mp: MonkeyPatch, tmp_path: Path) -> None:\n mp.chdir(tmp_path)\n assert os.getcwd() == str(tmp_path)\n\n\ndef test_chdir_with_str(mp: MonkeyPatch, tmp_path: Path) -> None:\n mp.chdir(str(tmp_path))\n assert os.getcwd() == str(tmp_path)\n\n\ndef test_chdir_undo(mp: MonkeyPatch, tmp_path: Path) -> None:\n cwd = os.getcwd()\n mp.chdir(tmp_path)\n mp.undo()\n assert os.getcwd() == cwd\n\n\ndef test_chdir_double_undo(mp: MonkeyPatch, tmp_path: Path) -> None:\n mp.chdir(str(tmp_path))\n mp.undo()\n os.chdir(tmp_path)\n mp.undo()\n 
assert os.getcwd() == str(tmp_path)\n\n\ndef test_issue185_time_breaks(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import time\n def test_m(monkeypatch):\n def f():\n raise Exception\n monkeypatch.setattr(time, \"time\", f)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *1 passed*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_syspath_prepend_with_namespace_packages_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_syspath_prepend_with_namespace_packages_", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 423, "end_line": 456, "span_ids": ["test_syspath_prepend_with_namespace_packages"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_syspath_prepend_with_namespace_packages(\n pytester: Pytester, monkeypatch: MonkeyPatch\n) -> None:\n for dirname in \"hello\", \"world\":\n d = pytester.mkdir(dirname)\n ns = d.joinpath(\"ns_pkg\")\n ns.mkdir()\n ns.joinpath(\"__init__.py\").write_text(\n \"__import__('pkg_resources').declare_namespace(__name__)\"\n )\n lib = ns.joinpath(dirname)\n lib.mkdir()\n lib.joinpath(\"__init__.py\").write_text(\"def check(): return %r\" % dirname)\n\n monkeypatch.syspath_prepend(\"hello\")\n import ns_pkg.hello\n\n assert ns_pkg.hello.check() == \"hello\"\n\n with pytest.raises(ImportError):\n import ns_pkg.world\n\n # Prepending should call fixup_namespace_packages.\n monkeypatch.syspath_prepend(\"world\")\n import ns_pkg.world\n\n assert ns_pkg.world.check() == \"world\"\n\n # Should invalidate caches via importlib.invalidate_caches.\n modules_tmpdir = pytester.mkdir(\"modules_tmpdir\")\n monkeypatch.syspath_prepend(str(modules_tmpdir))\n modules_tmpdir.joinpath(\"main_app.py\").write_text(\"app = True\")\n from main_app import app # noqa: F401", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_failure_with_changed_cwd_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_failure_with_changed_cwd_", "embedding": null, "metadata": {"file_path": "testing/test_nodes.py", "file_name": "test_nodes.py", "file_type": "text/x-python", "category": "test", "start_line": 95, "end_line": 120, "span_ids": ["test_failure_with_changed_cwd"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_failure_with_changed_cwd(pytester: Pytester) -> 
None:\n \"\"\"\n Test failure lines should use absolute paths if cwd has changed since\n invocation, so the path is correct (#6428).\n \"\"\"\n p = pytester.makepyfile(\n \"\"\"\n import os\n import pytest\n\n @pytest.fixture\n def private_dir():\n out_dir = 'ddd'\n os.mkdir(out_dir)\n old_dir = os.getcwd()\n os.chdir(out_dir)\n yield out_dir\n os.chdir(old_dir)\n\n def test_show_wrong_path(private_dir):\n assert False\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([str(p) + \":*: AssertionError\", \"*1 failed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort_TestFNMatcherPort.if_sys_platform_win32.else_.drv2._d_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort_TestFNMatcherPort.if_sys_platform_win32.else_.drv2._d_", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 29, "end_line": 53, "span_ids": ["TestFNMatcherPort", "TestFNMatcherPort:3", "TestFNMatcherPort.match"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFNMatcherPort:\n \"\"\"Test that our port of py.common.FNMatcher (fnmatch_ex) produces the\n same results as the original py.path.local.fnmatch method.\"\"\"\n\n @pytest.fixture(params=[\"pathlib\", \"py.path\"])\n def match(self, request):\n if request.param == \"py.path\":\n\n def match_(pattern, path):\n return py.path.local(path).fnmatch(pattern)\n\n else:\n assert request.param == \"pathlib\"\n\n def match_(pattern, path):\n return fnmatch_ex(pattern, path)\n\n return match_\n\n if sys.platform == \"win32\":\n drv1 = \"c:\"\n drv2 = \"d:\"\n else:\n drv1 = \"/c\"\n drv2 = \"/d\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort.test_matching_TestFNMatcherPort.test_matching.assert_match_pattern_pat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort.test_matching_TestFNMatcherPort.test_matching.assert_match_pattern_pat", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 47, "end_line": 62, "span_ids": ["TestFNMatcherPort.test_matching"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFNMatcherPort:\n\n @pytest.mark.parametrize(\n \"pattern, path\",\n [\n (\"*.py\", \"foo.py\"),\n (\"*.py\", \"bar/foo.py\"),\n 
(\"test_*.py\", \"foo/test_foo.py\"),\n (\"tests/*.py\", \"tests/foo.py\"),\n (drv1 + \"/*.py\", drv1 + \"/foo.py\"),\n (drv1 + \"/foo/*.py\", drv1 + \"/foo/foo.py\"),\n (\"tests/**/test*.py\", \"tests/foo/test_foo.py\"),\n (\"tests/**/doc/test*.py\", \"tests/foo/bar/doc/test_foo.py\"),\n (\"tests/**/doc/**/test*.py\", \"tests/foo/doc/bar/test_foo.py\"),\n ],\n )\n def test_matching(self, match, pattern, path):\n assert match(pattern, path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort.test_matching_abspath_TestFNMatcherPort.test_not_matching.assert_not_match_pattern_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestFNMatcherPort.test_matching_abspath_TestFNMatcherPort.test_not_matching.assert_not_match_pattern_", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 64, "end_line": 83, "span_ids": ["TestFNMatcherPort.test_matching_abspath", "TestFNMatcherPort.test_not_matching"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFNMatcherPort:\n\n def test_matching_abspath(self, match):\n abspath = os.path.abspath(os.path.join(\"tests/foo.py\"))\n assert match(\"tests/foo.py\", abspath)\n\n @pytest.mark.parametrize(\n \"pattern, path\",\n [\n (\"*.py\", \"foo.pyc\"),\n (\"*.py\", \"foo/foo.pyc\"),\n (\"tests/*.py\", \"foo/foo.py\"),\n (drv1 + \"/*.py\", drv2 + \"/foo.py\"),\n (drv1 + \"/foo/*.py\", drv2 + \"/foo/foo.py\"),\n (\"tests/**/test*.py\", \"tests/foo.py\"),\n (\"tests/**/test*.py\", \"foo/test_foo.py\"),\n (\"tests/**/doc/test*.py\", \"tests/foo/bar/doc/foo.py\"),\n (\"tests/**/doc/test*.py\", \"tests/foo/bar/test_foo.py\"),\n ],\n )\n def test_not_matching(self, match, pattern, path):\n assert not match(pattern, path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_messy_name_TestImportPath.test_import_after.assert_module1_is_mod1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_messy_name_TestImportPath.test_import_after.assert_module1_is_mod1", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 181, "end_line": 229, "span_ids": ["TestImportPath.test_d", "TestImportPath.test_import_after", "TestImportPath.test_a", "TestImportPath.test_dir", "TestImportPath.test_c", "TestImportPath.test_messy_name", "TestImportPath.test_b"], "tokens": 508}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n\n def test_messy_name(self, tmp_path: Path) -> None:\n # http://bitbucket.org/hpk42/py-trunk/issue/129\n path = tmp_path / \"foo__init__.py\"\n path.touch()\n module = import_path(path)\n assert module.__name__ == \"foo__init__\"\n\n def test_dir(self, tmp_path: Path) -> None:\n p = tmp_path / \"hello_123\"\n p.mkdir()\n p_init = p / \"__init__.py\"\n p_init.touch()\n m = import_path(p)\n assert m.__name__ == \"hello_123\"\n m = import_path(p_init)\n assert m.__name__ == \"hello_123\"\n\n def test_a(self, path1: Path) -> None:\n otherdir = path1 / \"otherdir\"\n mod = import_path(otherdir / \"a.py\")\n assert mod.result == \"got it\" # type: ignore[attr-defined]\n assert mod.__name__ == \"otherdir.a\"\n\n def test_b(self, path1: Path) -> None:\n otherdir = path1 / \"otherdir\"\n mod = import_path(otherdir / \"b.py\")\n assert mod.stuff == \"got it\" # type: ignore[attr-defined]\n assert mod.__name__ == \"otherdir.b\"\n\n def test_c(self, path1: Path) -> None:\n otherdir = path1 / \"otherdir\"\n mod = import_path(otherdir / \"c.py\")\n assert mod.value == \"got it\" # type: ignore[attr-defined]\n\n def test_d(self, path1: Path) -> None:\n otherdir = path1 / \"otherdir\"\n mod = import_path(otherdir / \"d.py\")\n assert mod.value2 == \"got it\" # type: ignore[attr-defined]\n\n def test_import_after(self, tmp_path: Path) -> None:\n tmp_path.joinpath(\"xxxpackage\").mkdir()\n tmp_path.joinpath(\"xxxpackage\", \"__init__.py\").touch()\n mod1path = tmp_path.joinpath(\"xxxpackage\", \"module1.py\")\n mod1path.touch()\n mod1 = import_path(mod1path)\n assert mod1.__name__ == \"xxxpackage.module1\"\n from xxxpackage import module1\n\n assert module1 is mod1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_check_filepath_consistency_TestImportPath.test_check_filepath_consistency.assert_issubclass_ImportP": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_check_filepath_consistency_TestImportPath.test_check_filepath_consistency.assert_issubclass_ImportP", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 231, "end_line": 257, "span_ids": ["TestImportPath.test_check_filepath_consistency"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n\n def test_check_filepath_consistency(\n self, monkeypatch: MonkeyPatch, tmp_path: Path\n ) -> None:\n name = \"pointsback123\"\n p = tmp_path.joinpath(name + \".py\")\n p.touch()\n for ending in (\".pyc\", \".pyo\"):\n mod = ModuleType(name)\n pseudopath = tmp_path.joinpath(name + ending)\n pseudopath.touch()\n mod.__file__ = str(pseudopath)\n monkeypatch.setitem(sys.modules, name, mod)\n newmod = import_path(p)\n assert mod == newmod\n monkeypatch.undo()\n mod = ModuleType(name)\n pseudopath = tmp_path.joinpath(name + \"123.py\")\n pseudopath.touch()\n 
mod.__file__ = str(pseudopath)\n monkeypatch.setitem(sys.modules, name, mod)\n with pytest.raises(ImportPathMismatchError) as excinfo:\n import_path(p)\n modname, modfile, orig = excinfo.value.args\n assert modname == name\n assert modfile == str(pseudopath)\n assert orig == p\n assert issubclass(ImportPathMismatchError, ImportError)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_no_meta_path_found_TestImportPath.test_no_meta_path_found.with_pytest_raises_Import.import_path_simple_module": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_no_meta_path_found_TestImportPath.test_no_meta_path_found.with_pytest_raises_Import.import_path_simple_module", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 311, "end_line": 326, "span_ids": ["TestImportPath.test_no_meta_path_found"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n\n def test_no_meta_path_found(\n self, simple_module: Path, monkeypatch: MonkeyPatch\n ) -> None:\n \"\"\"Even without any meta_path should still import module.\"\"\"\n monkeypatch.setattr(sys, \"meta_path\", [])\n module = import_path(simple_module, mode=\"importlib\")\n assert module.foo(2) == 42 # type: ignore[attr-defined]\n\n # mode='importlib' fails if no spec is found to load the module\n import importlib.util\n\n monkeypatch.setattr(\n importlib.util, \"spec_from_file_location\", lambda *args: None\n )\n with pytest.raises(ImportError):\n import_path(simple_module, mode=\"importlib\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_long_path_during_cleanup_test_long_path_during_cleanup.assert_not_os_path_isdir_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_long_path_during_cleanup_test_long_path_during_cleanup.assert_not_os_path_isdir_", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 368, "end_line": 381, "span_ids": ["test_long_path_during_cleanup"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_long_path_during_cleanup(tmp_path: Path) -> None:\n \"\"\"Ensure that deleting long path works (particularly on Windows (#6775)).\"\"\"\n path = (tmp_path / (\"a\" * 250)).resolve()\n if sys.platform == \"win32\":\n # make sure that the full path 
is > 260 characters without any\n # component being over 260 characters\n assert len(str(path)) > 260\n extended_path = \"\\\\\\\\?\\\\\" + str(path)\n else:\n extended_path = str(path)\n os.mkdir(extended_path)\n assert os.path.isdir(extended_path)\n maybe_delete_a_numbered_dir(path)\n assert not os.path.isdir(extended_path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_get_extended_length_path_str_test_get_extended_length_path_str.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_get_extended_length_path_str_test_get_extended_length_path_str.None_3", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 384, "end_line": 388, "span_ids": ["test_get_extended_length_path_str"], "tokens": 109}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_get_extended_length_path_str() -> None:\n assert get_extended_length_path_str(r\"c:\\foo\") == r\"\\\\?\\c:\\foo\"\n assert get_extended_length_path_str(r\"\\\\share\\foo\") == r\"\\\\?\\UNC\\share\\foo\"\n assert get_extended_length_path_str(r\"\\\\?\\UNC\\share\\foo\") == r\"\\\\?\\UNC\\share\\foo\"\n assert get_extended_length_path_str(r\"\\\\?\\c:\\foo\") == r\"\\\\?\\c:\\foo\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_module_TestPytestPluginManager.test_consider_module.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pluginmanager.py_TestPytestPluginManager.test_consider_module_TestPytestPluginManager.test_consider_module.None_2", "embedding": null, "metadata": {"file_path": "testing/test_pluginmanager.py", "file_name": "test_pluginmanager.py", "file_type": "text/x-python", "category": "test", "start_line": 228, "end_line": 238, "span_ids": ["TestPytestPluginManager.test_consider_module"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPytestPluginManager:\n\n def test_consider_module(\n self, pytester: Pytester, pytestpm: PytestPluginManager\n ) -> None:\n pytester.syspathinsert()\n pytester.makepyfile(pytest_p1=\"#\")\n pytester.makepyfile(pytest_p2=\"#\")\n mod = types.ModuleType(\"temp\")\n mod.__dict__[\"pytest_plugins\"] = [\"pytest_p1\", \"pytest_p2\"]\n pytestpm.consider_module(mod)\n assert pytestpm.get_plugin(\"pytest_p1\").__name__ == \"pytest_p1\"\n assert pytestpm.get_plugin(\"pytest_p2\").__name__ == \"pytest_p2\"", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash.check_longrepr_TestReportSerialization.test_chained_exceptions_no_reprcrash.check_longrepr.assert_fileloc2_message_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash.check_longrepr_TestReportSerialization.test_chained_exceptions_no_reprcrash.check_longrepr.assert_fileloc2_message_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 369, "end_line": 381, "span_ids": ["TestReportSerialization.test_chained_exceptions_no_reprcrash"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_chained_exceptions_no_reprcrash(self, pytester: Pytester, tw_mock) -> None:\n # ... other code\n\n def check_longrepr(longrepr: object) -> None:\n assert isinstance(longrepr, ExceptionChainRepr)\n assert len(longrepr.chain) == 2\n entry1, entry2 = longrepr.chain\n tb1, fileloc1, desc1 = entry1\n tb2, fileloc2, desc2 = entry2\n\n assert \"RemoteTraceback\" in str(tb1)\n assert \"ValueError: value error\" in str(tb2)\n\n assert fileloc1 is None\n assert fileloc2 is not None\n assert fileloc2.message == \"ValueError: value error\"\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_header_trailer_info_TestTerminalFunctional.test_no_header_trailer_info.if_request_config_pluginm.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_header_trailer_info_TestTerminalFunctional.test_no_header_trailer_info.if_request_config_pluginm.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 818, "end_line": 841, "span_ids": ["TestTerminalFunctional.test_no_header_trailer_info"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_no_header_trailer_info(\n self, monkeypatch: MonkeyPatch, pytester: Pytester, request\n ) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\")\n pytester.makepyfile(\n \"\"\"\n def test_passes():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--no-header\")\n verinfo = \".\".join(map(str, sys.version_info[:3]))\n result.stdout.no_fnmatch_line(\n \"platform %s -- Python %s*pytest-%s*py-%s*pluggy-%s\"\n % (\n sys.platform,\n verinfo,\n pytest.__version__,\n py.__version__,\n pluggy.__version__,\n )\n )\n if request.config.pluginmanager.list_plugin_distinfo():\n result.stdout.no_fnmatch_line(\"plugins: *\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_header_TestTerminalFunctional.test_no_header.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_header_TestTerminalFunctional.test_no_header.None_4", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 895, "end_line": 913, "span_ids": ["TestTerminalFunctional.test_no_header"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_no_header(self, pytester: Pytester) -> None:\n pytester.path.joinpath(\"tests\").mkdir()\n pytester.path.joinpath(\"gui\").mkdir()\n\n # with testpaths option, and not passing anything in the command-line\n pytester.makeini(\n \"\"\"\n [pytest]\n testpaths = tests gui\n \"\"\"\n )\n result = pytester.runpytest(\"--no-header\")\n result.stdout.no_fnmatch_line(\n \"rootdir: *test_header0, inifile: tox.ini, testpaths: 
tests, gui\"\n )\n\n # with testpaths option, passing directory in command-line: do not show testpaths then\n result = pytester.runpytest(\"tests\", \"--no-header\")\n result.stdout.no_fnmatch_line(\"rootdir: *test_header0, inifile: tox.ini\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_summary_TestTerminalFunctional.test_showlocals.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_no_summary_TestTerminalFunctional.test_showlocals.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 915, "end_line": 941, "span_ids": ["TestTerminalFunctional.test_no_summary", "TestTerminalFunctional.test_showlocals"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_no_summary(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_no_summary():\n assert false\n \"\"\"\n )\n result = pytester.runpytest(p1, \"--no-summary\")\n result.stdout.no_fnmatch_line(\"*= FAILURES =*\")\n\n def test_showlocals(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def test_showlocals():\n x = 3\n y = \"x\" * 5000\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p1, \"-l\")\n result.stdout.fnmatch_lines(\n [\n # \"_ _ * Locals *\",\n \"x* = 3\",\n \"y* = 'xxxxxx*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_warnings_header_once_test_terminal_summary_warnings_header_once.assert_stdout_count_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_warnings_header_once_test_terminal_summary_warnings_header_once.assert_stdout_count_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1658, "end_line": 1680, "span_ids": ["test_terminal_summary_warnings_header_once"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_terminal_summary_warnings_header_once(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_failure():\n import warnings\n warnings.warn(\"warning_from_\" + \"test\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"-ra\")\n 
result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"*warning_from_test*\",\n \"*= short test summary info =*\",\n \"*== 1 failed, 1 warning in *\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*None*\")\n stdout = result.stdout.str()\n assert stdout.count(\"warning_from_test\") == 1\n assert stdout.count(\"=== warnings summary \") == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_no_summary_warnings_header_once_tr.return.TerminalReporter_config_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_no_summary_warnings_header_once_tr.return.TerminalReporter_config_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1683, "end_line": 1701, "span_ids": ["tr", "test_terminal_no_summary_warnings_header_once"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_terminal_no_summary_warnings_header_once(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_failure():\n import warnings\n warnings.warn(\"warning_from_\" + \"test\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"--no-summary\")\n result.stdout.no_fnmatch_line(\"*= warnings summary =*\")\n result.stdout.no_fnmatch_line(\"*= short test summary info =*\")\n\n\n@pytest.fixture(scope=\"session\")\ndef tr() -> TerminalReporter:\n config = _pytest.config._prepareconfig()\n return TerminalReporter(config)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_line_with_reprcrash_test_line_with_reprcrash.None_19": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_line_with_reprcrash_test_line_with_reprcrash.None_19", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2256, "end_line": 2316, "span_ids": ["test_line_with_reprcrash.config", "test_line_with_reprcrash.config:2", "test_line_with_reprcrash.rep.longrepr", "test_line_with_reprcrash.rep.longrepr.reprcrash:2", "test_line_with_reprcrash", "test_line_with_reprcrash.rep", "test_line_with_reprcrash.rep._get_verbose_word"], "tokens": 768}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_line_with_reprcrash(monkeypatch: MonkeyPatch) -> None:\n mocked_verbose_word = \"FAILED\"\n\n mocked_pos = \"some::nodeid\"\n\n def mock_get_pos(*args):\n 
return mocked_pos\n\n monkeypatch.setattr(_pytest.terminal, \"_get_pos\", mock_get_pos)\n\n class config:\n pass\n\n class rep:\n def _get_verbose_word(self, *args):\n return mocked_verbose_word\n\n class longrepr:\n class reprcrash:\n pass\n\n def check(msg, width, expected):\n __tracebackhide__ = True\n if msg:\n rep.longrepr.reprcrash.message = msg # type: ignore\n actual = _get_line_with_reprcrash_message(config, rep(), width) # type: ignore\n\n assert actual == expected\n if actual != f\"{mocked_verbose_word} {mocked_pos}\":\n assert len(actual) <= width\n assert wcswidth(actual) <= width\n\n # AttributeError with message\n check(None, 80, \"FAILED some::nodeid\")\n\n check(\"msg\", 80, \"FAILED some::nodeid - msg\")\n check(\"msg\", 3, \"FAILED some::nodeid\")\n\n check(\"msg\", 24, \"FAILED some::nodeid\")\n check(\"msg\", 25, \"FAILED some::nodeid - msg\")\n\n check(\"some longer msg\", 24, \"FAILED some::nodeid\")\n check(\"some longer msg\", 25, \"FAILED some::nodeid - ...\")\n check(\"some longer msg\", 26, \"FAILED some::nodeid - s...\")\n\n check(\"some\\nmessage\", 25, \"FAILED some::nodeid - ...\")\n check(\"some\\nmessage\", 26, \"FAILED some::nodeid - some\")\n check(\"some\\nmessage\", 80, \"FAILED some::nodeid - some\")\n\n # Test unicode safety.\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 25, \"FAILED some::nodeid - ...\")\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 26, \"FAILED some::nodeid - ...\")\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 27, \"FAILED some::nodeid - \ud83c\ude50...\")\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 28, \"FAILED some::nodeid - \ud83c\ude50...\")\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 29, \"FAILED some::nodeid - \ud83c\ude50\ud83c\ude50...\")\n\n # NOTE: constructed, not sure if this is supported.\n mocked_pos = \"nodeid::\ud83c\ude50::withunicode\"\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 29, \"FAILED nodeid::\ud83c\ude50::withunicode\")\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 40, \"FAILED nodeid::\ud83c\ude50::withunicode - \ud83c\ude50\ud83c\ude50...\")\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 41, \"FAILED nodeid::\ud83c\ude50::withunicode - \ud83c\ude50\ud83c\ude50...\")\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_line_with_reprcrash.None_20_test_line_with_reprcrash.None_21": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_line_with_reprcrash.None_20_test_line_with_reprcrash.None_21", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2317, "end_line": 2318, "span_ids": ["test_line_with_reprcrash.rep.longrepr.reprcrash:2"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_line_with_reprcrash(monkeypatch: MonkeyPatch) -> None:\n # ... other code\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 42, \"FAILED nodeid::\ud83c\ude50::withunicode - \ud83c\ude50\ud83c\ude50\ud83c\ude50...\")\n check(\"\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\\n2nd line\", 80, \"FAILED nodeid::\ud83c\ude50::withunicode - \ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\ud83c\ude50\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_collecterror_test_collecterror.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_collecterror_test_collecterror.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2338, "end_line": 2352, "span_ids": ["test_collecterror"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collecterror(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"raise SyntaxError()\")\n result = pytester.runpytest(\"-ra\", str(p1))\n result.stdout.fnmatch_lines(\n [\n \"collected 0 items / 1 error\",\n \"*= ERRORS =*\",\n \"*_ ERROR collecting test_collecterror.py _*\",\n \"E SyntaxError: *\",\n \"*= short test summary info =*\",\n \"ERROR test_collecterror.py\",\n \"*! 
Interrupted: 1 error during collection !*\",\n \"*= 1 error in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_no_summary_collecterror_test_via_exec.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_no_summary_collecterror_test_via_exec.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2355, "end_line": 2366, "span_ids": ["test_via_exec", "test_no_summary_collecterror"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_no_summary_collecterror(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"raise SyntaxError()\")\n result = pytester.runpytest(\"-ra\", \"--no-summary\", str(p1))\n result.stdout.no_fnmatch_line(\"*= ERRORS =*\")\n\n\ndef test_via_exec(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"exec('def test_via_exec(): pass')\")\n result = pytester.runpytest(str(p1), \"-vv\")\n result.stdout.fnmatch_lines(\n [\"test_via_exec.py::test_via_exec <- PASSED*\", \"*= 1 passed in *\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_warning_captured_hook_test_warning_captured_hook.for_index_in_range_len_ex.if_collected_result_3_is.else_.assert_collected_result_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_test_warning_captured_hook_test_warning_captured_hook.for_index_in_range_len_ex.if_collected_result_3_is.else_.assert_collected_result_3", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 239, "end_line": 297, "span_ids": ["test_warning_captured_hook", "test_warning_captured_hook.WarningCollector", "test_warning_captured_hook.WarningCollector.pytest_warning_recorded"], "tokens": 513}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"always\")\ndef test_warning_captured_hook(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_configure(config):\n config.issue_config_time_warning(UserWarning(\"config warning\"), stacklevel=2)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest, warnings\n\n warnings.warn(UserWarning(\"collect warning\"))\n\n @pytest.fixture\n def fix():\n warnings.warn(UserWarning(\"setup warning\"))\n yield 1\n warnings.warn(UserWarning(\"teardown warning\"))\n\n def 
test_func(fix):\n warnings.warn(UserWarning(\"call warning\"))\n assert fix == 1\n \"\"\"\n )\n\n collected = []\n\n class WarningCollector:\n def pytest_warning_recorded(self, warning_message, when, nodeid, location):\n collected.append((str(warning_message.message), when, nodeid, location))\n\n result = pytester.runpytest(plugins=[WarningCollector()])\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n expected = [\n (\"config warning\", \"config\", \"\"),\n (\"collect warning\", \"collect\", \"\"),\n (\"setup warning\", \"runtest\", \"test_warning_captured_hook.py::test_func\"),\n (\"call warning\", \"runtest\", \"test_warning_captured_hook.py::test_func\"),\n (\"teardown warning\", \"runtest\", \"test_warning_captured_hook.py::test_func\"),\n ]\n for index in range(len(expected)):\n collected_result = collected[index]\n expected_result = expected[index]\n\n assert collected_result[0] == expected_result[0], str(collected)\n assert collected_result[1] == expected_result[1], str(collected)\n assert collected_result[2] == expected_result[2], str(collected)\n\n # NOTE: collected_result[3] is location, which differs based on the platform you are on\n # thus, the best we can do here is assert the types of the paremeters match what we expect\n # and not try and preload it in the expected array\n if collected_result[3] is not None:\n assert type(collected_result[3][0]) is str, str(collected)\n assert type(collected_result[3][1]) is int, str(collected)\n assert type(collected_result[3][2]) is str, str(collected)\n else:\n assert collected_result[3] is None, str(collected)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_ConftestImportFailure_filter_traceback_for_conftest_import_failure.return.filter_traceback_entry_a": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_ConftestImportFailure_filter_traceback_for_conftest_import_failure.return.filter_traceback_entry_a", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 105, "end_line": 129, "span_ids": ["ConftestImportFailure", "filter_traceback_for_conftest_import_failure", "ConftestImportFailure.__str__"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ConftestImportFailure(Exception):\n def __init__(\n self,\n path: Path,\n excinfo: Tuple[Type[Exception], Exception, TracebackType],\n ) -> None:\n super().__init__(path, excinfo)\n self.path = path\n self.excinfo = excinfo\n\n def __str__(self) -> str:\n return \"{}: {} (from {})\".format(\n self.excinfo[0].__name__, self.excinfo[1], self.path\n )\n\n\ndef filter_traceback_for_conftest_import_failure(\n entry: _pytest._code.TracebackEntry,\n) -> bool:\n \"\"\"Filter tracebacks entries which point to pytest internals or importlib.\n\n Make a special case for importlib because we use it to import test modules and conftest files\n in _pytest.pathlib.import_path.\n \"\"\"\n return 
filter_traceback(entry) and \"importlib\" not in str(entry.path).split(os.sep)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._consider_importhook_Config._consider_importhook.self__warn_about_missing_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._consider_importhook_Config._consider_importhook.self__warn_about_missing_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1115, "end_line": 1133, "span_ids": ["Config._consider_importhook"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _consider_importhook(self, args: Sequence[str]) -> None:\n \"\"\"Install the PEP 302 import hook if using assertion rewriting.\n\n Needs to parse the --assert= option from the commandline\n and find all the installed plugins to mark them for rewriting\n by the importhook.\n \"\"\"\n ns, unknown_args = self._parser.parse_known_and_unknown_args(args)\n mode = getattr(ns, \"assertmode\", \"plain\")\n if mode == \"rewrite\":\n import _pytest.assertion\n\n try:\n hook = _pytest.assertion.install_importhook(self)\n except SystemError:\n mode = \"plain\"\n else:\n self._mark_plugins_for_rewrite(hook)\n self._warn_about_missing_assertion(mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_conftest_options_getpluginversioninfo.return.lines": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_conftest_options_getpluginversioninfo.return.lines", "embedding": null, "metadata": {"file_path": "src/_pytest/helpconfig.py", "file_name": "helpconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 228, "end_line": 240, "span_ids": ["impl", "getpluginversioninfo"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "conftest_options = [(\"pytest_plugins\", \"list of plugin names to load\")]\n\n\ndef getpluginversioninfo(config: Config) -> List[str]:\n lines = []\n plugininfo = config.pluginmanager.list_plugin_distinfo()\n if plugininfo:\n lines.append(\"setuptools registered plugins:\")\n for plugin, dist in plugininfo:\n loc = getattr(plugin, \"__file__\", repr(plugin))\n content = f\"{dist.project_name}-{dist.version} at {loc}\"\n lines.append(\" \" + content)\n return lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_report_header_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py_pytest_report_header_", "embedding": null, "metadata": {"file_path": "src/_pytest/helpconfig.py", "file_name": "helpconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 243, "end_line": 262, "span_ids": ["pytest_report_header"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_report_header(config: Config) -> List[str]:\n lines = []\n if config.option.debug or config.option.traceconfig:\n lines.append(f\"using: pytest-{pytest.__version__} pylib-{py.__version__}\")\n\n verinfo = getpluginversioninfo(config)\n if verinfo:\n lines.extend(verinfo)\n\n if config.option.traceconfig:\n lines.append(\"active plugins:\")\n items = config.pluginmanager.list_name_plugin()\n for name, plugin in items:\n if hasattr(plugin, \"__file__\"):\n r = plugin.__file__\n else:\n r = repr(plugin)\n lines.append(f\" {name:<20}: {r}\")\n return lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_collection_modifyitems_pytest_collection_finish._Called_after_collectio": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_collection_modifyitems_pytest_collection_finish._Called_after_collectio", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 245, "end_line": 261, "span_ids": ["pytest_collection_modifyitems", "pytest_collection_finish"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_collection_modifyitems(\n session: \"Session\", config: \"Config\", items: List[\"Item\"]\n) -> None:\n \"\"\"Called after collection has been performed. 
May filter or re-order\n the items in-place.\n\n :param pytest.Session session: The pytest session object.\n :param _pytest.config.Config config: The pytest config object.\n :param List[pytest.Item] items: List of item objects.\n \"\"\"\n\n\ndef pytest_collection_finish(session: \"Session\") -> None:\n \"\"\"Called after collection has been performed and modified.\n\n :param pytest.Session session: The pytest session object.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_ignore_collect_pytest_ignore_collect._Return_True_to_prevent": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_ignore_collect_pytest_ignore_collect._Return_True_to_prevent", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 264, "end_line": 282, "span_ids": ["pytest_ignore_collect"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(firstresult=True)\ndef pytest_ignore_collect(\n fspath: Path, path: py.path.local, config: \"Config\"\n) -> Optional[bool]:\n \"\"\"Return True to prevent considering this path for collection.\n\n This hook is consulted for all files and directories prior to calling\n more specific hooks.\n\n Stops at first non-None result, see :ref:`firstresult`.\n\n :param pathlib.Path fspath: The path to analyze.\n :param py.path.local path: The path to analyze.\n :param _pytest.config.Config config: The pytest config object.\n\n .. 
versionchanged:: 6.3.0\n The ``fspath`` parameter was added as a :class:`pathlib.Path`\n equivalent of the ``path`` parameter.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_12_pytest_pycollect_makemodule._Return_a_Module_collec": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_12_pytest_pycollect_makemodule._Return_a_Module_collec", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 331, "end_line": 354, "span_ids": ["pytest_pycollect_makemodule", "pytest_make_collect_report"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# Python test function related hooks\n# -------------------------------------------------------------------------\n\n\n@hookspec(firstresult=True)\ndef pytest_pycollect_makemodule(\n fspath: Path, path: py.path.local, parent\n) -> Optional[\"Module\"]:\n \"\"\"Return a Module collector or None for the given path.\n\n This hook will be called for each matching test module path.\n The pytest_collect_file hook needs to be used if you want to\n create test modules for files that do not match as a test module.\n\n Stops at first non-None result, see :ref:`firstresult`.\n\n :param pathlib.Path fspath: The path of the module to collect.\n :param py.path.local path: The path of the module to collect.\n\n .. 
versionchanged:: 6.3.0\n The ``fspath`` parameter was added as a :class:`pathlib.Path`\n equivalent of the ``path`` parameter.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_make_parametrize_id_pytest_make_parametrize_id._Return_a_user_friendly": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_make_parametrize_id_pytest_make_parametrize_id._Return_a_user_friendly", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 379, "end_line": 394, "span_ids": ["pytest_make_parametrize_id"], "tokens": 147}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(firstresult=True)\ndef pytest_make_parametrize_id(\n config: \"Config\", val: object, argname: str\n) -> Optional[str]:\n \"\"\"Return a user-friendly string representation of the given ``val``\n that will be used by @pytest.mark.parametrize calls, or None if the hook\n doesn't know about ``val``.\n\n The parameter name is available as ``argname``, if required.\n\n Stops at first non-None result, see :ref:`firstresult`.\n\n :param _pytest.config.Config config: The pytest config object.\n :param val: The parametrized value.\n :param str argname: The automatic parameter name produced by pytest.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_logstart_pytest_runtest_call._Called_to_run_the_test": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_logstart_pytest_runtest_call._Called_to_run_the_test", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 461, "end_line": 499, "span_ids": ["pytest_runtest_logstart", "pytest_runtest_call", "pytest_runtest_setup", "pytest_runtest_logfinish"], "tokens": 318}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_runtest_logstart(\n nodeid: str, location: Tuple[str, Optional[int], str]\n) -> None:\n \"\"\"Called at the start of running the runtest protocol for a single item.\n\n See :func:`pytest_runtest_protocol` for a description of the runtest protocol.\n\n :param str nodeid: Full node ID of the item.\n :param location: A tuple of ``(filename, lineno, testname)``.\n \"\"\"\n\n\ndef pytest_runtest_logfinish(\n nodeid: str, location: Tuple[str, Optional[int], str]\n) -> None:\n \"\"\"Called at the end of running the runtest 
protocol for a single item.\n\n See :func:`pytest_runtest_protocol` for a description of the runtest protocol.\n\n :param str nodeid: Full node ID of the item.\n :param location: A tuple of ``(filename, lineno, testname)``.\n \"\"\"\n\n\ndef pytest_runtest_setup(item: \"Item\") -> None:\n \"\"\"Called to perform the setup phase for a test item.\n\n The default implementation runs ``setup()`` on ``item`` and all of its\n parents (which haven't been setup yet). This includes obtaining the\n values of fixtures required by the item (which haven't been obtained\n yet).\n \"\"\"\n\n\ndef pytest_runtest_call(item: \"Item\") -> None:\n \"\"\"Called to run the test for test item (the call phase).\n\n The default implementation calls ``item.runtest()``.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_teardown_pytest_runtest_teardown._Called_to_perform_the_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_teardown_pytest_runtest_teardown._Called_to_perform_the_", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 502, "end_line": 515, "span_ids": ["pytest_runtest_teardown"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_runtest_teardown(item: \"Item\", nextitem: Optional[\"Item\"]) -> None:\n \"\"\"Called to perform the teardown phase for a test item.\n\n The default implementation runs the finalizers and calls ``teardown()``\n on ``item`` and all of its parents (which need to be torn down). This\n includes running the teardown phase of fixtures required by the item (if\n they go out of scope).\n\n :param nextitem:\n The scheduled-to-be-next test item (None if no further test item is\n scheduled). 
This argument is used to perform exact teardowns, i.e.\n calling just enough finalizers so that nextitem only needs to call\n setup functions.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_makereport_pytest_runtest_makereport._Called_to_create_a_py": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_makereport_pytest_runtest_makereport._Called_to_create_a_py", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 488, "end_line": 500, "span_ids": ["pytest_runtest_makereport"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(firstresult=True)\ndef pytest_runtest_makereport(\n item: \"Item\", call: \"CallInfo[None]\"\n) -> Optional[\"TestReport\"]:\n \"\"\"Called to create a :py:class:`_pytest.reports.TestReport` for each of\n the setup, call and teardown runtest phases of a test item.\n\n See :func:`pytest_runtest_protocol` for a description of the runtest protocol.\n\n :param CallInfo[None] call: The ``CallInfo`` for the phase.\n\n Stops at first non-None result, see :ref:`firstresult`.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_fixture_post_finalizer_pytest_unconfigure._Called_before_test_pro": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_fixture_post_finalizer_pytest_unconfigure._Called_before_test_pro", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 580, "end_line": 616, "span_ids": ["pytest_sessionfinish", "pytest_unconfigure", "pytest_sessionstart", "pytest_fixture_post_finalizer"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_fixture_post_finalizer(\n fixturedef: \"FixtureDef[Any]\", request: \"SubRequest\"\n) -> None:\n \"\"\"Called after fixture teardown, but before the cache is cleared, so\n the fixture result ``fixturedef.cached_result`` is still available (not\n ``None``).\"\"\"\n\n\n# -------------------------------------------------------------------------\n# test session related hooks\n# -------------------------------------------------------------------------\n\n\ndef pytest_sessionstart(session: \"Session\") -> None:\n \"\"\"Called after the ``Session`` object has been created and before performing collection\n and entering the run test loop.\n\n :param 
pytest.Session session: The pytest session object.\n \"\"\"\n\n\ndef pytest_sessionfinish(\n session: \"Session\",\n exitstatus: Union[int, \"ExitCode\"],\n) -> None:\n \"\"\"Called after whole test run finished, right before returning the exit status to the system.\n\n :param pytest.Session session: The pytest session object.\n :param int exitstatus: The status which pytest will return to the system.\n \"\"\"\n\n\ndef pytest_unconfigure(config: \"Config\") -> None:\n \"\"\"Called before test process is exited.\n\n :param _pytest.config.Config config: The pytest config object.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_24_pytest_assertrepr_compare._Return_explanation_for": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_24_pytest_assertrepr_compare._Return_explanation_for", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 619, "end_line": 635, "span_ids": ["pytest_assertrepr_compare", "pytest_unconfigure"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# hooks for customizing the assert methods\n# -------------------------------------------------------------------------\n\n\ndef pytest_assertrepr_compare(\n config: \"Config\", op: str, left: object, right: object\n) -> Optional[List[str]]:\n \"\"\"Return explanation for comparisons in failing assert expressions.\n\n Return None for no custom explanation, otherwise return a list\n of strings. The strings will be joined by newlines but any newlines\n *in* a string will be escaped. 
Note that all but the first line will\n be indented slightly, the intention is for the first line to be a summary.\n\n :param _pytest.config.Config config: The pytest config object.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_exception_interact_pytest_exception_interact._Called_when_an_excepti": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_exception_interact_pytest_exception_interact._Called_when_an_excepti", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 890, "end_line": 906, "span_ids": ["pytest_exception_interact"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_exception_interact(\n node: Union[\"Item\", \"Collector\"],\n call: \"CallInfo[Any]\",\n report: Union[\"CollectReport\", \"TestReport\"],\n) -> None:\n \"\"\"Called when an exception was raised which can potentially be\n interactively handled.\n\n May be called during collection (see :py:func:`pytest_make_collect_report`),\n in which case ``report`` is a :py:class:`_pytest.reports.CollectReport`.\n\n May be called during runtest of an item (see :py:func:`pytest_runtest_protocol`),\n in which case ``report`` is a :py:class:`_pytest.reports.TestReport`.\n\n This hook is not called if the exception that was raised is an internal\n exception like ``skip.Exception``.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_enter_pdb_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_enter_pdb_", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 909, "end_line": 929, "span_ids": ["pytest_leave_pdb", "pytest_enter_pdb"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_enter_pdb(config: \"Config\", pdb: \"pdb.Pdb\") -> None:\n \"\"\"Called upon pdb.set_trace().\n\n Can be used by plugins to take special action just before the python\n debugger enters interactive mode.\n\n :param _pytest.config.Config config: The pytest config object.\n :param pdb.Pdb pdb: The Pdb instance.\n \"\"\"\n\n\ndef pytest_leave_pdb(config: \"Config\", pdb: \"pdb.Pdb\") -> None:\n \"\"\"Called when leaving pdb (e.g. 
with continue after pdb.set_trace()).\n\n Can be used by plugins to take special action just after the python\n debugger leaves interactive mode.\n\n :param _pytest.config.Config config: The pytest config object.\n :param pdb.Pdb pdb: The Pdb instance.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_cmdline_main_pytest_collection.session_perform_collect_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_cmdline_main_pytest_collection.session_perform_collect_", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 318, "end_line": 336, "span_ids": ["_main", "pytest_cmdline_main", "pytest_collection"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_cmdline_main(config: Config) -> Union[int, ExitCode]:\n return wrap_session(config, _main)\n\n\ndef _main(config: Config, session: \"Session\") -> Optional[Union[int, ExitCode]]:\n \"\"\"Default command line protocol for initialization, session,\n running tests and reporting.\"\"\"\n config.hook.pytest_collection(session=session)\n config.hook.pytest_runtestloop(session=session)\n\n if session.testsfailed:\n return ExitCode.TESTS_FAILED\n elif session.testscollected == 0:\n return ExitCode.NO_TESTS_COLLECTED\n return None\n\n\ndef pytest_collection(session: \"Session\") -> None:\n session.perform_collect()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_get_empty_parameterset_mark_get_empty_parameterset_mark.return.mark": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_get_empty_parameterset_mark_get_empty_parameterset_mark.return.mark", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 44, "end_line": 70, "span_ids": ["get_empty_parameterset_mark"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_empty_parameterset_mark(\n config: Config, argnames: Sequence[str], func\n) -> \"MarkDecorator\":\n from ..nodes import Collector\n\n fs, lineno = getfslineno(func)\n reason = \"got empty parameter set %r, function %s at %s:%d\" % (\n argnames,\n func.__name__,\n fs,\n lineno,\n )\n\n requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION)\n if requested_mark in (\"\", None, \"skip\"):\n mark = MARK_GEN.skip(reason=reason)\n elif requested_mark == \"xfail\":\n 
mark = MARK_GEN.xfail(reason=reason, run=False)\n elif requested_mark == \"fail_at_collect\":\n f_name = func.__name__\n _, lineno = getfslineno(func)\n raise Collector.CollectError(\n \"Empty parameter set in '%s' at line %d\" % (f_name, lineno + 1)\n )\n else:\n raise LookupError(requested_mark)\n return mark", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_get_unpacked_marks_normalize_mark_list.return._x_for_x_in_extracted_if_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_get_unpacked_marks_normalize_mark_list.return._x_for_x_in_extracted_if_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 371, "end_line": 391, "span_ids": ["normalize_mark_list", "get_unpacked_marks"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_unpacked_marks(obj) -> List[Mark]:\n \"\"\"Obtain the unpacked marks that are stored on an object.\"\"\"\n mark_list = getattr(obj, \"pytestmark\", [])\n if not isinstance(mark_list, list):\n mark_list = [mark_list]\n return normalize_mark_list(mark_list)\n\n\ndef normalize_mark_list(mark_list: Iterable[Union[Mark, MarkDecorator]]) -> List[Mark]:\n \"\"\"Normalize marker decorating helpers to mark objects.\n\n :type List[Union[Mark, Markdecorator]] mark_list:\n :rtype: List[Mark]\n \"\"\"\n extracted = [\n getattr(mark, \"mark\", mark) for mark in mark_list\n ] # unpack MarkDecorator\n for mark in extracted:\n if not isinstance(mark, Mark):\n raise TypeError(f\"got {mark!r} instead of Mark\")\n return [x for x in extracted if isinstance(x, Mark)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkGenerator.__getattr___MarkGenerator.__getattr__.return.MarkDecorator_Mark_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkGenerator.__getattr___MarkGenerator.__getattr__.return.MarkDecorator_Mark_name_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 501, "end_line": 540, "span_ids": ["MarkGenerator.__getattr__"], "tokens": 443}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MarkGenerator:\n\n def __getattr__(self, name: str) -> MarkDecorator:\n \"\"\"Generate a new :class:`MarkDecorator` with the given name.\"\"\"\n if name[0] == \"_\":\n raise AttributeError(\"Marker name must NOT 
start with underscore\")\n\n if self._config is not None:\n # We store a set of markers as a performance optimisation - if a mark\n # name is in the set we definitely know it, but a mark may be known and\n # not in the set. We therefore start by updating the set!\n if name not in self._markers:\n for line in self._config.getini(\"markers\"):\n # example lines: \"skipif(condition): skip the given test if...\"\n # or \"hypothesis: tests which use Hypothesis\", so to get the\n # marker name we split on both `:` and `(`.\n marker = line.split(\":\")[0].split(\"(\")[0].strip()\n self._markers.add(marker)\n\n # If the name is not in the set of known marks after updating,\n # then it really is time to issue a warning or an error.\n if name not in self._markers:\n if self._config.option.strict_markers or self._config.option.strict:\n fail(\n f\"{name!r} not found in `markers` configuration option\",\n pytrace=False,\n )\n\n # Raise a specific error for common misspellings of \"parametrize\".\n if name in [\"parameterize\", \"parametrise\", \"parameterise\"]:\n __tracebackhide__ = True\n fail(f\"Unknown '{name}' mark, did you mean 'parametrize'?\")\n\n warnings.warn(\n \"Unknown pytest.mark.%s - is this a typo? You can register \"\n \"custom marks to avoid this warning - for details, see \"\n \"https://docs.pytest.org/en/stable/mark.html\" % name,\n PytestUnknownMarkWarning,\n 2,\n )\n\n return MarkDecorator(Mark(name, (), {}, _ispytest=True), _ispytest=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.parseoutcomes_RunResult.parseoutcomes.return.self_parse_summary_nouns_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.parseoutcomes_RunResult.parseoutcomes.return.self_parse_summary_nouns_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 544, "end_line": 554, "span_ids": ["RunResult.parseoutcomes"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RunResult:\n\n def parseoutcomes(self) -> Dict[str, int]:\n \"\"\"Return a dictionary of outcome noun -> count from parsing the terminal\n output that the test process produced.\n\n The returned nouns will always be in plural form::\n\n ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ====\n\n Will return ``{\"failed\": 1, \"passed\": 1, \"warnings\": 1, \"errors\": 1}``.\n \"\"\"\n return self.parse_summary_nouns(self.outlines)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.parse_summary_nouns_RunResult.parse_summary_nouns.return._to_plural_get_k_k_v_f": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.parse_summary_nouns_RunResult.parse_summary_nouns.return._to_plural_get_k_k_v_f", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 556, "end_line": 578, "span_ids": ["RunResult.parse_summary_nouns"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RunResult:\n\n @classmethod\n def parse_summary_nouns(cls, lines) -> Dict[str, int]:\n \"\"\"Extract the nouns from a pytest terminal summary line.\n\n It always returns the plural noun for consistency::\n\n ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ====\n\n Will return ``{\"failed\": 1, \"passed\": 1, \"warnings\": 1, \"errors\": 1}``.\n \"\"\"\n for line in reversed(lines):\n if rex_session_duration.search(line):\n outcomes = rex_outcome.findall(line)\n ret = {noun: int(count) for (count, noun) in outcomes}\n break\n else:\n raise ValueError(\"Pytest terminal summary report not found\")\n\n to_plural = {\n \"warning\": \"warnings\",\n \"error\": \"errors\",\n }\n return {to_plural.get(k, k): v for k, v in ret.items()}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_deprecated_call_3_deprecated_call_3.return.warns_DeprecationWarning": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_deprecated_call_3_deprecated_call_3.return.warns_DeprecationWarning", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 52, "end_line": 81, "span_ids": ["deprecated_call_3"], "tokens": 291}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def deprecated_call(\n func: Optional[Callable[..., Any]] = None, *args: Any, **kwargs: Any\n) -> Union[\"WarningsRecorder\", Any]:\n \"\"\"Assert that code produces a ``DeprecationWarning`` or ``PendingDeprecationWarning``.\n\n This function can be used as a context manager::\n\n >>> import warnings\n >>> def api_call_v2():\n ... warnings.warn('use v3 of this api', DeprecationWarning)\n ... return 200\n\n >>> import pytest\n >>> with pytest.deprecated_call():\n ... assert api_call_v2() == 200\n\n It can also be used by passing a function and ``*args`` and ``**kwargs``,\n in which case it will ensure calling ``func(*args, **kwargs)`` produces one of\n the warnings types above. 
The return value is the return value of the function.\n\n In the context manager form you may use the keyword argument ``match`` to assert\n that the warning matches a text or regex.\n\n The context manager produces a list of :class:`warnings.WarningMessage` objects,\n one for each warning raised.\n \"\"\"\n __tracebackhide__ = True\n if func is not None:\n args = (func,) + args\n return warns((DeprecationWarning, PendingDeprecationWarning), *args, **kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_warns_6_warns_6.if_not_args_.else_.with_WarningsChecker_expe.return.func_args_1_kwargs_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_warns_6_warns_6.if_not_args_.else_.with_WarningsChecker_expe.return.func_args_1_kwargs_", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 103, "end_line": 156, "span_ids": ["warns_6"], "tokens": 493}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def warns(\n expected_warning: Optional[Union[Type[Warning], Tuple[Type[Warning], ...]]],\n *args: Any,\n match: Optional[Union[str, Pattern[str]]] = None,\n **kwargs: Any,\n) -> Union[\"WarningsChecker\", Any]:\n r\"\"\"Assert that code raises a particular class of warning.\n\n Specifically, the parameter ``expected_warning`` can be a warning class or\n sequence of warning classes, and the inside the ``with`` block must issue a warning of that class or\n classes.\n\n This helper produces a list of :class:`warnings.WarningMessage` objects,\n one for each warning raised.\n\n This function can be used as a context manager, or any of the other ways\n :func:`pytest.raises` can be used::\n\n >>> import pytest\n >>> with pytest.warns(RuntimeWarning):\n ... warnings.warn(\"my warning\", RuntimeWarning)\n\n In the context manager form you may use the keyword argument ``match`` to assert\n that the warning matches a text or regex::\n\n >>> with pytest.warns(UserWarning, match='must be 0 or None'):\n ... warnings.warn(\"value must be 0 or None\", UserWarning)\n\n >>> with pytest.warns(UserWarning, match=r'must be \\d+$'):\n ... warnings.warn(\"value must be 42\", UserWarning)\n\n >>> with pytest.warns(UserWarning, match=r'must be \\d+$'):\n ... warnings.warn(\"this is not here\", UserWarning)\n Traceback (most recent call last):\n ...\n Failed: DID NOT WARN. No warnings of type ...UserWarning... 
was emitted...\n\n \"\"\"\n __tracebackhide__ = True\n if not args:\n if kwargs:\n msg = \"Unexpected keyword arguments passed to pytest.warns: \"\n msg += \", \".join(sorted(kwargs))\n msg += \"\\nUse context-manager form instead?\"\n raise TypeError(msg)\n return WarningsChecker(expected_warning, match_expr=match, _ispytest=True)\n else:\n func = args[0]\n if not callable(func):\n raise TypeError(\n \"{!r} object (type: {}) must be callable\".format(func, type(func))\n )\n with WarningsChecker(expected_warning, _ispytest=True):\n return func(*args[1:], **kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_evaluate_condition_evaluate_condition.return.result_reason": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_evaluate_condition_evaluate_condition.return.result_reason", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 85, "end_line": 157, "span_ids": ["evaluate_condition"], "tokens": 560}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def evaluate_condition(item: Item, mark: Mark, condition: object) -> Tuple[bool, str]:\n \"\"\"Evaluate a single skipif/xfail condition.\n\n If an old-style string condition is given, it is eval()'d, otherwise the\n condition is bool()'d. If this fails, an appropriately formatted pytest.fail\n is raised.\n\n Returns (result, reason). 
The reason is only relevant if the result is True.\n \"\"\"\n # String condition.\n if isinstance(condition, str):\n globals_ = {\n \"os\": os,\n \"sys\": sys,\n \"platform\": platform,\n \"config\": item.config,\n }\n for dictionary in reversed(\n item.ihook.pytest_markeval_namespace(config=item.config)\n ):\n if not isinstance(dictionary, Mapping):\n raise ValueError(\n \"pytest_markeval_namespace() needs to return a dict, got {!r}\".format(\n dictionary\n )\n )\n globals_.update(dictionary)\n if hasattr(item, \"obj\"):\n globals_.update(item.obj.__globals__) # type: ignore[attr-defined]\n try:\n filename = f\"<{mark.name} condition>\"\n condition_code = compile(condition, filename, \"eval\")\n result = eval(condition_code, globals_)\n except SyntaxError as exc:\n msglines = [\n \"Error evaluating %r condition\" % mark.name,\n \" \" + condition,\n \" \" + \" \" * (exc.offset or 0) + \"^\",\n \"SyntaxError: invalid syntax\",\n ]\n fail(\"\\n\".join(msglines), pytrace=False)\n except Exception as exc:\n msglines = [\n \"Error evaluating %r condition\" % mark.name,\n \" \" + condition,\n *traceback.format_exception_only(type(exc), exc),\n ]\n fail(\"\\n\".join(msglines), pytrace=False)\n\n # Boolean condition.\n else:\n try:\n result = bool(condition)\n except Exception as exc:\n msglines = [\n \"Error evaluating %r condition as a boolean\" % mark.name,\n *traceback.format_exception_only(type(exc), exc),\n ]\n fail(\"\\n\".join(msglines), pytrace=False)\n\n reason = mark.kwargs.get(\"reason\", None)\n if reason is None:\n if isinstance(condition, str):\n reason = \"condition: \" + condition\n else:\n # XXX better be checked at collection time\n msg = (\n \"Error evaluating %r: \" % mark.name\n + \"you need to specify reason=STRING when using booleans as conditions.\"\n )\n fail(msg, pytrace=False)\n\n return result, reason", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_Skip_evaluate_skip_marks.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_Skip_evaluate_skip_marks.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 152, "end_line": 187, "span_ids": ["evaluate_skip_marks", "Skip"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(slots=True, frozen=True)\nclass Skip:\n \"\"\"The result of evaluate_skip_marks().\"\"\"\n\n reason = attr.ib(type=str)\n\n\ndef evaluate_skip_marks(item: Item) -> Optional[Skip]:\n \"\"\"Evaluate skip and skipif marks on item, returning Skip if triggered.\"\"\"\n for mark in item.iter_markers(name=\"skipif\"):\n if \"condition\" not in mark.kwargs:\n conditions = mark.args\n else:\n conditions = (mark.kwargs[\"condition\"],)\n\n # Unconditional.\n if not conditions:\n reason = mark.kwargs.get(\"reason\", \"\")\n return Skip(reason)\n\n # If any of the conditions are true.\n for condition in conditions:\n result, reason = evaluate_condition(item, mark, 
condition)\n if result:\n return Skip(reason)\n\n for mark in item.iter_markers(name=\"skip\"):\n if \"reason\" in mark.kwargs:\n reason = mark.kwargs[\"reason\"]\n elif mark.args:\n reason = mark.args[0]\n else:\n reason = \"unconditional skip\"\n return Skip(reason)\n\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_Xfail_evaluate_xfail_marks.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_Xfail_evaluate_xfail_marks.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 198, "end_line": 230, "span_ids": ["Xfail", "evaluate_xfail_marks"], "tokens": 259}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@attr.s(slots=True, frozen=True)\nclass Xfail:\n \"\"\"The result of evaluate_xfail_marks().\"\"\"\n\n reason = attr.ib(type=str)\n run = attr.ib(type=bool)\n strict = attr.ib(type=bool)\n raises = attr.ib(type=Optional[Tuple[Type[BaseException], ...]])\n\n\ndef evaluate_xfail_marks(item: Item) -> Optional[Xfail]:\n \"\"\"Evaluate xfail marks on item, returning Xfail if triggered.\"\"\"\n for mark in item.iter_markers(name=\"xfail\"):\n run = mark.kwargs.get(\"run\", True)\n strict = mark.kwargs.get(\"strict\", item.config.getini(\"xfail_strict\"))\n raises = mark.kwargs.get(\"raises\", None)\n if \"condition\" not in mark.kwargs:\n conditions = mark.args\n else:\n conditions = (mark.kwargs[\"condition\"],)\n\n # Unconditional.\n if not conditions:\n reason = mark.kwargs.get(\"reason\", \"\")\n return Xfail(reason, run, strict, raises)\n\n # If any of the conditions are true.\n for condition in conditions:\n result, reason = evaluate_condition(item, mark, condition)\n if result:\n return Xfail(reason, run, strict, raises)\n\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_no_python_sourcecode_test_excinfo_no_python_sourcecode.for_item_in_excinfo_trace.if_isinstance_item_path_.assert_str_item_source_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_test_excinfo_no_python_sourcecode_test_excinfo_no_python_sourcecode.for_item_in_excinfo_trace.if_isinstance_item_path_.assert_str_item_source_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 367, "end_line": 380, "span_ids": ["test_excinfo_no_python_sourcecode"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "def test_excinfo_no_python_sourcecode(tmp_path: Path) -> None:\n # XXX: simplified locally testable version\n tmp_path.joinpath(\"test.txt\").write_text(\"{{ h()}}:\")\n\n jinja2 = pytest.importorskip(\"jinja2\")\n loader = jinja2.FileSystemLoader(str(tmp_path))\n env = jinja2.Environment(loader=loader)\n template = env.get_template(\"test.txt\")\n excinfo = pytest.raises(ValueError, template.render, h=h)\n for item in excinfo.traceback:\n print(item) # XXX: for some reason jinja.Template.render is printed in full\n item.source # shouldn't fail\n if isinstance(item.path, Path) and item.path.name == \"test.txt\":\n assert str(item.source) == \"{{ h()}}:\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_negative_tolerance_TestApprox.test_negative_tolerance.with_pytest_raises_ValueE.1_1_approx_1_rel_abs": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_negative_tolerance_TestApprox.test_negative_tolerance.with_pytest_raises_ValueE.1_1_approx_1_rel_abs", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 125, "end_line": 140, "span_ids": ["TestApprox.test_negative_tolerance"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n @pytest.mark.parametrize(\n (\"rel\", \"abs\"),\n [\n (-1e100, None),\n (None, -1e100),\n (1e100, -1e100),\n (-1e100, 1e100),\n (-1e100, -1e100),\n ],\n )\n def test_negative_tolerance(\n self, rel: Optional[float], abs: Optional[float]\n ) -> None:\n # Negative tolerances are not allowed.\n with pytest.raises(ValueError):\n 1.1 == approx(1, rel, abs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_warn_missing_test_warn_missing.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_test_warn_missing_test_warn_missing.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1449, "end_line": 1478, "span_ids": ["test_warn_missing"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(\"'__pypy__' in sys.builtin_module_names\")\n@pytest.mark.parametrize(\n \"cmdline_args, warning_output\",\n [\n (\n [\"-OO\", \"-m\", \"pytest\", \"-h\"],\n [\"warning :*PytestConfigWarning:*assert 
statements are not executed*\"],\n ),\n (\n [\"-OO\", \"-m\", \"pytest\"],\n [\n \"=*= warnings summary =*=\",\n \"*PytestConfigWarning:*assert statements are not executed*\",\n ],\n ),\n (\n [\"-OO\", \"-m\", \"pytest\", \"--assert=plain\"],\n [\n \"=*= warnings summary =*=\",\n \"*PytestConfigWarning: ASSERTIONS ARE NOT EXECUTED and FAILING TESTS WILL PASS. \"\n \"Are you using python -O?\",\n ],\n ),\n ],\n)\ndef test_warn_missing(pytester: Pytester, cmdline_args, warning_output) -> None:\n pytester.makepyfile(\"\")\n\n result = pytester.run(sys.executable, *cmdline_args)\n result.stdout.fnmatch_lines(warning_output)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_call_failure_teardown_error_TestPython.test_call_failure_teardown_error.snode_assert_attr_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_call_failure_teardown_error_TestPython.test_call_failure_teardown_error.snode_assert_attr_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 333, "end_line": 362, "span_ids": ["TestPython.test_call_failure_teardown_error"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n @parametrize_families\n def test_call_failure_teardown_error(\n self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def arg():\n yield\n raise Exception(\"Teardown Exception\")\n def test_function(arg):\n raise Exception(\"Call Exception\")\n \"\"\"\n )\n result, dom = run_and_parse(family=xunit_family)\n assert result.ret\n node = dom.find_first_by_tag(\"testsuite\")\n node.assert_attr(errors=1, failures=1, tests=1)\n first, second = dom.find_by_tag(\"testcase\")\n assert first\n assert second\n assert first != second\n fnode = first.find_first_by_tag(\"failure\")\n fnode.assert_attr(message=\"Exception: Call Exception\")\n snode = second.find_first_by_tag(\"error\")\n snode.assert_attr(\n message='failed on teardown with \"Exception: Teardown Exception\"'\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_reevaluate_dynamic_expr_TestFunctional.test_reevaluate_dynamic_expr.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_reevaluate_dynamic_expr_TestFunctional.test_reevaluate_dynamic_expr.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 732, "end_line": 760, "span_ids": ["TestFunctional.test_reevaluate_dynamic_expr"], "tokens": 190}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_reevaluate_dynamic_expr(self, pytester: Pytester) -> None:\n \"\"\"#7360\"\"\"\n py_file1 = pytester.makepyfile(\n test_reevaluate_dynamic_expr1=\"\"\"\n import pytest\n\n skip = True\n\n @pytest.mark.skipif(\"skip\")\n def test_should_skip():\n assert True\n \"\"\"\n )\n py_file2 = pytester.makepyfile(\n test_reevaluate_dynamic_expr2=\"\"\"\n import pytest\n\n skip = False\n\n @pytest.mark.skipif(\"skip\")\n def test_should_not_skip():\n assert True\n \"\"\"\n )\n\n file_name1 = os.path.basename(py_file1)\n file_name2 = os.path.basename(py_file2)\n reprec = pytester.inline_run(file_name1, file_name2)\n reprec.assertoutcome(passed=1, skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_sys_TestEvaluation.test_marked_one_arg_with_reason.assert_skipped_reason_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_sys_TestEvaluation.test_marked_one_arg_with_reason.assert_skipped_reason_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 69, "span_ids": ["TestEvaluation.test_no_marker", "TestEvaluation", "TestEvaluation.test_marked_one_arg_with_reason", "TestEvaluation.test_marked_one_arg", "imports", "TestEvaluation.test_marked_xfail_no_args", "TestEvaluation.test_marked_skipif_no_args"], "tokens": 435}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport textwrap\n\nimport pytest\nfrom _pytest.pytester import Pytester\nfrom _pytest.runner import runtestprotocol\nfrom _pytest.skipping import evaluate_skip_marks\nfrom _pytest.skipping import evaluate_xfail_marks\nfrom _pytest.skipping import pytest_runtest_setup\n\n\nclass TestEvaluation:\n def test_no_marker(self, pytester: Pytester) -> None:\n item = pytester.getitem(\"def test_func(): pass\")\n skipped = evaluate_skip_marks(item)\n assert not skipped\n\n def test_marked_xfail_no_args(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test_func():\n pass\n \"\"\"\n )\n xfailed = evaluate_xfail_marks(item)\n assert xfailed\n assert xfailed.reason == \"\"\n assert xfailed.run\n\n def test_marked_skipif_no_args(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.skipif\n def test_func():\n pass\n \"\"\"\n )\n skipped = evaluate_skip_marks(item)\n assert skipped\n assert skipped.reason == \"\"\n\n def test_marked_one_arg(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.skipif(\"hasattr(os, 'sep')\")\n def test_func():\n pass\n 
\"\"\"\n )\n skipped = evaluate_skip_marks(item)\n assert skipped\n assert skipped.reason == \"condition: hasattr(os, 'sep')\"\n\n def test_marked_one_arg_with_reason(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.skipif(\"hasattr(os, 'sep')\", attr=2, reason=\"hello world\")\n def test_func():\n pass\n \"\"\"\n )\n skipped = evaluate_skip_marks(item)\n assert skipped\n assert skipped.reason == \"hello world\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_one_arg_twice_TestEvaluation.test_marked_one_arg_twice.for_i_in_range_0_2_.assert_skipped_reason_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_one_arg_twice_TestEvaluation.test_marked_one_arg_twice.for_i_in_range_0_2_.assert_skipped_reason_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 89, "span_ids": ["TestEvaluation.test_marked_one_arg_twice"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEvaluation:\n\n def test_marked_one_arg_twice(self, pytester: Pytester) -> None:\n lines = [\n \"\"\"@pytest.mark.skipif(\"not hasattr(os, 'murks')\")\"\"\",\n \"\"\"@pytest.mark.skipif(condition=\"hasattr(os, 'murks')\")\"\"\",\n ]\n for i in range(0, 2):\n item = pytester.getitem(\n \"\"\"\n import pytest\n %s\n %s\n def test_func():\n pass\n \"\"\"\n % (lines[i], lines[(i + 1) % 2])\n )\n skipped = evaluate_skip_marks(item)\n assert skipped\n assert skipped.reason == \"condition: not hasattr(os, 'murks')\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_skipif_with_invalid_boolean_TestEvaluation.test_skipif_class.assert_skipped_reason_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_skipif_with_invalid_boolean_TestEvaluation.test_skipif_class.assert_skipped_reason_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 124, "end_line": 157, "span_ids": ["TestEvaluation.test_skipif_class", "TestEvaluation.test_marked_skipif_with_invalid_boolean"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEvaluation:\n\n def test_marked_skipif_with_invalid_boolean(self, pytester: Pytester) -> None:\n item = 
pytester.getitem(\n \"\"\"\n import pytest\n\n class InvalidBool:\n def __bool__(self):\n raise TypeError(\"INVALID\")\n\n @pytest.mark.skipif(InvalidBool(), reason=\"xxx\")\n def test_func():\n pass\n \"\"\"\n )\n with pytest.raises(pytest.fail.Exception) as excinfo:\n evaluate_skip_marks(item)\n assert excinfo.value.msg is not None\n assert \"Error evaluating 'skipif' condition as a boolean\" in excinfo.value.msg\n assert \"INVALID\" in excinfo.value.msg\n\n def test_skipif_class(self, pytester: Pytester) -> None:\n (item,) = pytester.getitems(\n \"\"\"\n import pytest\n class TestClass(object):\n pytestmark = pytest.mark.skipif(\"config._hackxyz\")\n def test_func(self):\n pass\n \"\"\"\n )\n item.config._hackxyz = 3 # type: ignore[attr-defined]\n skipped = evaluate_skip_marks(item)\n assert skipped\n assert skipped.reason == \"condition: config._hackxyz\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_default_markers_test_xfail_test_setup_exception.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_test_default_markers_test_xfail_test_setup_exception.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1163, "end_line": 1191, "span_ids": ["test_xfail_test_setup_exception", "test_default_markers"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_default_markers(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--markers\")\n result.stdout.fnmatch_lines(\n [\n \"*skipif(condition, ..., [*], reason=...)*skip*\",\n \"*xfail(condition, ..., [*], reason=..., run=True, raises=None, strict=xfail_strict)*expected failure*\",\n ]\n )\n\n\ndef test_xfail_test_setup_exception(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_runtest_setup():\n 0 / 0\n \"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test_func():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p)\n assert result.ret == 0\n assert \"xfailed\" in result.stdout.str()\n result.stdout.no_fnmatch_line(\"*xpassed*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_pytest_TestFailing.test_not.assert_not_f_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_pytest_TestFailing.test_not.assert_not_f_", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/failure_demo.py", "file_name": "failure_demo.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 39, "span_ids": ["TestFailing", "TestFailing.test_simple_multiline", "TestFailing.test_simple", "TestFailing.test_not", 
"somefunc", "test_generative", "imports", "otherfunc_multi", "otherfunc"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom pytest import raises\n\n\ndef otherfunc(a, b):\n assert a == b\n\n\ndef somefunc(x, y):\n otherfunc(x, y)\n\n\ndef otherfunc_multi(a, b):\n assert a == b\n\n\n@pytest.mark.parametrize(\"param1, param2\", [(3, 6)])\ndef test_generative(param1, param2):\n assert param1 * 2 < param2\n\n\nclass TestFailing:\n def test_simple(self):\n def f():\n return 42\n\n def g():\n return 43\n\n assert f() == g()\n\n def test_simple_multiline(self):\n otherfunc_multi(42, 6 * 9)\n\n def test_not(self):\n def f():\n return 42\n\n assert not f()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestRaises_test_dynamic_compile_shows_nicely.module_foo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/assertion/failure_demo.py_TestRaises_test_dynamic_compile_shows_nicely.module_foo_", "embedding": null, "metadata": {"file_path": "doc/en/example/assertion/failure_demo.py", "file_name": "failure_demo.py", "file_type": "text/x-python", "category": "implementation", "start_line": 163, "end_line": 202, "span_ids": ["TestRaises", "TestRaises.test_raises", "TestRaises.test_raise", "TestRaises.test_some_error", "TestRaises.test_reinterpret_fails_with_print_for_the_fun_of_it", "test_dynamic_compile_shows_nicely", "TestRaises.func1", "TestRaises.test_tupleerror", "TestRaises.test_raises_doesnt"], "tokens": 270}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n def test_raises(self):\n s = \"qwe\"\n raises(TypeError, int, s)\n\n def test_raises_doesnt(self):\n raises(OSError, int, \"3\")\n\n def test_raise(self):\n raise ValueError(\"demo error\")\n\n def test_tupleerror(self):\n a, b = [1] # NOQA\n\n def test_reinterpret_fails_with_print_for_the_fun_of_it(self):\n items = [1, 2, 3]\n print(f\"items is {items!r}\")\n a, b = items.pop()\n\n def test_some_error(self):\n if namenotexi: # NOQA\n pass\n\n def func1(self):\n assert 41 == 42\n\n\n# thanks to Matthew Scott for this test\ndef test_dynamic_compile_shows_nicely():\n import importlib.util\n import sys\n\n src = \"def foo():\\n assert 1 == 0\\n\"\n name = \"abc-123\"\n spec = importlib.util.spec_from_loader(name, loader=None)\n module = importlib.util.module_from_spec(spec)\n code = compile(src, name, \"exec\")\n exec(code, module.__dict__)\n sys.modules[name] = module\n module.foo()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_getfslineno_getfslineno.return.code_path_code_firstline": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_getfslineno_getfslineno.return.code_path_code_firstline", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1189, "end_line": 1220, "span_ids": ["getfslineno"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def getfslineno(obj: object) -> Tuple[Union[str, Path], int]:\n \"\"\"Return source location (path, lineno) for the given object.\n\n If the source cannot be determined return (\"\", -1).\n\n The line number is 0-based.\n \"\"\"\n # xxx let decorators etc specify a sane ordering\n # NOTE: this used to be done in _pytest.compat.getfslineno, initially added\n # in 6ec13a2b9. It (\"place_as\") appears to be something very custom.\n obj = get_real_func(obj)\n if hasattr(obj, \"place_as\"):\n obj = obj.place_as # type: ignore[attr-defined]\n\n try:\n code = Code.from_function(obj)\n except TypeError:\n try:\n fn = inspect.getsourcefile(obj) or inspect.getfile(obj) # type: ignore[arg-type]\n except TypeError:\n return \"\", -1\n\n fspath = fn and absolutepath(fn) or \"\"\n lineno = -1\n if fspath:\n try:\n _, lineno = findsource(obj)\n except OSError:\n pass\n return fspath, lineno\n\n return code.path, code.firstlineno", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_ast_Source.__str__.return._n_join_self_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py_ast_Source.__str__.return._n_join_self_lines_", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/source.py", "file_name": "source.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 111, "span_ids": ["Source.__getitem___2", "Source.__getitem__", "Source.__getitem___3", "Source.__iter__", "Source.__str__", "Source.indent", "Source.deindent", "Source.strip", "Source.getstatement", "Source", "Source.__eq__", "imports", "Source.getstatementrange", "Source:3", "Source.__len__"], "tokens": 821}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import ast\nimport inspect\nimport textwrap\nimport tokenize\nimport types\nimport warnings\nfrom bisect import bisect_right\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Tuple\nfrom typing import Union\n\n\nclass Source:\n \"\"\"An immutable object holding a source code fragment.\n\n When using Source(...), the source lines are deindented.\n \"\"\"\n\n def 
__init__(self, obj: object = None) -> None:\n if not obj:\n self.lines: List[str] = []\n elif isinstance(obj, Source):\n self.lines = obj.lines\n elif isinstance(obj, (tuple, list)):\n self.lines = deindent(x.rstrip(\"\\n\") for x in obj)\n elif isinstance(obj, str):\n self.lines = deindent(obj.split(\"\\n\"))\n else:\n try:\n rawcode = getrawcode(obj)\n src = inspect.getsource(rawcode)\n except TypeError:\n src = inspect.getsource(obj) # type: ignore[arg-type]\n self.lines = deindent(src.split(\"\\n\"))\n\n def __eq__(self, other: object) -> bool:\n if not isinstance(other, Source):\n return NotImplemented\n return self.lines == other.lines\n\n # Ignore type because of https://github.com/python/mypy/issues/4266.\n __hash__ = None # type: ignore\n\n @overload\n def __getitem__(self, key: int) -> str:\n ...\n\n @overload\n def __getitem__(self, key: slice) -> \"Source\":\n ...\n\n def __getitem__(self, key: Union[int, slice]) -> Union[str, \"Source\"]:\n if isinstance(key, int):\n return self.lines[key]\n else:\n if key.step not in (None, 1):\n raise IndexError(\"cannot slice a Source with a step\")\n newsource = Source()\n newsource.lines = self.lines[key.start : key.stop]\n return newsource\n\n def __iter__(self) -> Iterator[str]:\n return iter(self.lines)\n\n def __len__(self) -> int:\n return len(self.lines)\n\n def strip(self) -> \"Source\":\n \"\"\"Return new Source object with trailing and leading blank lines removed.\"\"\"\n start, end = 0, len(self)\n while start < end and not self.lines[start].strip():\n start += 1\n while end > start and not self.lines[end - 1].strip():\n end -= 1\n source = Source()\n source.lines[:] = self.lines[start:end]\n return source\n\n def indent(self, indent: str = \" \" * 4) -> \"Source\":\n \"\"\"Return a copy of the source object with all lines indented by the\n given indent-string.\"\"\"\n newsource = Source()\n newsource.lines = [(indent + line) for line in self.lines]\n return newsource\n\n def getstatement(self, lineno: int) -> \"Source\":\n \"\"\"Return Source statement which contains the given linenumber\n (counted from 0).\"\"\"\n start, end = self.getstatementrange(lineno)\n return self[start:end]\n\n def getstatementrange(self, lineno: int) -> Tuple[int, int]:\n \"\"\"Return (start, end) tuple which spans the minimal statement region\n which containing the given lineno.\"\"\"\n if not (0 <= lineno < len(self)):\n raise IndexError(\"lineno out of range\")\n ast, start, end = getstatementrange_ast(lineno, self)\n return start, end\n\n def deindent(self) -> \"Source\":\n \"\"\"Return a new Source object deindented.\"\"\"\n newsource = Source()\n newsource.lines[:] = deindent(self.lines)\n return newsource\n\n def __str__(self) -> str:\n return \"\\n\".join(self.lines)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py___deindent.return.textwrap_dedent_n_join": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/source.py___deindent.return.textwrap_dedent_n_join", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/source.py", "file_name": "source.py", "file_type": "text/x-python", "category": "implementation", "start_line": 114, "end_line": 143, "span_ids": ["deindent", "findsource", "getrawcode", "Source.__str__"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "#\n# helper functions\n#\n\n\ndef findsource(obj) -> Tuple[Optional[Source], int]:\n try:\n sourcelines, lineno = inspect.findsource(obj)\n except Exception:\n return None, -1\n source = Source()\n source.lines = [line.rstrip() for line in sourcelines]\n return source, lineno\n\n\ndef getrawcode(obj: object, trycall: bool = True) -> types.CodeType:\n \"\"\"Return code object for given function.\"\"\"\n try:\n return obj.__code__ # type: ignore[attr-defined,no-any-return]\n except AttributeError:\n pass\n if trycall:\n call = getattr(obj, \"__call__\", None)\n if call and not isinstance(obj, type):\n return getrawcode(call, trycall=False)\n raise TypeError(f\"could not get code object for {obj!r}\")\n\n\ndef deindent(lines: Iterable[str]) -> List[str]:\n return textwrap.dedent(\"\\n\".join(lines)).splitlines()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture_LogCaptureFixture.handler.return.self__item__store_caplog_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture_LogCaptureFixture.handler.return.self__item__store_caplog_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 346, "end_line": 375, "span_ids": ["LogCaptureFixture._finalize", "LogCaptureFixture.handler", "LogCaptureFixture"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass LogCaptureFixture:\n \"\"\"Provides access and control of log capturing.\"\"\"\n\n def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None:\n check_ispytest(_ispytest)\n self._item = item\n self._initial_handler_level: Optional[int] = None\n # Dict of log name -> log level.\n self._initial_logger_levels: Dict[Optional[str], int] = {}\n\n def _finalize(self) -> None:\n \"\"\"Finalize the fixture.\n\n This restores the log levels changed by :meth:`set_level`.\n \"\"\"\n # Restore log levels.\n if self._initial_handler_level is not None:\n self.handler.setLevel(self._initial_handler_level)\n for logger_name, level in self._initial_logger_levels.items():\n logger = logging.getLogger(logger_name)\n logger.setLevel(level)\n\n @property\n def handler(self) -> LogCaptureHandler:\n \"\"\"Get the logging handler used by the fixture.\n\n :rtype: LogCaptureHandler\n \"\"\"\n return self._item._store[caplog_handler_key]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.get_records_LogCaptureFixture.get_records.return.self__item__store_caplog_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LogCaptureFixture.get_records_LogCaptureFixture.get_records.return.self__item__store_caplog_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 377, "end_line": 388, "span_ids": ["LogCaptureFixture.get_records"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass LogCaptureFixture:\n\n def get_records(self, when: str) -> List[logging.LogRecord]:\n \"\"\"Get the logging records for one of the possible test phases.\n\n :param str when:\n Which test phase to obtain the records from. Valid values are: \"setup\", \"call\" and \"teardown\".\n\n :returns: The list of captured records at the given stage.\n :rtype: List[logging.LogRecord]\n\n .. versionadded:: 3.4\n \"\"\"\n return self._item._store[caplog_records_key].get(when, [])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.pytest_runtestloop_LoggingPlugin.pytest_runtest_logreport.self_log_cli_handler_set_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin.pytest_runtestloop_LoggingPlugin.pytest_runtest_logreport.self_log_cli_handler_set_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 662, "end_line": 683, "span_ids": ["LoggingPlugin.pytest_runtestloop", "LoggingPlugin.pytest_runtest_logreport", "LoggingPlugin.pytest_runtest_logstart"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoggingPlugin:\n\n @hookimpl(hookwrapper=True)\n def pytest_runtestloop(self, session: Session) -> Generator[None, None, None]:\n if session.config.option.collectonly:\n yield\n return\n\n if self._log_cli_enabled() and self._config.getoption(\"verbose\") < 1:\n # The verbose flag is needed to avoid messy test progress output.\n self._config.option.verbose = 1\n\n with catching_logs(self.log_cli_handler, level=self.log_cli_level):\n with catching_logs(self.log_file_handler, level=self.log_file_level):\n yield # Run all the tests.\n\n @hookimpl\n def pytest_runtest_logstart(self) -> None:\n self.log_cli_handler.reset()\n self.log_cli_handler.set_when(\"start\")\n\n @hookimpl\n def pytest_runtest_logreport(self) -> None:\n self.log_cli_handler.set_when(\"logreport\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._runtest_for_LoggingPlugin._runtest_for.with_catching_logs_.item_add_report_section_w": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py_LoggingPlugin._runtest_for_LoggingPlugin._runtest_for.with_catching_logs_.item_add_report_section_w", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 685, "end_line": 702, "span_ids": ["LoggingPlugin._runtest_for"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoggingPlugin:\n\n def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None, None, None]:\n \"\"\"Implement the internals of the pytest_runtest_xxx() hooks.\"\"\"\n with catching_logs(\n self.caplog_handler,\n level=self.log_level,\n ) as caplog_handler, catching_logs(\n self.report_handler,\n level=self.log_level,\n ) as report_handler:\n caplog_handler.reset()\n report_handler.reset()\n item._store[caplog_records_key][when] = caplog_handler.records\n item._store[caplog_handler_key] = caplog_handler\n\n yield\n\n log = report_handler.stream.getvalue().strip()\n item.add_report_section(when, \"log\", log)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_cleanup_numbered_dir_resolve_from_str.if_isabs_input_.else_.return.rootpath_joinpath_input_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_cleanup_numbered_dir_resolve_from_str.if_isabs_input_.else_.return.rootpath_joinpath_input_", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 344, "end_line": 387, "span_ids": ["resolve_from_str", "make_numbered_dir_with_cleanup", "cleanup_numbered_dir"], "tokens": 316}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def cleanup_numbered_dir(\n root: Path, prefix: str, keep: int, consider_lock_dead_if_created_before: float\n) -> None:\n \"\"\"Cleanup for lock driven numbered directories.\"\"\"\n for path in cleanup_candidates(root, prefix, keep):\n try_cleanup(path, consider_lock_dead_if_created_before)\n for path in root.glob(\"garbage-*\"):\n try_cleanup(path, consider_lock_dead_if_created_before)\n\n\ndef make_numbered_dir_with_cleanup(\n root: Path, prefix: str, keep: int, lock_timeout: float\n) -> Path:\n \"\"\"Create a numbered dir with a cleanup lock and remove old ones.\"\"\"\n e = None\n for i in range(10):\n try:\n p = make_numbered_dir(root, prefix)\n lock_path = create_cleanup_lock(p)\n 
register_cleanup_lock_removal(lock_path)\n except Exception as exc:\n e = exc\n else:\n consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout\n # Register a cleanup for program exit\n atexit.register(\n cleanup_numbered_dir,\n root,\n prefix,\n keep,\n consider_lock_dead_if_created_before,\n )\n return p\n assert e is not None\n raise e\n\n\ndef resolve_from_str(input: str, rootpath: Path) -> Path:\n input = expanduser(input)\n input = expandvars(input)\n if isabs(input):\n return Path(input)\n else:\n return rootpath.joinpath(input)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_pytest_addoption_pytest_configure.config_addinivalue_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_pytest_addoption_pytest_configure.config_addinivalue_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 59, "end_line": 94, "span_ids": ["pytest_configure", "pytest_addoption"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n parser.addoption(\n \"--lsof\",\n action=\"store_true\",\n dest=\"lsof\",\n default=False,\n help=\"run FD checks if lsof is available\",\n )\n\n parser.addoption(\n \"--runpytest\",\n default=\"inprocess\",\n dest=\"runpytest\",\n choices=(\"inprocess\", \"subprocess\"),\n help=(\n \"run pytest sub runs in tests using an 'inprocess' \"\n \"or 'subprocess' (python -m main) method\"\n ),\n )\n\n parser.addini(\n \"pytester_example_dir\", help=\"directory to take the pytester example files from\"\n )\n\n\ndef pytest_configure(config: Config) -> None:\n if config.getvalue(\"lsof\"):\n checker = LsofFdLeakChecker()\n if checker.matching_platform():\n config.pluginmanager.register(checker)\n\n config.addinivalue_line(\n \"markers\",\n \"pytester_example_path(*path_segments): join the given path \"\n \"segments to `pytester_example_dir` for this test.\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LsofFdLeakChecker_LsofFdLeakChecker.matching_platform.try_.else_.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LsofFdLeakChecker_LsofFdLeakChecker.matching_platform.try_.else_.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 135, "span_ids": ["LsofFdLeakChecker.matching_platform", "LsofFdLeakChecker.get_open_files", "LsofFdLeakChecker"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LsofFdLeakChecker:\n def get_open_files(self) -> List[Tuple[str, str]]:\n out = subprocess.run(\n (\"lsof\", \"-Ffn0\", \"-p\", str(os.getpid())),\n stdout=subprocess.PIPE,\n stderr=subprocess.DEVNULL,\n check=True,\n universal_newlines=True,\n ).stdout\n\n def isopen(line: str) -> bool:\n return line.startswith(\"f\") and (\n \"deleted\" not in line\n and \"mem\" not in line\n and \"txt\" not in line\n and \"cwd\" not in line\n )\n\n open_files = []\n\n for line in out.split(\"\\n\"):\n if isopen(line):\n fields = line.split(\"\\0\")\n fd = fields[0][1:]\n filename = fields[1][1:]\n if filename in IGNORE_PAM:\n continue\n if filename.startswith(\"/\"):\n open_files.append((fd, filename))\n\n return open_files\n\n def matching_platform(self) -> bool:\n try:\n subprocess.run((\"lsof\", \"-v\"), check=True)\n except (OSError, subprocess.CalledProcessError):\n return False\n else:\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_pycollect_makeitem_pytest_pycollect_makeitem.if_safe_isclass_obj_.elif_collector_istestfunc.if_not_inspect_isfunctio.elif_getattr_obj___test.return.res": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_pycollect_makeitem_pytest_pycollect_makeitem.if_safe_isclass_obj_.elif_collector_istestfunc.if_not_inspect_isfunctio.elif_getattr_obj___test.return.res", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 221, "end_line": 253, "span_ids": ["pytest_pycollect_makeitem"], "tokens": 342}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(trylast=True)\ndef pytest_pycollect_makeitem(collector: \"PyCollector\", name: str, obj: object):\n # Nothing was collected elsewhere, let's do it here.\n if safe_isclass(obj):\n if collector.istestclass(obj, name):\n return Class.from_parent(collector, name=name, obj=obj)\n elif collector.istestfunction(obj, name):\n # mock seems to store unbound methods (issue473), normalize it.\n obj = getattr(obj, \"__func__\", obj)\n # We need to try and unwrap the function if it's a functools.partial\n # or a functools.wrapped.\n # We mustn't if it's been wrapped with mock.patch (python 2 only).\n if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))):\n filename, lineno = getfslineno(obj)\n warnings.warn_explicit(\n message=PytestCollectionWarning(\n \"cannot collect %r because it is not a function.\" % name\n ),\n category=None,\n filename=str(filename),\n lineno=lineno + 1,\n )\n elif getattr(obj, \"__test__\", True):\n if is_generator(obj):\n res = Function.from_parent(collector, name=name)\n reason = \"yield tests were removed in pytest 4.0 - {name} will be ignored\".format(\n name=name\n )\n res.add_marker(MARK_GEN.xfail(run=False, reason=reason))\n res.warn(PytestCollectionWarning(reason))\n else:\n 
res = list(collector._genfunctions(name, obj))\n return res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin_PyobjMixin._getobj.return.getattr_obj_self_name_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin_PyobjMixin._getobj.return.getattr_obj_self_name_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 256, "end_line": 303, "span_ids": ["PyobjMixin.cls", "PyobjMixin.instance", "PyobjMixin._getobj", "PyobjMixin.module", "PyobjMixin", "PyobjMixin.obj", "PyobjMixin.obj_4"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PyobjMixin(nodes.Node):\n \"\"\"this mix-in inherits from Node to carry over the typing information\n\n as its intended to always mix in before a node\n its position in the mro is unaffected\"\"\"\n\n _ALLOW_MARKERS = True\n\n @property\n def module(self):\n \"\"\"Python module object this node was collected from (can be None).\"\"\"\n node = self.getparent(Module)\n return node.obj if node is not None else None\n\n @property\n def cls(self):\n \"\"\"Python class object this node was collected from (can be None).\"\"\"\n node = self.getparent(Class)\n return node.obj if node is not None else None\n\n @property\n def instance(self):\n \"\"\"Python instance object this node was collected from (can be None).\"\"\"\n node = self.getparent(Instance)\n return node.obj if node is not None else None\n\n @property\n def obj(self):\n \"\"\"Underlying Python object.\"\"\"\n obj = getattr(self, \"_obj\", None)\n if obj is None:\n self._obj = obj = self._getobj()\n # XXX evil hack\n # used to avoid Instance collector marker duplication\n if self._ALLOW_MARKERS:\n self.own_markers.extend(get_unpacked_marks(self.obj))\n return obj\n\n @obj.setter\n def obj(self, value):\n self._obj = value\n\n def _getobj(self):\n \"\"\"Get the underlying Python object. 
May be overwritten by subclasses.\"\"\"\n # TODO: Improve the type of `parent` such that assert/ignore aren't needed.\n assert self.parent is not None\n obj = self.parent.obj # type: ignore[attr-defined]\n return getattr(obj, self.name)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin.getmodpath_PyobjMixin.getmodpath.return._join_parts_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_PyobjMixin.getmodpath_PyobjMixin.getmodpath.return._join_parts_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 305, "end_line": 322, "span_ids": ["PyobjMixin.getmodpath"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PyobjMixin(nodes.Node):\n\n def getmodpath(self, stopatmodule: bool = True, includemodule: bool = False) -> str:\n \"\"\"Return Python path relative to the containing module.\"\"\"\n chain = self.listchain()\n chain.reverse()\n parts = []\n for node in chain:\n if isinstance(node, Instance):\n continue\n name = node.name\n if isinstance(node, Module):\n name = os.path.splitext(name)[0]\n if stopatmodule:\n if includemodule:\n parts.append(name)\n break\n parts.append(name)\n parts.reverse()\n return \".\".join(parts)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Instance_hasnew.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Instance_hasnew.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 859, "end_line": 891, "span_ids": ["hasinit", "Instance.newinstance", "Instance._getobj", "Instance", "hasnew", "Instance.collect"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Instance(PyCollector):\n _ALLOW_MARKERS = False # hack, destroy later\n # Instances share the object with their parents in a way\n # that duplicates markers instances if not taken out\n # can be removed at node structure reorganization time.\n\n def _getobj(self):\n # TODO: Improve the type of `parent` such that assert/ignore aren't needed.\n assert self.parent is not None\n obj = self.parent.obj # type: ignore[attr-defined]\n return obj()\n\n def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:\n self.session._fixturemanager.parsefactories(self)\n return super().collect()\n\n def newinstance(self):\n self.obj = 
self._getobj()\n return self.obj\n\n\ndef hasinit(obj: object) -> bool:\n init: object = getattr(obj, \"__init__\", None)\n if init:\n return init != object.__init__\n return False\n\n\ndef hasnew(obj: object) -> bool:\n new: object = getattr(obj, \"__new__\", None)\n if new:\n return new != object.__new__\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function._prunetraceback_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function._prunetraceback_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1663, "end_line": 1705, "span_ids": ["FunctionDefinition.runtest", "FunctionDefinition", "FunctionDefinition:3", "Function._prunetraceback", "Function.repr_failure"], "tokens": 405}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Function(PyobjMixin, nodes.Item):\n\n def _prunetraceback(self, excinfo: ExceptionInfo[BaseException]) -> None:\n if hasattr(self, \"_obj\") and not self.config.getoption(\"fulltrace\", False):\n code = _pytest._code.Code.from_function(get_real_func(self.obj))\n path, firstlineno = code.path, code.firstlineno\n traceback = excinfo.traceback\n ntraceback = traceback.cut(path=path, firstlineno=firstlineno)\n if ntraceback == traceback:\n ntraceback = ntraceback.cut(path=path)\n if ntraceback == traceback:\n ntraceback = ntraceback.filter(filter_traceback)\n if not ntraceback:\n ntraceback = traceback\n\n excinfo.traceback = ntraceback.filter()\n # issue364: mark all but first and last frames to\n # only show a single-line message for each frame.\n if self.config.getoption(\"tbstyle\", \"auto\") == \"auto\":\n if len(excinfo.traceback) > 2:\n for entry in excinfo.traceback[1:-1]:\n entry.set_repr_style(\"short\")\n\n # TODO: Type ignored -- breaks Liskov Substitution.\n def repr_failure( # type: ignore[override]\n self,\n excinfo: ExceptionInfo[BaseException],\n ) -> Union[str, TerminalRepr]:\n style = self.config.getoption(\"tbstyle\", \"auto\")\n if style == \"auto\":\n style = \"long\"\n return self._repr_failure_py(excinfo, style=style)\n\n\nclass FunctionDefinition(Function):\n \"\"\"\n This class is a step gap solution until we evolve to have actual function definition nodes\n and manage to get rid of ``metafunc``.\n \"\"\"\n\n def runtest(self) -> None:\n raise RuntimeError(\"function definitions are not supposed to be run as tests\")\n\n setup = runtest", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_frame_leak_on_failing_test_test_usage_error_code.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_frame_leak_on_failing_test_test_usage_error_code.assert_result_ret_Exit", "embedding": null, 
"metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1131, "end_line": 1173, "span_ids": ["test_frame_leak_on_failing_test", "test_usage_error_code", "test_fixture_mock_integration"], "tokens": 298}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_frame_leak_on_failing_test(pytester: Pytester) -> None:\n \"\"\"Pytest would leak garbage referencing the frames of tests that failed\n that could never be reclaimed (#2798).\n\n Unfortunately it was not possible to remove the actual circles because most of them\n are made of traceback objects which cannot be weakly referenced. Those objects at least\n can be eventually claimed by the garbage collector.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import gc\n import weakref\n\n class Obj:\n pass\n\n ref = None\n\n def test1():\n obj = Obj()\n global ref\n ref = weakref.ref(obj)\n assert 0\n\n def test2():\n gc.collect()\n assert ref() is None\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*1 failed, 1 passed in*\"])\n\n\ndef test_fixture_mock_integration(pytester: Pytester) -> None:\n \"\"\"Test that decorators applied to fixture are left working (#3774)\"\"\"\n p = pytester.copy_example(\"acceptance/fixture_mock_integration.py\")\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n\ndef test_usage_error_code(pytester: Pytester) -> None:\n result = pytester.runpytest(\"-unknown-option-\")\n assert result.ret == ExitCode.USAGE_ERROR", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_warn_on_async_function_test_warn_on_async_function.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_test_warn_on_async_function_test_warn_on_async_function.assert_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1176, "end_line": 1206, "span_ids": ["test_warn_on_async_function"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_warn_on_async_function(pytester: Pytester) -> None:\n # In the below we .close() the coroutine only to avoid\n # \"RuntimeWarning: coroutine 'test_2' was never awaited\"\n # which messes with other tests.\n pytester.makepyfile(\n test_async=\"\"\"\n async def test_1():\n pass\n async def test_2():\n pass\n def test_3():\n coro = test_2()\n coro.close()\n return coro\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"test_async.py::test_1\",\n \"test_async.py::test_2\",\n \"test_async.py::test_3\",\n \"*async def 
functions are not natively supported*\",\n \"*3 skipped, 3 warnings in*\",\n ]\n )\n # ensure our warning message appears only once\n assert (\n result.stdout.str().count(\"async def functions are not natively supported\") == 1\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_entry_getsource_in_construct_TestTraceback_f_g_h.test_traceback_entry_getsource_in_construct.assert_source_deindent_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_entry_getsource_in_construct_TestTraceback_f_g_h.test_traceback_entry_getsource_in_construct.assert_source_deindent_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 129, "end_line": 151, "span_ids": ["TestTraceback_f_g_h.test_traceback_entry_getsource_in_construct"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceback_f_g_h:\n\n def test_traceback_entry_getsource_in_construct(self):\n def xyz():\n try:\n raise ValueError\n except somenoname: # type: ignore[name-defined] # noqa: F821\n pass # pragma: no cover\n\n try:\n xyz()\n except NameError:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n else:\n assert False, \"did not raise NameError\"\n\n tb = excinfo.traceback\n source = tb[-1].getsource()\n assert source is not None\n assert source.deindent().lines == [\n \"def xyz():\",\n \" try:\",\n \" raise ValueError\",\n \" except somenoname: # type: ignore[name-defined] # noqa: F821\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_excinfo_TestFormattedExcinfo.test_repr_source_excinfo.assert_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_excinfo_TestFormattedExcinfo.test_repr_source_excinfo.assert_lines_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 464, "end_line": 488, "span_ids": ["TestFormattedExcinfo.test_repr_source_excinfo"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_source_excinfo(self) -> None:\n \"\"\"Check if indentation is right.\"\"\"\n try:\n\n def f():\n 1 / 0\n\n f()\n\n except BaseException:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n 
else:\n assert False, \"did not raise\"\n\n pr = FormattedExcinfo()\n source = pr._getentrysource(excinfo.traceback[-1])\n assert source is not None\n lines = pr.get_source(source, 1, excinfo)\n for line in lines:\n print(line)\n assert lines == [\n \" def f():\",\n \"> 1 / 0\",\n \"E ZeroDivisionError: division by zero\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_not_existing_TestFormattedExcinfo.test_repr_many_line_source_not_existing.assert_repr_chain_0_0_r": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_repr_source_not_existing_TestFormattedExcinfo.test_repr_many_line_source_not_existing.assert_repr_chain_0_0_r", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 492, "end_line": 519, "span_ids": ["TestFormattedExcinfo.test_repr_many_line_source_not_existing", "TestFormattedExcinfo.test_repr_source_not_existing"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_repr_source_not_existing(self):\n pr = FormattedExcinfo()\n co = compile(\"raise ValueError()\", \"\", \"exec\")\n try:\n exec(co)\n except ValueError:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n repr = pr.repr_excinfo(excinfo)\n assert repr.reprtraceback.reprentries[1].lines[0] == \"> ???\"\n assert repr.chain[0][0].reprentries[1].lines[0] == \"> ???\"\n\n def test_repr_many_line_source_not_existing(self):\n pr = FormattedExcinfo()\n co = compile(\n \"\"\"\na = 1\nraise ValueError()\n\"\"\",\n \"\",\n \"exec\",\n )\n try:\n exec(co)\n except ValueError:\n excinfo = _pytest._code.ExceptionInfo.from_current()\n repr = pr.repr_excinfo(excinfo)\n assert repr.reprtraceback.reprentries[1].lines[0] == \"> ???\"\n assert repr.chain[0][0].reprentries[1].lines[0] == \"> ???\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py__flake8_noqa_test_source_strip_multiline.assert_source2_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py__flake8_noqa_test_source_strip_multiline.assert_source2_lines_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 77, "span_ids": ["test_source_from_function", "test_source_strips", "test_source_from_method.TestClass.test_method", "test_source_from_lines", "docstring", "test_source_from_method", "test_source_from_inner_function", "imports", "test_source_strip_multiline", "test_source_from_method.TestClass", "test_source_str_function"], "tokens": 442}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# flake8: noqa\n# disable flake check on this file because some constructs are strange\n# or redundant on purpose and can't be disable on a line-by-line basis\nimport ast\nimport inspect\nimport linecache\nimport sys\nimport textwrap\nfrom pathlib import Path\nfrom types import CodeType\nfrom typing import Any\nfrom typing import Dict\nfrom typing import Optional\n\nimport pytest\nfrom _pytest._code import Code\nfrom _pytest._code import Frame\nfrom _pytest._code import getfslineno\nfrom _pytest._code import Source\nfrom _pytest.pathlib import import_path\n\n\ndef test_source_str_function() -> None:\n x = Source(\"3\")\n assert str(x) == \"3\"\n\n x = Source(\" 3\")\n assert str(x) == \"3\"\n\n x = Source(\n \"\"\"\n 3\n \"\"\"\n )\n assert str(x) == \"\\n3\"\n\n\ndef test_source_from_function() -> None:\n source = Source(test_source_str_function)\n assert str(source).startswith(\"def test_source_str_function() -> None:\")\n\n\ndef test_source_from_method() -> None:\n class TestClass:\n def test_method(self):\n pass\n\n source = Source(TestClass().test_method)\n assert source.lines == [\"def test_method(self):\", \" pass\"]\n\n\ndef test_source_from_lines() -> None:\n lines = [\"a \\n\", \"b\\n\", \"c\"]\n source = Source(lines)\n assert source.lines == [\"a \", \"b\", \"c\"]\n\n\ndef test_source_from_inner_function() -> None:\n def f():\n raise NotImplementedError()\n\n source = Source(f)\n assert str(source).startswith(\"def f():\")\n\n\ndef test_source_strips() -> None:\n source = Source(\"\")\n assert source == Source()\n assert str(source) == \"\"\n assert source.strip() == source\n\n\ndef test_source_strip_multiline() -> None:\n source = Source()\n source.lines = [\"\", \" hello\", \" \"]\n source2 = source.strip()\n assert source2.lines == [\" hello\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestAccesses_TestAccesses.test_iter.assert_len_values_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestAccesses_TestAccesses.test_iter.assert_len_values_4", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 79, "end_line": 108, "span_ids": ["TestAccesses.test_getline", "TestAccesses.test_len", "TestAccesses", "TestAccesses.test_getrange_step_not_supported", "TestAccesses.test_iter", "TestAccesses.setup_class", "TestAccesses.test_getrange"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAccesses:\n def setup_class(self) -> None:\n self.source = Source(\n \"\"\"\\\n def f(x):\n pass\n def g(x):\n pass\n \"\"\"\n )\n\n def test_getrange(self) -> None:\n x = self.source[0:2]\n assert len(x.lines) == 2\n assert str(x) == \"def f(x):\\n 
pass\"\n\n def test_getrange_step_not_supported(self) -> None:\n with pytest.raises(IndexError, match=r\"step\"):\n self.source[::2]\n\n def test_getline(self) -> None:\n x = self.source[0]\n assert x == \"def f(x):\"\n\n def test_len(self) -> None:\n assert len(self.source) == 4\n\n def test_iter(self) -> None:\n values = [x for x in self.source]\n assert len(values) == 4", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing_TestSourceParsing.test_getstatementrange_triple_quoted.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing_TestSourceParsing.test_getstatementrange_triple_quoted.None_1", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 111, "end_line": 140, "span_ids": ["TestSourceParsing.test_getstatementrange_triple_quoted", "TestSourceParsing.setup_class", "TestSourceParsing.test_getstatement", "TestSourceParsing"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSourceParsing:\n def setup_class(self) -> None:\n self.source = Source(\n \"\"\"\\\n def f(x):\n assert (x ==\n 3 +\n 4)\n \"\"\"\n ).strip()\n\n def test_getstatement(self) -> None:\n # print str(self.source)\n ass = str(self.source[1:])\n for i in range(1, 4):\n # print \"trying start in line %r\" % self.source[i]\n s = self.source.getstatement(i)\n # x = s.deindent()\n assert str(s) == ass\n\n def test_getstatementrange_triple_quoted(self) -> None:\n # print str(self.source)\n source = Source(\n \"\"\"hello('''\n ''')\"\"\"\n )\n s = source.getstatement(0)\n assert s == source\n s = source.getstatement(1)\n assert s == source", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing.test_getstatementrange_within_constructs_TestSourceParsing.test_getstatementrange_within_constructs.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing.test_getstatementrange_within_constructs_TestSourceParsing.test_getstatementrange_within_constructs.None_4", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 142, "end_line": 162, "span_ids": ["TestSourceParsing.test_getstatementrange_within_constructs"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSourceParsing:\n\n def test_getstatementrange_within_constructs(self) -> None:\n source = 
Source(\n \"\"\"\\\n try:\n try:\n raise ValueError\n except SomeThing:\n pass\n finally:\n 42\n \"\"\"\n )\n assert len(source) == 7\n # check all lineno's that could occur in a traceback\n # assert source.getstatementrange(0) == (0, 7)\n # assert source.getstatementrange(1) == (1, 5)\n assert source.getstatementrange(2) == (2, 3)\n assert source.getstatementrange(3) == (3, 4)\n assert source.getstatementrange(4) == (4, 5)\n # assert source.getstatementrange(5) == (0, 7)\n assert source.getstatementrange(6) == (6, 7)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing.test_getstatementrange_bug_TestSourceParsing.test_getstatementrange_with_syntaxerror_issue7.pytest_raises_SyntaxError": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_TestSourceParsing.test_getstatementrange_bug_TestSourceParsing.test_getstatementrange_with_syntaxerror_issue7.pytest_raises_SyntaxError", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 164, "end_line": 216, "span_ids": ["TestSourceParsing.test_getstatementrange_bug2", "TestSourceParsing.test_getstatementrange_out_of_bounds_py3", "TestSourceParsing.test_getstatementrange_bug", "TestSourceParsing.test_getstatementrange_ast_issue58", "TestSourceParsing.test_getstatementrange_with_syntaxerror_issue7"], "tokens": 334}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSourceParsing:\n\n def test_getstatementrange_bug(self) -> None:\n source = Source(\n \"\"\"\\\n try:\n x = (\n y +\n z)\n except:\n pass\n \"\"\"\n )\n assert len(source) == 6\n assert source.getstatementrange(2) == (1, 4)\n\n def test_getstatementrange_bug2(self) -> None:\n source = Source(\n \"\"\"\\\n assert (\n 33\n ==\n [\n X(3,\n b=1, c=2\n ),\n ]\n )\n \"\"\"\n )\n assert len(source) == 9\n assert source.getstatementrange(5) == (0, 9)\n\n def test_getstatementrange_ast_issue58(self) -> None:\n source = Source(\n \"\"\"\\\n\n def test_some():\n for a in [a for a in\n CAUSE_ERROR]: pass\n\n x = 3\n \"\"\"\n )\n assert getstatement(2, source).lines == source.lines[2:3]\n assert getstatement(3, source).lines == source.lines[3:4]\n\n def test_getstatementrange_out_of_bounds_py3(self) -> None:\n source = Source(\"if xxx:\\n from .collections import something\")\n r = source.getstatementrange(1)\n assert r == (1, 2)\n\n def test_getstatementrange_with_syntaxerror_issue7(self) -> None:\n source = Source(\":\")\n pytest.raises(SyntaxError, lambda: source.getstatementrange(0))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_getstartingblock_singleline_test_deindent.assert_lines_def_f_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_getstartingblock_singleline_test_deindent.assert_lines_def_f_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 220, "end_line": 286, "span_ids": ["test_deindent", "test_getstartingblock_singleline.A", "test_getstartingblock_singleline", "test_getline_finally", "test_getfuncsource_dynamic", "test_getstartingblock_singleline.A.__init__", "test_getfuncsource_with_multine_string"], "tokens": 415}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_getstartingblock_singleline() -> None:\n class A:\n def __init__(self, *args) -> None:\n frame = sys._getframe(1)\n self.source = Frame(frame).statement\n\n x = A(\"x\", \"y\")\n\n values = [i for i in x.source.lines if i.strip()]\n assert len(values) == 1\n\n\ndef test_getline_finally() -> None:\n def c() -> None:\n pass\n\n with pytest.raises(TypeError) as excinfo:\n teardown = None\n try:\n c(1) # type: ignore\n finally:\n if teardown:\n teardown() # type: ignore[unreachable]\n source = excinfo.traceback[-1].statement\n assert str(source).strip() == \"c(1) # type: ignore\"\n\n\ndef test_getfuncsource_dynamic() -> None:\n def f():\n raise NotImplementedError()\n\n def g():\n pass # pragma: no cover\n\n f_source = Source(f)\n g_source = Source(g)\n assert str(f_source).strip() == \"def f():\\n raise NotImplementedError()\"\n assert str(g_source).strip() == \"def g():\\n pass # pragma: no cover\"\n\n\ndef test_getfuncsource_with_multine_string() -> None:\n def f():\n c = \"\"\"while True:\n pass\n\"\"\"\n\n expected = '''\\\n def f():\n c = \"\"\"while True:\n pass\n\"\"\"\n'''\n assert str(Source(f)) == expected.rstrip()\n\n\ndef test_deindent() -> None:\n from _pytest._code.source import deindent as deindent\n\n assert deindent([\"\\tfoo\", \"\\tbar\"]) == [\"foo\", \"bar\"]\n\n source = \"\"\"\\\n def f():\n def g():\n pass\n \"\"\"\n lines = deindent(source.splitlines())\n assert lines == [\"def f():\", \" def g():\", \" pass\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_source_of_class_at_eof_without_newline_test_findsource_fallback.assert_src_lineno_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_source_of_class_at_eof_without_newline_test_findsource_fallback.assert_src_lineno_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 289, "end_line": 327, "span_ids": ["test_source_fallback", "impl", "test_findsource_fallback", "test_source_of_class_at_eof_without_newline"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "def test_source_of_class_at_eof_without_newline(\n tmpdir, _sys_snapshot, tmp_path: Path\n) -> None:\n # this test fails because the implicit inspect.getsource(A) below\n # does not return the \"x = 1\" last line.\n source = Source(\n \"\"\"\n class A:\n def method(self):\n x = 1\n \"\"\"\n )\n path = tmp_path.joinpath(\"a.py\")\n path.write_text(str(source))\n mod: Any = import_path(path)\n s2 = Source(mod.A)\n assert str(source).strip() == str(s2).strip()\n\n\nif True:\n\n def x():\n pass\n\n\ndef test_source_fallback() -> None:\n src = Source(x)\n expected = \"\"\"def x():\n pass\"\"\"\n assert str(src) == expected\n\n\ndef test_findsource_fallback() -> None:\n from _pytest._code.source import findsource\n\n src, lineno = findsource(x)\n assert src is not None\n assert \"test_findsource_simple\" in str(src)\n assert src[lineno] == \" def x():\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_source_with_decorator_test_source_with_decorator.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_source_with_decorator_test_source_with_decorator.assert_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 475, "end_line": 499, "span_ids": ["test_source_with_decorator"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_source_with_decorator() -> None:\n \"\"\"Test behavior with Source / Code().source with regard to decorators.\"\"\"\n from _pytest.compat import get_real_func\n\n @pytest.mark.foo\n def deco_mark():\n assert False\n\n src = inspect.getsource(deco_mark)\n assert textwrap.indent(str(Source(deco_mark)), \" \") + \"\\n\" == src\n assert src.startswith(\" @pytest.mark.foo\")\n\n @pytest.fixture\n def deco_fixture():\n assert False\n\n src = inspect.getsource(deco_fixture)\n assert src == \" @pytest.fixture\\n def deco_fixture():\\n assert False\\n\"\n # currenly Source does not unwrap decorators, testing the\n # existing behavior here for explicitness, but perhaps we should revisit/change this\n # in the future\n assert str(Source(deco_fixture)).startswith(\"@functools.wraps(function)\")\n assert (\n textwrap.indent(str(Source(get_real_func(deco_fixture))), \" \") + \"\\n\" == src\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_io_test_terminalwriter_dumb_term_no_markup.with_monkeypatch_context_.assert_not_tw_hasmarkup": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_io_test_terminalwriter_dumb_term_no_markup.with_monkeypatch_context_.assert_not_tw_hasmarkup", "embedding": null, "metadata": {"file_path": "testing/io/test_terminalwriter.py", "file_name": 
"test_terminalwriter.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 50, "span_ids": ["test_terminalwriter_dumb_term_no_markup.MyFile:2", "test_terminalwriter_width_bogus", "test_terminalwriter_computes_width", "test_terminalwriter_dumb_term_no_markup", "imports", "test_terminal_width_COLUMNS", "test_terminalwriter_dumb_term_no_markup.MyFile"], "tokens": 349}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import io\nimport os\nimport re\nimport shutil\nimport sys\nfrom pathlib import Path\nfrom typing import Generator\nfrom unittest import mock\n\nimport pytest\nfrom _pytest._io import terminalwriter\nfrom _pytest.monkeypatch import MonkeyPatch\n\n\n# These tests were initially copied from py 1.8.1.\n\n\ndef test_terminal_width_COLUMNS(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setenv(\"COLUMNS\", \"42\")\n assert terminalwriter.get_terminal_width() == 42\n monkeypatch.delenv(\"COLUMNS\", raising=False)\n\n\ndef test_terminalwriter_width_bogus(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setattr(shutil, \"get_terminal_size\", mock.Mock(return_value=(10, 10)))\n monkeypatch.delenv(\"COLUMNS\", raising=False)\n tw = terminalwriter.TerminalWriter()\n assert tw.fullwidth == 80\n\n\ndef test_terminalwriter_computes_width(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setattr(terminalwriter, \"get_terminal_width\", lambda: 42)\n tw = terminalwriter.TerminalWriter()\n assert tw.fullwidth == 42\n\n\ndef test_terminalwriter_dumb_term_no_markup(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setattr(os, \"environ\", {\"TERM\": \"dumb\", \"PATH\": \"\"})\n\n class MyFile:\n closed = False\n\n def isatty(self):\n return True\n\n with monkeypatch.context() as m:\n m.setattr(sys, \"stdout\", MyFile())\n assert sys.stdout.isatty()\n tw = terminalwriter.TerminalWriter()\n assert not tw.hasmarkup", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_terminalwriter_not_unicode_win32.int_sys_platform_win3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_terminalwriter_not_unicode_win32.int_sys_platform_win3", "embedding": null, "metadata": {"file_path": "testing/io/test_terminalwriter.py", "file_name": "test_terminalwriter.py", "file_type": "text/x-python", "category": "test", "start_line": 52, "end_line": 61, "span_ids": ["impl", "test_terminalwriter_not_unicode"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_terminalwriter_not_unicode() -> None:\n \"\"\"If the file doesn't support Unicode, the string is unicode-escaped (#7475).\"\"\"\n buffer = io.BytesIO()\n file = io.TextIOWrapper(buffer, encoding=\"cp1252\")\n tw = terminalwriter.TerminalWriter(file)\n tw.write(\"hello \ud83c\udf00 
w\u00f4rld \u05d0\u05d1\u05d2\", flush=True)\n assert buffer.getvalue() == br\"hello \\U0001f300 w\\xf4rld \\u05d0\\u05d1\\u05d2\"\n\n\nwin32 = int(sys.platform == \"win32\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_attr_hasmarkup_test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR.assert_color_not_set_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/io/test_terminalwriter.py_test_attr_hasmarkup_test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR.assert_color_not_set_", "embedding": null, "metadata": {"file_path": "testing/io/test_terminalwriter.py", "file_name": "test_terminalwriter.py", "file_type": "text/x-python", "category": "test", "start_line": 153, "end_line": 212, "span_ids": ["test_attr_hasmarkup", "test_should_do_markup_FORCE_COLOR", "assert_color_set", "test_should_not_do_markup_NO_COLOR", "assert_color_not_set", "test_should_not_do_markup_PY_COLORS_eq_0", "test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR", "test_should_do_markup_PY_COLORS_eq_1"], "tokens": 478}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(sys.platform == \"win32\", reason=\"win32 has no native ansi\")\ndef test_attr_hasmarkup() -> None:\n file = io.StringIO()\n tw = terminalwriter.TerminalWriter(file)\n assert not tw.hasmarkup\n tw.hasmarkup = True\n tw.line(\"hello\", bold=True)\n s = file.getvalue()\n assert len(s) > len(\"hello\\n\")\n assert \"\\x1b[1m\" in s\n assert \"\\x1b[0m\" in s\n\n\ndef assert_color_set():\n file = io.StringIO()\n tw = terminalwriter.TerminalWriter(file)\n assert tw.hasmarkup\n tw.line(\"hello\", bold=True)\n s = file.getvalue()\n assert len(s) > len(\"hello\\n\")\n assert \"\\x1b[1m\" in s\n assert \"\\x1b[0m\" in s\n\n\ndef assert_color_not_set():\n f = io.StringIO()\n f.isatty = lambda: True # type: ignore\n tw = terminalwriter.TerminalWriter(file=f)\n assert not tw.hasmarkup\n tw.line(\"hello\", bold=True)\n s = f.getvalue()\n assert s == \"hello\\n\"\n\n\ndef test_should_do_markup_PY_COLORS_eq_1(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setitem(os.environ, \"PY_COLORS\", \"1\")\n assert_color_set()\n\n\ndef test_should_not_do_markup_PY_COLORS_eq_0(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setitem(os.environ, \"PY_COLORS\", \"0\")\n assert_color_not_set()\n\n\ndef test_should_not_do_markup_NO_COLOR(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setitem(os.environ, \"NO_COLOR\", \"1\")\n assert_color_not_set()\n\n\ndef test_should_do_markup_FORCE_COLOR(monkeypatch: MonkeyPatch) -> None:\n monkeypatch.setitem(os.environ, \"FORCE_COLOR\", \"1\")\n assert_color_set()\n\n\ndef test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR(\n monkeypatch: MonkeyPatch,\n) -> None:\n monkeypatch.setitem(os.environ, \"NO_COLOR\", \"1\")\n monkeypatch.setitem(os.environ, \"FORCE_COLOR\", \"1\")\n assert_color_not_set()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_captures_despite_exception_test_caplog_captures_despite_exception.assert_result_ret_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_caplog_captures_despite_exception_test_caplog_captures_despite_exception.assert_result_ret_1", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 241, "end_line": 268, "span_ids": ["test_caplog_captures_despite_exception"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_caplog_captures_despite_exception(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n def test_log_level_override(request, caplog):\n logger = logging.getLogger('catchlog')\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_level == logging.WARNING\n\n logger.error(\"ERROR message \" + \"will be shown\")\n\n with caplog.at_level(logging.DEBUG, logger.name):\n logger.debug(\"DEBUG message \" + \"won't be shown\")\n raise Exception()\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n log_level=WARNING\n \"\"\"\n )\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*ERROR message will be shown*\"])\n result.stdout.no_fnmatch_line(\"*DEBUG message won't be shown*\")\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_log_report_captures_according_to_config_option_upon_failure_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_log_report_captures_according_to_config_option_upon_failure_", "embedding": null, "metadata": {"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 271, "end_line": 311, "span_ids": ["test_log_report_captures_according_to_config_option_upon_failure"], "tokens": 341}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_log_report_captures_according_to_config_option_upon_failure(\n pytester: Pytester,\n) -> None:\n \"\"\"Test that upon failure:\n (1) `caplog` succeeded to capture the DEBUG message and assert on it => No `Exception` is raised.\n (2) The `DEBUG` message does NOT appear in the `Captured log call` report.\n (3) The stdout, `INFO`, and `WARNING` messages DO appear in the test reports due to `--log-level=INFO`.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n import logging\n\n def function_that_logs():\n logging.debug('DEBUG log ' + 'message')\n logging.info('INFO log ' + 'message')\n logging.warning('WARNING log ' + 
'message')\n print('Print ' + 'message')\n\n def test_that_fails(request, caplog):\n plugin = request.config.pluginmanager.getplugin('logging-plugin')\n assert plugin.log_level == logging.INFO\n\n with caplog.at_level(logging.DEBUG):\n function_that_logs()\n\n if 'DEBUG log ' + 'message' not in caplog.text:\n raise Exception('caplog failed to ' + 'capture DEBUG')\n\n assert False\n \"\"\"\n )\n\n result = pytester.runpytest(\"--log-level=INFO\")\n result.stdout.no_fnmatch_line(\"*Exception: caplog failed to capture DEBUG*\")\n result.stdout.no_fnmatch_line(\"*DEBUG log message*\")\n result.stdout.fnmatch_lines(\n [\"*Print message*\", \"*INFO log message*\", \"*WARNING log message*\"]\n )\n assert result.ret == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_originalname_TestFunction.test_function_originalname.assert_originalnames_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_originalname_TestFunction.test_function_originalname.assert_originalnames_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 683, "end_line": 704, "span_ids": ["TestFunction.test_function_originalname"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_function_originalname(self, pytester: Pytester) -> None:\n items = pytester.getitems(\n \"\"\"\n import pytest\n\n @pytest.mark.parametrize('arg', [1,2])\n def test_func(arg):\n pass\n\n def test_no_param():\n pass\n \"\"\"\n )\n originalnames = []\n for x in items:\n assert isinstance(x, pytest.Function)\n originalnames.append(x.originalname)\n assert originalnames == [\n \"test_func\",\n \"test_func\",\n \"test_no_param\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass_TestAssert_reprcompare_attrsclass.test_attrs.for_line_in_lines_2_.assert_field_a_not_in_l": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass_TestAssert_reprcompare_attrsclass.test_attrs.for_line_in_lines_2_.assert_field_a_not_in_l", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 876, "end_line": 891, "span_ids": ["TestAssert_reprcompare_attrsclass", "TestAssert_reprcompare_attrsclass.test_attrs.SimpleDataObject:2", "TestAssert_reprcompare_attrsclass.test_attrs", "TestAssert_reprcompare_attrsclass.test_attrs.SimpleDataObject"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_attrsclass:\n def test_attrs(self) -> None:\n @attr.s\n class SimpleDataObject:\n field_a = attr.ib()\n field_b = attr.ib()\n\n left = SimpleDataObject(1, \"b\")\n right = SimpleDataObject(1, \"c\")\n\n lines = callequal(left, right)\n assert lines is not None\n assert lines[2].startswith(\"Omitting 1 identical item\")\n assert \"Matching attributes\" not in lines\n for line in lines[2:]:\n assert \"field_a\" not in line", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_verbose_TestAssert_reprcompare_attrsclass.test_attrs_verbose.assert_lines_3_fie": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_attrsclass.test_attrs_verbose_TestAssert_reprcompare_attrsclass.test_attrs_verbose.assert_lines_3_fie", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 933, "end_line": 946, "span_ids": ["TestAssert_reprcompare_attrsclass.test_attrs_verbose.SimpleDataObject", "TestAssert_reprcompare_attrsclass.test_attrs_verbose", "TestAssert_reprcompare_attrsclass.test_attrs_verbose.SimpleDataObject:2"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_attrsclass:\n\n def test_attrs_verbose(self) -> None:\n @attr.s\n class SimpleDataObject:\n field_a = attr.ib()\n field_b = attr.ib()\n\n left = SimpleDataObject(1, \"b\")\n right = SimpleDataObject(1, \"c\")\n\n lines = callequal(left, right, verbose=2)\n assert lines is not None\n assert lines[2].startswith(\"Matching attributes:\")\n assert \"Omitting\" not in lines[2]\n assert lines[3] == \"['field_a']\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_os_TestMark.test_pytest_mark_name_starts_with_underscore.with_pytest_raises_Attrib.mark__some_name": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_os_TestMark.test_pytest_mark_name_starts_with_underscore.with_pytest_raises_Attrib.mark__some_name", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 45, "span_ids": ["TestMark", "TestMark.test_mark_with_param", "TestMark.test_pytest_mark_notcallable", "TestMark.test_pytest_exists_in_namespace_all", "TestMark.test_pytest_mark_name_starts_with_underscore", "TestMark.test_mark_with_param.SomeClass:2", "imports", "TestMark.test_mark_with_param.SomeClass"], "tokens": 344}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport sys\nfrom typing import List\nfrom typing import Optional\nfrom unittest import mock\n\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.mark import MarkGenerator\nfrom _pytest.mark.structures import EMPTY_PARAMETERSET_OPTION\nfrom _pytest.nodes import Collector\nfrom _pytest.nodes import Node\nfrom _pytest.pytester import Pytester\n\n\nclass TestMark:\n @pytest.mark.parametrize(\"attr\", [\"mark\", \"param\"])\n @pytest.mark.parametrize(\"modulename\", [\"py.test\", \"pytest\"])\n def test_pytest_exists_in_namespace_all(self, attr: str, modulename: str) -> None:\n module = sys.modules[modulename]\n assert attr in module.__all__ # type: ignore\n\n def test_pytest_mark_notcallable(self) -> None:\n mark = MarkGenerator(_ispytest=True)\n with pytest.raises(TypeError):\n mark() # type: ignore[operator]\n\n def test_mark_with_param(self):\n def some_function(abc):\n pass\n\n class SomeClass:\n pass\n\n assert pytest.mark.foo(some_function) is some_function\n marked_with_args = pytest.mark.foo.with_args(some_function)\n assert marked_with_args is not some_function # type: ignore[comparison-overlap]\n\n assert pytest.mark.foo(SomeClass) is SomeClass\n assert pytest.mark.foo.with_args(SomeClass) is not SomeClass # type: ignore[comparison-overlap]\n\n def test_pytest_mark_name_starts_with_underscore(self) -> None:\n mark = MarkGenerator(_ispytest=True)\n with pytest.raises(AttributeError):\n mark._some_name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_run_with_skip_mark_TestXFail.test_xfail_run_with_skip_mark.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_run_with_skip_mark_TestXFail.test_xfail_run_with_skip_mark.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 370, "end_line": 395, "span_ids": ["TestXFail.test_xfail_run_with_skip_mark"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n @pytest.mark.parametrize(\n \"test_input,expected\",\n [\n (\n [\"-rs\"],\n [\"SKIPPED [1] test_sample.py:2: unconditional skip\", \"*1 skipped*\"],\n ),\n (\n [\"-rs\", \"--runxfail\"],\n [\"SKIPPED [1] test_sample.py:2: unconditional skip\", \"*1 skipped*\"],\n ),\n ],\n )\n def test_xfail_run_with_skip_mark(\n self, pytester: Pytester, test_input, expected\n ) -> None:\n pytester.makepyfile(\n test_sample=\"\"\"\n import pytest\n @pytest.mark.skip\n def test_skip_location() -> None:\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(*test_input)\n result.stdout.fnmatch_lines(expected)", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_evalfalse_but_fails_TestXFail.test_xfail_not_report_default._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_evalfalse_but_fails_TestXFail.test_xfail_not_report_default._", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 397, "end_line": 424, "span_ids": ["TestXFail.test_xfail_not_report_default", "TestXFail.test_xfail_evalfalse_but_fails"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_evalfalse_but_fails(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.xfail('False')\n def test_func():\n assert 0\n \"\"\"\n )\n reports = runtestprotocol(item, log=False)\n callreport = reports[1]\n assert callreport.failed\n assert not hasattr(callreport, \"wasxfail\")\n assert \"xfail\" in callreport.keywords\n\n def test_xfail_not_report_default(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n test_one=\"\"\"\n import pytest\n @pytest.mark.xfail\n def test_this():\n assert 0\n \"\"\"\n )\n pytester.runpytest(p, \"-v\")\n # result.stdout.fnmatch_lines([\n # \"*HINT*use*-r*\"\n # ])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_summary_stats_test_summary_stats._Fake__is_last_item_to": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_summary_stats_test_summary_stats._Fake__is_last_item_to", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1704, "end_line": 1842, "span_ids": ["test_summary_stats"], "tokens": 1273}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"exp_color, exp_line, stats_arg\",\n [\n # The method under test only cares about the length of each\n # dict value, not the actual contents, so tuples of anything\n # suffice\n # Important statuses -- the highest priority of these always wins\n (\"red\", [(\"1 failed\", {\"bold\": True, \"red\": True})], {\"failed\": [1]}),\n (\n \"red\",\n [\n (\"1 failed\", {\"bold\": True, \"red\": True}),\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n ],\n {\"failed\": [1], \"passed\": [1]},\n ),\n (\"red\", [(\"1 error\", {\"bold\": True, \"red\": True})], {\"error\": [1]}),\n (\"red\", [(\"2 errors\", {\"bold\": True, \"red\": 
True})], {\"error\": [1, 2]}),\n (\n \"red\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 error\", {\"bold\": True, \"red\": True}),\n ],\n {\"error\": [1], \"passed\": [1]},\n ),\n # (a status that's not known to the code)\n (\"yellow\", [(\"1 weird\", {\"bold\": True, \"yellow\": True})], {\"weird\": [1]}),\n (\n \"yellow\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 weird\", {\"bold\": True, \"yellow\": True}),\n ],\n {\"weird\": [1], \"passed\": [1]},\n ),\n (\"yellow\", [(\"1 warning\", {\"bold\": True, \"yellow\": True})], {\"warnings\": [1]}),\n (\n \"yellow\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 warning\", {\"bold\": True, \"yellow\": True}),\n ],\n {\"warnings\": [1], \"passed\": [1]},\n ),\n (\n \"green\",\n [(\"5 passed\", {\"bold\": True, \"green\": True})],\n {\"passed\": [1, 2, 3, 4, 5]},\n ),\n # \"Boring\" statuses. These have no effect on the color of the summary\n # line. Thus, if *every* test has a boring status, the summary line stays\n # at its default color, i.e. yellow, to warn the user that the test run\n # produced no useful information\n (\"yellow\", [(\"1 skipped\", {\"bold\": True, \"yellow\": True})], {\"skipped\": [1]}),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"1 skipped\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"skipped\": [1], \"passed\": [1]},\n ),\n (\n \"yellow\",\n [(\"1 deselected\", {\"bold\": True, \"yellow\": True})],\n {\"deselected\": [1]},\n ),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"1 deselected\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"deselected\": [1], \"passed\": [1]},\n ),\n (\"yellow\", [(\"1 xfailed\", {\"bold\": True, \"yellow\": True})], {\"xfailed\": [1]}),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"1 xfailed\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"xfailed\": [1], \"passed\": [1]},\n ),\n (\"yellow\", [(\"1 xpassed\", {\"bold\": True, \"yellow\": True})], {\"xpassed\": [1]}),\n (\n \"yellow\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 xpassed\", {\"bold\": True, \"yellow\": True}),\n ],\n {\"xpassed\": [1], \"passed\": [1]},\n ),\n # Likewise if no tests were found at all\n (\"yellow\", [(\"no tests ran\", {\"yellow\": True})], {}),\n # Test the empty-key special case\n (\"yellow\", [(\"no tests ran\", {\"yellow\": True})], {\"\": [1]}),\n (\n \"green\",\n [(\"1 passed\", {\"bold\": True, \"green\": True})],\n {\"\": [1], \"passed\": [1]},\n ),\n # A couple more complex combinations\n (\n \"red\",\n [\n (\"1 failed\", {\"bold\": True, \"red\": True}),\n (\"2 passed\", {\"bold\": False, \"green\": True}),\n (\"3 xfailed\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"passed\": [1, 2], \"failed\": [1], \"xfailed\": [1, 2, 3]},\n ),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"2 skipped\", {\"bold\": False, \"yellow\": True}),\n (\"3 deselected\", {\"bold\": False, \"yellow\": True}),\n (\"2 xfailed\", {\"bold\": False, \"yellow\": True}),\n ],\n {\n \"passed\": [1],\n \"skipped\": [1, 2],\n \"deselected\": [1, 2, 3],\n \"xfailed\": [1, 2],\n },\n ),\n ],\n)\ndef test_summary_stats(\n tr: TerminalReporter,\n exp_line: List[Tuple[str, Dict[str, bool]]],\n exp_color: str,\n stats_arg: Dict[str, List[object]],\n) -> None:\n tr.stats = stats_arg\n\n # Fake \"_is_last_item\" to be True.\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_summary_stats.fake_session_test_summary_stats.assert_color_exp_color": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_summary_stats.fake_session_test_summary_stats.assert_color_exp_color", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1843, "end_line": 1857, "span_ids": ["test_summary_stats.fake_session:2", "test_summary_stats.fake_session"], "tokens": 1381}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"exp_color, exp_line, stats_arg\",\n [\n # The method under test only cares about the length of each\n # dict value, not the actual contents, so tuples of anything\n # suffice\n # Important statuses -- the highest priority of these always wins\n (\"red\", [(\"1 failed\", {\"bold\": True, \"red\": True})], {\"failed\": [1]}),\n (\n \"red\",\n [\n (\"1 failed\", {\"bold\": True, \"red\": True}),\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n ],\n {\"failed\": [1], \"passed\": [1]},\n ),\n (\"red\", [(\"1 error\", {\"bold\": True, \"red\": True})], {\"error\": [1]}),\n (\"red\", [(\"2 errors\", {\"bold\": True, \"red\": True})], {\"error\": [1, 2]}),\n (\n \"red\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 error\", {\"bold\": True, \"red\": True}),\n ],\n {\"error\": [1], \"passed\": [1]},\n ),\n # (a status that's not known to the code)\n (\"yellow\", [(\"1 weird\", {\"bold\": True, \"yellow\": True})], {\"weird\": [1]}),\n (\n \"yellow\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 weird\", {\"bold\": True, \"yellow\": True}),\n ],\n {\"weird\": [1], \"passed\": [1]},\n ),\n (\"yellow\", [(\"1 warning\", {\"bold\": True, \"yellow\": True})], {\"warnings\": [1]}),\n (\n \"yellow\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 warning\", {\"bold\": True, \"yellow\": True}),\n ],\n {\"warnings\": [1], \"passed\": [1]},\n ),\n (\n \"green\",\n [(\"5 passed\", {\"bold\": True, \"green\": True})],\n {\"passed\": [1, 2, 3, 4, 5]},\n ),\n # \"Boring\" statuses. These have no effect on the color of the summary\n # line. Thus, if *every* test has a boring status, the summary line stays\n # at its default color, i.e. 
yellow, to warn the user that the test run\n # produced no useful information\n (\"yellow\", [(\"1 skipped\", {\"bold\": True, \"yellow\": True})], {\"skipped\": [1]}),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"1 skipped\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"skipped\": [1], \"passed\": [1]},\n ),\n (\n \"yellow\",\n [(\"1 deselected\", {\"bold\": True, \"yellow\": True})],\n {\"deselected\": [1]},\n ),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"1 deselected\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"deselected\": [1], \"passed\": [1]},\n ),\n (\"yellow\", [(\"1 xfailed\", {\"bold\": True, \"yellow\": True})], {\"xfailed\": [1]}),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"1 xfailed\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"xfailed\": [1], \"passed\": [1]},\n ),\n (\"yellow\", [(\"1 xpassed\", {\"bold\": True, \"yellow\": True})], {\"xpassed\": [1]}),\n (\n \"yellow\",\n [\n (\"1 passed\", {\"bold\": False, \"green\": True}),\n (\"1 xpassed\", {\"bold\": True, \"yellow\": True}),\n ],\n {\"xpassed\": [1], \"passed\": [1]},\n ),\n # Likewise if no tests were found at all\n (\"yellow\", [(\"no tests ran\", {\"yellow\": True})], {}),\n # Test the empty-key special case\n (\"yellow\", [(\"no tests ran\", {\"yellow\": True})], {\"\": [1]}),\n (\n \"green\",\n [(\"1 passed\", {\"bold\": True, \"green\": True})],\n {\"\": [1], \"passed\": [1]},\n ),\n # A couple more complex combinations\n (\n \"red\",\n [\n (\"1 failed\", {\"bold\": True, \"red\": True}),\n (\"2 passed\", {\"bold\": False, \"green\": True}),\n (\"3 xfailed\", {\"bold\": False, \"yellow\": True}),\n ],\n {\"passed\": [1, 2], \"failed\": [1], \"xfailed\": [1, 2, 3]},\n ),\n (\n \"green\",\n [\n (\"1 passed\", {\"bold\": True, \"green\": True}),\n (\"2 skipped\", {\"bold\": False, \"yellow\": True}),\n (\"3 deselected\", {\"bold\": False, \"yellow\": True}),\n (\"2 xfailed\", {\"bold\": False, \"yellow\": True}),\n ],\n {\n \"passed\": [1],\n \"skipped\": [1, 2],\n \"deselected\": [1, 2, 3],\n \"xfailed\": [1, 2],\n },\n ),\n ],\n)\ndef test_summary_stats(\n tr: TerminalReporter,\n exp_line: List[Tuple[str, Dict[str, bool]]],\n exp_color: str,\n stats_arg: Dict[str, List[object]],\n) -> None:\n # ... 
other code\n class fake_session:\n testscollected = 0\n\n tr._session = fake_session # type: ignore[assignment]\n assert tr._is_last_item\n\n # Reset cache.\n tr._main_color = None\n\n print(\"Based on stats: %s\" % stats_arg)\n print(f'Expect summary: \"{exp_line}\"; with color \"{exp_color}\"')\n (line, color) = tr.build_summary_stats_line()\n print(f'Actually got: \"{line}\"; with color \"{color}\"')\n assert line == exp_line\n assert color == exp_color", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/unit_test.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/bench/unit_test.py__", "embedding": null, "metadata": {"file_path": "bench/unit_test.py", "file_name": "unit_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 14, "span_ids": ["impl", "docstring", "imports"], "tokens": 73}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from unittest import TestCase # noqa: F401\n\nfor i in range(15000):\n exec(\n f\"\"\"\nclass Test{i}(TestCase):\n @classmethod\n def setUpClass(cls): pass\n def test_1(self): pass\n def test_2(self): pass\n def test_3(self): pass\n\"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/bench/xunit.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/bench/xunit.py__", "embedding": null, "metadata": {"file_path": "bench/xunit.py", "file_name": "xunit.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 12, "span_ids": ["impl"], "tokens": 61}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "for i in range(5000):\n exec(\n f\"\"\"\nclass Test{i}:\n @classmethod\n def setup_class(cls): pass\n def test_1(self): pass\n def test_2(self): pass\n def test_3(self): pass\n\"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse.py_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse.py_pytest_", "embedding": null, "metadata": {"file_path": "doc/en/example/fixtures/test_fixtures_order_autouse.py", "file_name": "test_fixtures_order_autouse.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 46, "span_ids": ["order", "d", "a", "b", "g", "imports", "e", "f", "test_order_and_g", "c"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef order():\n return []\n\n\n@pytest.fixture\ndef a(order):\n order.append(\"a\")\n\n\n@pytest.fixture\ndef b(a, order):\n order.append(\"b\")\n\n\n@pytest.fixture(autouse=True)\ndef c(b, order):\n order.append(\"c\")\n\n\n@pytest.fixture\ndef d(b, order):\n order.append(\"d\")\n\n\n@pytest.fixture\ndef e(d, order):\n order.append(\"e\")\n\n\n@pytest.fixture\ndef f(e, order):\n order.append(\"f\")\n\n\n@pytest.fixture\ndef g(f, c, order):\n order.append(\"g\")\n\n\ndef test_order_and_g(g, order):\n assert order == [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py_pytest_", "embedding": null, "metadata": {"file_path": "doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py", "file_name": "test_fixtures_order_autouse_multiple_scopes.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 32, "span_ids": ["order", "c2", "TestClassWithC1Request.test_order", "TestClassWithoutC1Request", "c3", "TestClassWithC1Request", "c1", "imports", "TestClassWithoutC1Request.test_order"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture(scope=\"class\")\ndef order():\n return []\n\n\n@pytest.fixture(scope=\"class\", autouse=True)\ndef c1(order):\n order.append(\"c1\")\n\n\n@pytest.fixture(scope=\"class\")\ndef c2(order):\n order.append(\"c2\")\n\n\n@pytest.fixture(scope=\"class\")\ndef c3(order, c1):\n order.append(\"c3\")\n\n\nclass TestClassWithC1Request:\n def test_order(self, order, c1, c3):\n assert order == [\"c1\", \"c3\"]\n\n\nclass TestClassWithoutC1Request:\n def test_order(self, order, c2):\n assert order == [\"c1\", \"c2\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py_pytest_", "embedding": null, "metadata": {"file_path": "doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py", "file_name": "test_fixtures_order_autouse_temp_effects.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 37, "span_ids": ["order", "c2", "TestClassWithAutouse.test_req", "TestClassWithAutouse", 
"TestClassWithoutAutouse.test_no_req", "TestClassWithoutAutouse.test_req", "c1", "imports", "TestClassWithAutouse.test_no_req", "TestClassWithoutAutouse", "TestClassWithAutouse.c3"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef order():\n return []\n\n\n@pytest.fixture\ndef c1(order):\n order.append(\"c1\")\n\n\n@pytest.fixture\ndef c2(order):\n order.append(\"c2\")\n\n\nclass TestClassWithAutouse:\n @pytest.fixture(autouse=True)\n def c3(self, order, c2):\n order.append(\"c3\")\n\n def test_req(self, order, c1):\n assert order == [\"c2\", \"c3\", \"c1\"]\n\n def test_no_req(self, order):\n assert order == [\"c2\", \"c3\"]\n\n\nclass TestClassWithoutAutouse:\n def test_req(self, order, c1):\n assert order == [\"c1\"]\n\n def test_no_req(self, order):\n assert order == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.py_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.py_pytest_", "embedding": null, "metadata": {"file_path": "doc/en/example/fixtures/test_fixtures_order_dependencies.py", "file_name": "test_fixtures_order_dependencies.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 46, "span_ids": ["order", "d", "a", "b", "g", "imports", "e", "f", "test_order", "c"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef order():\n return []\n\n\n@pytest.fixture\ndef a(order):\n order.append(\"a\")\n\n\n@pytest.fixture\ndef b(a, order):\n order.append(\"b\")\n\n\n@pytest.fixture\ndef c(a, b, order):\n order.append(\"c\")\n\n\n@pytest.fixture\ndef d(c, b, order):\n order.append(\"d\")\n\n\n@pytest.fixture\ndef e(d, b, order):\n order.append(\"e\")\n\n\n@pytest.fixture\ndef f(e, order):\n order.append(\"f\")\n\n\n@pytest.fixture\ndef g(f, c, order):\n order.append(\"g\")\n\n\ndef test_order(g, order):\n assert order == [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_scope.py_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_order_scope.py_pytest_", "embedding": null, "metadata": {"file_path": "doc/en/example/fixtures/test_fixtures_order_scope.py", "file_name": "test_fixtures_order_scope.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 37, "span_ids": ["mod", "order", "pack", 
"sess", "TestClass.test_order", "TestClass", "func", "imports", "cls"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture(scope=\"session\")\ndef order():\n return []\n\n\n@pytest.fixture\ndef func(order):\n order.append(\"function\")\n\n\n@pytest.fixture(scope=\"class\")\ndef cls(order):\n order.append(\"class\")\n\n\n@pytest.fixture(scope=\"module\")\ndef mod(order):\n order.append(\"module\")\n\n\n@pytest.fixture(scope=\"package\")\ndef pack(order):\n order.append(\"package\")\n\n\n@pytest.fixture(scope=\"session\")\ndef sess(order):\n order.append(\"session\")\n\n\nclass TestClass:\n def test_order(self, func, cls, mod, pack, sess, order):\n assert order == [\"session\", \"package\", \"module\", \"class\", \"function\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.py__", "embedding": null, "metadata": {"file_path": "doc/en/example/fixtures/test_fixtures_request_different_scope.py", "file_name": "test_fixtures_request_different_scope.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 30, "span_ids": ["order", "TestOne.test_order", "outer", "TestOne.inner", "imports", "TestTwo.test_order", "TestTwo.inner", "TestOne", "TestTwo"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\n@pytest.fixture\ndef order():\n return []\n\n\n@pytest.fixture\ndef outer(order, inner):\n order.append(\"outer\")\n\n\nclass TestOne:\n @pytest.fixture\n def inner(self, order):\n order.append(\"one\")\n\n def test_order(self, order, outer):\n assert order == [\"one\", \"outer\"]\n\n\nclass TestTwo:\n @pytest.fixture\n def inner(self, order):\n order.append(\"two\")\n\n def test_order(self, order, outer):\n assert order == [\"two\", \"outer\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py___login.return.github_repository_owner_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py___login.return.github_repository_owner_", "embedding": null, "metadata": {"file_path": "scripts/prepare-release-pr.py", "file_name": "prepare-release-pr.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 47, "span_ids": ["impl", "impl:2", "login", "docstring", "InvalidFeatureRelease", "imports"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"\nThis script is part of the pytest release process which is triggered manually in the Actions\ntab of the repository.\n\nThe user will need to enter the base branch to start the release from (for example\n``6.1.x`` or ``master``) and if it should be a major release.\n\nThe appropriate version will be obtained based on the given branch automatically.\n\nAfter that, it will create a release using the `release` tox environment, and push a new PR.\n\n**Secret**: currently the secret is defined in the @pytestbot account,\nwhich the core maintainers have access to. There we created a new secret named `chatops`\nwith write access to the repository.\n\"\"\"\nimport argparse\nimport re\nfrom pathlib import Path\nfrom subprocess import check_call\nfrom subprocess import check_output\nfrom subprocess import run\n\nfrom colorama import Fore\nfrom colorama import init\nfrom github3.repos import Repository\n\n\nclass InvalidFeatureRelease(Exception):\n pass\n\n\nSLUG = \"pytest-dev/pytest\"\n\nPR_BODY = \"\"\"\\\nCreated automatically from manual trigger.\n\nOnce all builds pass and it has been **approved** by one or more maintainers, the build\ncan be released by pushing a tag `{version}` to this repository.\n\"\"\"\n\n\ndef login(token: str) -> Repository:\n import github3\n\n github = github3.login(token=token)\n owner, repo = SLUG.split(\"/\")\n return github.repository(owner, repo)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py_prepare_release_pr_prepare_release_pr.print_f_Pull_request_For": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py_prepare_release_pr_prepare_release_pr.print_f_Pull_request_For", "embedding": null, "metadata": {"file_path": "scripts/prepare-release-pr.py", "file_name": "prepare-release-pr.py", "file_type": "text/x-python", "category": "implementation", "start_line": 50, "end_line": 116, "span_ids": ["prepare_release_pr"], "tokens": 474}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def prepare_release_pr(base_branch: str, is_major: bool, token: str) -> None:\n print()\n print(f\"Processing release for branch {Fore.CYAN}{base_branch}\")\n\n check_call([\"git\", \"checkout\", f\"origin/{base_branch}\"])\n\n try:\n version = find_next_version(base_branch, is_major)\n except InvalidFeatureRelease as e:\n print(f\"{Fore.RED}{e}\")\n raise SystemExit(1)\n\n print(f\"Version: {Fore.CYAN}{version}\")\n\n release_branch = f\"release-{version}\"\n\n run(\n [\"git\", \"config\", \"user.name\", \"pytest bot\"],\n text=True,\n check=True,\n capture_output=True,\n )\n run(\n [\"git\", \"config\", \"user.email\", \"pytestbot@gmail.com\"],\n text=True,\n check=True,\n capture_output=True,\n )\n\n run(\n [\"git\", \"checkout\", \"-b\", release_branch, 
f\"origin/{base_branch}\"],\n text=True,\n check=True,\n capture_output=True,\n )\n\n print(f\"Branch {Fore.CYAN}{release_branch}{Fore.RESET} created.\")\n\n # important to use tox here because we have changed branches, so dependencies\n # might have changed as well\n cmdline = [\"tox\", \"-e\", \"release\", \"--\", version, \"--skip-check-links\"]\n print(\"Running\", \" \".join(cmdline))\n run(\n cmdline,\n text=True,\n check=True,\n capture_output=True,\n )\n\n oauth_url = f\"https://{token}:x-oauth-basic@github.com/{SLUG}.git\"\n run(\n [\"git\", \"push\", oauth_url, f\"HEAD:{release_branch}\", \"--force\"],\n text=True,\n check=True,\n capture_output=True,\n )\n print(f\"Branch {Fore.CYAN}{release_branch}{Fore.RESET} pushed.\")\n\n body = PR_BODY.format(version=version)\n repo = login(token)\n pr = repo.create_pull(\n f\"Prepare release {version}\",\n base=base_branch,\n head=release_branch,\n body=body,\n )\n print(f\"Pull request {Fore.CYAN}{pr.url}{Fore.RESET} created.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py_find_next_version_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/prepare-release-pr.py_find_next_version_", "embedding": null, "metadata": {"file_path": "scripts/prepare-release-pr.py", "file_name": "prepare-release-pr.py", "file_type": "text/x-python", "category": "implementation", "start_line": 119, "end_line": 158, "span_ids": ["main", "find_next_version", "impl:5"], "tokens": 306}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def find_next_version(base_branch: str, is_major: bool) -> str:\n output = check_output([\"git\", \"tag\"], encoding=\"UTF-8\")\n valid_versions = []\n for v in output.splitlines():\n m = re.match(r\"\\d.\\d.\\d+$\", v.strip())\n if m:\n valid_versions.append(tuple(int(x) for x in v.split(\".\")))\n\n valid_versions.sort()\n last_version = valid_versions[-1]\n\n changelog = Path(\"changelog\")\n\n features = list(changelog.glob(\"*.feature.rst\"))\n breaking = list(changelog.glob(\"*.breaking.rst\"))\n is_feature_release = features or breaking\n\n if is_major:\n return f\"{last_version[0]+1}.0.0\"\n elif is_feature_release:\n return f\"{last_version[0]}.{last_version[1] + 1}.0\"\n else:\n return f\"{last_version[0]}.{last_version[1]}.{last_version[2] + 1}\"\n\n\ndef main() -> None:\n init(autoreset=True)\n parser = argparse.ArgumentParser()\n parser.add_argument(\"base_branch\")\n parser.add_argument(\"token\")\n parser.add_argument(\"--major\", action=\"store_true\", default=False)\n options = parser.parse_args()\n prepare_release_pr(\n base_branch=options.base_branch, is_major=options.major, token=options.token\n )\n\n\nif __name__ == \"__main__\":\n main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py___get_comment_data.if_comment_in_payload_.else_.return.payload_issue_": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py___get_comment_data.if_comment_in_payload_.else_.return.payload_issue_", "embedding": null, "metadata": {"file_path": "scripts/release-on-comment.py", "file_name": "release-on-comment.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 77, "span_ids": ["impl", "impl:2", "login", "docstring", "get_comment_data", "InvalidFeatureRelease", "imports"], "tokens": 501}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"\nThis script is part of the pytest release process which is triggered by comments\nin issues.\n\nThis script is started by the `release-on-comment.yml` workflow, which always executes on\n`master` and is triggered by two comment related events:\n\n* https://help.github.com/en/actions/reference/events-that-trigger-workflows#issue-comment-event-issue_comment\n* https://help.github.com/en/actions/reference/events-that-trigger-workflows#issues-event-issues\n\nThis script receives the payload and a secrets on the command line.\n\nThe payload must contain a comment with a phrase matching this pseudo-regular expression:\n\n @pytestbot please prepare (major )? release from \n\nThen the appropriate version will be obtained based on the given branch name:\n\n* a major release from master if \"major\" appears in the phrase in that position\n* a feature or bug fix release from master (based if there are features in the current changelog\n folder)\n* a bug fix from a maintenance branch\n\nAfter that, it will create a release using the `release` tox environment, and push a new PR.\n\n**Secret**: currently the secret is defined in the @pytestbot account, which the core maintainers\nhave access to. 
There we created a new secret named `chatops` with write access to the repository.\n\"\"\"\nimport argparse\nimport json\nimport os\nimport re\nimport traceback\nfrom pathlib import Path\nfrom subprocess import CalledProcessError\nfrom subprocess import check_call\nfrom subprocess import check_output\nfrom subprocess import run\nfrom textwrap import dedent\nfrom typing import Dict\nfrom typing import Optional\nfrom typing import Tuple\n\nfrom colorama import Fore\nfrom colorama import init\nfrom github3.repos import Repository\n\n\nclass InvalidFeatureRelease(Exception):\n pass\n\n\nSLUG = \"pytest-dev/pytest\"\n\nPR_BODY = \"\"\"\\\nCreated automatically from {comment_url}.\n\nOnce all builds pass and it has been **approved** by one or more maintainers, the build\ncan be released by pushing a tag `{version}` to this repository.\n\nCloses #{issue_number}.\n\"\"\"\n\n\ndef login(token: str) -> Repository:\n import github3\n\n github = github3.login(token=token)\n owner, repo = SLUG.split(\"/\")\n return github.repository(owner, repo)\n\n\ndef get_comment_data(payload: Dict) -> str:\n if \"comment\" in payload:\n return payload[\"comment\"]\n else:\n return payload[\"issue\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_validate_and_get_issue_comment_payload_print_and_exit.raise_SystemExit_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_validate_and_get_issue_comment_payload_print_and_exit.raise_SystemExit_1_", "embedding": null, "metadata": {"file_path": "scripts/release-on-comment.py", "file_name": "release-on-comment.py", "file_type": "text/x-python", "category": "implementation", "start_line": 80, "end_line": 95, "span_ids": ["print_and_exit", "validate_and_get_issue_comment_payload"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def validate_and_get_issue_comment_payload(\n issue_payload_path: Optional[Path],\n) -> Tuple[str, str, bool]:\n payload = json.loads(issue_payload_path.read_text(encoding=\"UTF-8\"))\n body = get_comment_data(payload)[\"body\"]\n m = re.match(r\"@pytestbot please prepare (major )?release from ([\\w\\-_\\.]+)\", body)\n if m:\n is_major, base_branch = m.group(1) is not None, m.group(2)\n else:\n is_major, base_branch = False, None\n return payload, base_branch, is_major\n\n\ndef print_and_exit(msg) -> None:\n print(msg)\n raise SystemExit(1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_trigger_release_trigger_release.if_error_contents_.else_.print_f_Fore_GREEN_Succe": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_trigger_release_trigger_release.if_error_contents_.else_.print_f_Fore_GREEN_Succe", "embedding": null, "metadata": {"file_path": "scripts/release-on-comment.py", "file_name": "release-on-comment.py", 
"file_type": "text/x-python", "category": "implementation", "start_line": 98, "end_line": 202, "span_ids": ["trigger_release"], "tokens": 780}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def trigger_release(payload_path: Path, token: str) -> None:\n payload, base_branch, is_major = validate_and_get_issue_comment_payload(\n payload_path\n )\n if base_branch is None:\n url = get_comment_data(payload)[\"html_url\"]\n print_and_exit(\n f\"Comment {Fore.CYAN}{url}{Fore.RESET} did not match the trigger command.\"\n )\n print()\n print(f\"Precessing release for branch {Fore.CYAN}{base_branch}\")\n\n repo = login(token)\n\n issue_number = payload[\"issue\"][\"number\"]\n issue = repo.issue(issue_number)\n\n check_call([\"git\", \"checkout\", f\"origin/{base_branch}\"])\n\n try:\n version = find_next_version(base_branch, is_major)\n except InvalidFeatureRelease as e:\n issue.create_comment(str(e))\n print_and_exit(f\"{Fore.RED}{e}\")\n\n error_contents = \"\"\n try:\n print(f\"Version: {Fore.CYAN}{version}\")\n\n release_branch = f\"release-{version}\"\n\n run(\n [\"git\", \"config\", \"user.name\", \"pytest bot\"],\n text=True,\n check=True,\n capture_output=True,\n )\n run(\n [\"git\", \"config\", \"user.email\", \"pytestbot@gmail.com\"],\n text=True,\n check=True,\n capture_output=True,\n )\n\n run(\n [\"git\", \"checkout\", \"-b\", release_branch, f\"origin/{base_branch}\"],\n text=True,\n check=True,\n capture_output=True,\n )\n\n print(f\"Branch {Fore.CYAN}{release_branch}{Fore.RESET} created.\")\n\n # important to use tox here because we have changed branches, so dependencies\n # might have changed as well\n cmdline = [\"tox\", \"-e\", \"release\", \"--\", version, \"--skip-check-links\"]\n print(\"Running\", \" \".join(cmdline))\n run(\n cmdline,\n text=True,\n check=True,\n capture_output=True,\n )\n\n oauth_url = f\"https://{token}:x-oauth-basic@github.com/{SLUG}.git\"\n run(\n [\"git\", \"push\", oauth_url, f\"HEAD:{release_branch}\", \"--force\"],\n text=True,\n check=True,\n capture_output=True,\n )\n print(f\"Branch {Fore.CYAN}{release_branch}{Fore.RESET} pushed.\")\n\n body = PR_BODY.format(\n comment_url=get_comment_data(payload)[\"html_url\"],\n version=version,\n issue_number=issue_number,\n )\n pr = repo.create_pull(\n f\"Prepare release {version}\",\n base=base_branch,\n head=release_branch,\n body=body,\n )\n print(f\"Pull request {Fore.CYAN}{pr.url}{Fore.RESET} created.\")\n\n comment = issue.create_comment(\n f\"As requested, opened a PR for release `{version}`: #{pr.number}.\"\n )\n print(f\"Notified in original comment {Fore.CYAN}{comment.url}{Fore.RESET}.\")\n\n except CalledProcessError as e:\n error_contents = f\"CalledProcessError\\noutput:\\n{e.output}\\nstderr:\\n{e.stderr}\"\n except Exception:\n error_contents = f\"Exception:\\n{traceback.format_exc()}\"\n\n if error_contents:\n link = f\"https://github.com/{SLUG}/actions/runs/{os.environ['GITHUB_RUN_ID']}\"\n msg = ERROR_COMMENT.format(\n version=version, base_branch=base_branch, contents=error_contents, link=link\n )\n issue.create_comment(msg)\n print_and_exit(f\"{Fore.RED}{error_contents}\")\n else:\n print(f\"{Fore.GREEN}Success.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_ERROR_COMMENT_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release-on-comment.py_ERROR_COMMENT_", "embedding": null, "metadata": {"file_path": "scripts/release-on-comment.py", "file_name": "release-on-comment.py", "file_type": "text/x-python", "category": "implementation", "start_line": 205, "end_line": 262, "span_ids": ["impl:7", "main", "find_next_version", "impl:5"], "tokens": 390}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "ERROR_COMMENT = \"\"\"\\\nThe request to prepare release `{version}` from {base_branch} failed with:\n\n```\n{contents}\n```\n\nSee: {link}.\n\"\"\"\n\n\ndef find_next_version(base_branch: str, is_major: bool) -> str:\n output = check_output([\"git\", \"tag\"], encoding=\"UTF-8\")\n valid_versions = []\n for v in output.splitlines():\n m = re.match(r\"\\d.\\d.\\d+$\", v.strip())\n if m:\n valid_versions.append(tuple(int(x) for x in v.split(\".\")))\n\n valid_versions.sort()\n last_version = valid_versions[-1]\n\n changelog = Path(\"changelog\")\n\n features = list(changelog.glob(\"*.feature.rst\"))\n breaking = list(changelog.glob(\"*.breaking.rst\"))\n is_feature_release = features or breaking\n\n if is_feature_release and base_branch != \"master\":\n msg = dedent(\n f\"\"\"\n Found features or breaking changes in `{base_branch}`, and feature releases can only be\n created from `master`:\n \"\"\"\n )\n msg += \"\\n\".join(f\"* `{x.name}`\" for x in sorted(features + breaking))\n raise InvalidFeatureRelease(msg)\n\n if is_major:\n return f\"{last_version[0]+1}.0.0\"\n elif is_feature_release:\n return f\"{last_version[0]}.{last_version[1] + 1}.0\"\n else:\n return f\"{last_version[0]}.{last_version[1]}.{last_version[2] + 1}\"\n\n\ndef main() -> None:\n init(autoreset=True)\n parser = argparse.ArgumentParser()\n parser.add_argument(\"payload\")\n parser.add_argument(\"token\")\n options = parser.parse_args()\n trigger_release(Path(options.payload), options.token)\n\n\nif __name__ == \"__main__\":\n main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py__Invoke_development_tas_announce.check_call_git_add_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/release.py__Invoke_development_tas_announce.check_call_git_add_", "embedding": null, "metadata": {"file_path": "scripts/release.py", "file_name": "release.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 58, "span_ids": ["announce", "docstring", "imports"], "tokens": 485}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Invoke development tasks.\"\"\"\nimport 
argparse\nimport os\nfrom pathlib import Path\nfrom subprocess import call\nfrom subprocess import check_call\nfrom subprocess import check_output\n\nfrom colorama import Fore\nfrom colorama import init\n\n\ndef announce(version):\n \"\"\"Generates a new release announcement entry in the docs.\"\"\"\n stdout = check_output([\"git\", \"describe\", \"--abbrev=0\", \"--tags\"])\n stdout = stdout.decode(\"utf-8\")\n last_version = stdout.strip()\n\n stdout = check_output([\"git\", \"log\", f\"{last_version}..HEAD\", \"--format=%aN\"])\n stdout = stdout.decode(\"utf-8\")\n\n contributors = set(stdout.splitlines())\n\n template_name = (\n \"release.minor.rst\" if version.endswith(\".0\") else \"release.patch.rst\"\n )\n template_text = (\n Path(__file__).parent.joinpath(template_name).read_text(encoding=\"UTF-8\")\n )\n\n contributors_text = \"\\n\".join(f\"* {name}\" for name in sorted(contributors)) + \"\\n\"\n text = template_text.format(version=version, contributors=contributors_text)\n\n target = Path(__file__).parent.joinpath(f\"../doc/en/announce/release-{version}.rst\")\n target.write_text(text, encoding=\"UTF-8\")\n print(f\"{Fore.CYAN}[generate.announce] {Fore.RESET}Generated {target.name}\")\n\n # Update index with the new release entry\n index_path = Path(__file__).parent.joinpath(\"../doc/en/announce/index.rst\")\n lines = index_path.read_text(encoding=\"UTF-8\").splitlines()\n indent = \" \"\n for index, line in enumerate(lines):\n if line.startswith(f\"{indent}release-\"):\n new_line = indent + target.stem\n if line != new_line:\n lines.insert(index, new_line)\n index_path.write_text(\"\\n\".join(lines) + \"\\n\", encoding=\"UTF-8\")\n print(\n f\"{Fore.CYAN}[generate.announce] {Fore.RESET}Updated {index_path.name}\"\n )\n else:\n print(\n f\"{Fore.CYAN}[generate.announce] {Fore.RESET}Skip {index_path.name} (already contains release)\"\n )\n break\n\n check_call([\"git\", \"add\", str(target)])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/update-plugin-list.py_datetime_DEVELOPMENT_STATUS_CLASSIFIERS._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/update-plugin-list.py_datetime_DEVELOPMENT_STATUS_CLASSIFIERS._", "embedding": null, "metadata": {"file_path": "scripts/update-plugin-list.py", "file_name": "update-plugin-list.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 23, "span_ids": ["impl", "imports"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import datetime\nimport pathlib\nimport re\n\nimport packaging.version\nimport requests\nimport tabulate\n\nFILE_HEAD = r\"\"\"Plugins List\n============\n\nPyPI projects that match \"pytest-\\*\" are considered plugins and are listed\nautomatically. 
Packages classified as inactive are excluded.\n\"\"\"\nDEVELOPMENT_STATUS_CLASSIFIERS = (\n \"Development Status :: 1 - Planning\",\n \"Development Status :: 2 - Pre-Alpha\",\n \"Development Status :: 3 - Alpha\",\n \"Development Status :: 4 - Beta\",\n \"Development Status :: 5 - Production/Stable\",\n \"Development Status :: 6 - Mature\",\n \"Development Status :: 7 - Inactive\",\n)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/update-plugin-list.py_iter_plugins_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/scripts/update-plugin-list.py_iter_plugins_", "embedding": null, "metadata": {"file_path": "scripts/update-plugin-list.py", "file_name": "update-plugin-list.py", "file_type": "text/x-python", "category": "implementation", "start_line": 26, "end_line": 87, "span_ids": ["main", "iter_plugins", "impl:5"], "tokens": 521}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def iter_plugins():\n regex = r\">([\\d\\w-]*)\"\n response = requests.get(\"https://pypi.org/simple\")\n for match in re.finditer(regex, response.text):\n name = match.groups()[0]\n if not name.startswith(\"pytest-\"):\n continue\n response = requests.get(f\"https://pypi.org/pypi/{name}/json\")\n if response.status_code == 404:\n # Some packages, like pytest-azurepipelines42, are included in https://pypi.org/simple but\n # return 404 on the JSON API. 
Skip.\n continue\n response.raise_for_status()\n info = response.json()[\"info\"]\n if \"Development Status :: 7 - Inactive\" in info[\"classifiers\"]:\n continue\n for classifier in DEVELOPMENT_STATUS_CLASSIFIERS:\n if classifier in info[\"classifiers\"]:\n status = classifier[22:]\n break\n else:\n status = \"N/A\"\n requires = \"N/A\"\n if info[\"requires_dist\"]:\n for requirement in info[\"requires_dist\"]:\n if requirement == \"pytest\" or \"pytest \" in requirement:\n requires = requirement\n break\n releases = response.json()[\"releases\"]\n for release in sorted(releases, key=packaging.version.parse, reverse=True):\n if releases[release]:\n release_date = datetime.date.fromisoformat(\n releases[release][-1][\"upload_time_iso_8601\"].split(\"T\")[0]\n )\n last_release = release_date.strftime(\"%b %d, %Y\")\n break\n name = f'`{info[\"name\"]} <{info[\"project_url\"]}>`_'\n summary = info[\"summary\"].replace(\"\\n\", \"\")\n summary = re.sub(r\"_\\b\", \"\", summary)\n yield {\n \"name\": name,\n \"summary\": summary,\n \"last release\": last_release,\n \"status\": status,\n \"requires\": requires,\n }\n\n\ndef main():\n plugins = list(iter_plugins())\n plugin_table = tabulate.tabulate(plugins, headers=\"keys\", tablefmt=\"rst\")\n plugin_list = pathlib.Path(\"doc\", \"en\", \"plugin_list.rst\")\n with plugin_list.open(\"w\") as f:\n f.write(FILE_HEAD)\n f.write(f\"This list contains {len(plugins)} plugins.\\n\\n\")\n f.write(plugin_table)\n f.write(\"\\n\")\n\n\nif __name__ == \"__main__\":\n main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_argcomplete.py__Allow_bash_completion__from_typing_import_Option": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_argcomplete.py__Allow_bash_completion__from_typing_import_Option", "embedding": null, "metadata": {"file_path": "src/_pytest/_argcomplete.py", "file_name": "_argcomplete.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 70, "span_ids": ["docstring", "imports"], "tokens": 552}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Allow bash-completion for argparse with argcomplete if installed.\n\nNeeds argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail\nto find the magic string, so _ARGCOMPLETE env. var is never set, and\nthis does not need special code).\n\nFunction try_argcomplete(parser) should be called directly before\nthe call to ArgumentParser.parse_args().\n\nThe filescompleter is what you normally would use on the positional\narguments specification, in order to get \"dirname/\" after \"dirn\"\ninstead of the default \"dirname \":\n\n optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter\n\nOther, application specific, completers should go in the file\ndoing the add_argument calls as they need to be specified as .completer\nattributes as well. 
(If argcomplete is not installed, the function the\nattribute points to will not be used).\n\nSPEEDUP\n=======\n\nThe generic argcomplete script for bash-completion\n(/etc/bash_completion.d/python-argcomplete.sh)\nuses a python program to determine startup script generated by pip.\nYou can speed up completion somewhat by changing this script to include\n # PYTHON_ARGCOMPLETE_OK\nso the python-argcomplete-check-easy-install-script does not\nneed to be called to find the entry point of the code and see if that is\nmarked with PYTHON_ARGCOMPLETE_OK.\n\nINSTALL/DEBUGGING\n=================\n\nTo include this support in another application that has setup.py generated\nscripts:\n\n- Add the line:\n # PYTHON_ARGCOMPLETE_OK\n near the top of the main python entry point.\n\n- Include in the file calling parse_args():\n from _argcomplete import try_argcomplete, filescompleter\n Call try_argcomplete just before parse_args(), and optionally add\n filescompleter to the positional arguments' add_argument().\n\nIf things do not work right away:\n\n- Switch on argcomplete debugging with (also helpful when doing custom\n completers):\n export _ARC_DEBUG=1\n\n- Run:\n python-argcomplete-check-easy-install-script $(which appname)\n echo $?\n will echo 0 if the magic line has been found, 1 if not.\n\n- Sometimes it helps to find early on errors using:\n _ARGCOMPLETE=1 _ARC_DEBUG=1 appname\n which should throw a KeyError: 'COMPLINE' (which is properly set by the\n global argcomplete script).\n\"\"\"\nimport argparse\nimport os\nimport sys\nfrom glob import glob\nfrom typing import Any\nfrom typing import List\nfrom typing import Optional", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code_Code.name.return.self_raw_co_name": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Code_Code.name.return.self_raw_co_name", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 56, "end_line": 80, "span_ids": ["Code.from_function", "Code.firstlineno", "Code.__eq__", "Code.name", "Code:5", "Code"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Code:\n \"\"\"Wrapper around Python code objects.\"\"\"\n\n __slots__ = (\"raw\",)\n\n def __init__(self, obj: CodeType) -> None:\n self.raw = obj\n\n @classmethod\n def from_function(cls, obj: object) -> \"Code\":\n return cls(getrawcode(obj))\n\n def __eq__(self, other):\n return self.raw == other.raw\n\n # Ignore type because of https://github.com/python/mypy/issues/4266.\n __hash__ = None # type: ignore\n\n @property\n def firstlineno(self) -> int:\n return self.raw.co_firstlineno - 1\n\n @property\n def name(self) -> str:\n return self.raw.co_name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.ishidden_TracebackEntry.ishidden.return.tbh": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.ishidden_TracebackEntry.ishidden.return.tbh", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 266, "end_line": 291, "span_ids": ["TracebackEntry.ishidden"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TracebackEntry:\n\n def ishidden(self) -> bool:\n \"\"\"Return True if the current frame has a var __tracebackhide__\n resolving to True.\n\n If __tracebackhide__ is a callable, it gets called with the\n ExceptionInfo instance and can decide whether to hide the traceback.\n\n Mostly for internal use.\n \"\"\"\n tbh: Union[\n bool, Callable[[Optional[ExceptionInfo[BaseException]]], bool]\n ] = False\n for maybe_ns_dct in (self.frame.f_locals, self.frame.f_globals):\n # in normal cases, f_locals and f_globals are dictionaries\n # however via `exec(...)` / `eval(...)` they can be other types\n # (even incorrect types!).\n # as such, we suppress all exceptions while accessing __tracebackhide__\n try:\n tbh = maybe_ns_dct[\"__tracebackhide__\"]\n except Exception:\n pass\n else:\n break\n if tbh and callable(tbh):\n return tbh(None if self._excinfo is None else self._excinfo())\n return tbh", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.__str___TracebackEntry.name.return.self_frame_code_raw_co_na": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_TracebackEntry.__str___TracebackEntry.name.return.self_frame_code_raw_co_na", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 293, "end_line": 314, "span_ids": ["TracebackEntry.__str__", "TracebackEntry.name"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TracebackEntry:\n\n def __str__(self) -> str:\n name = self.frame.code.name\n try:\n line = str(self.statement).lstrip()\n except KeyboardInterrupt:\n raise\n except BaseException:\n line = \"???\"\n # This output does not quite match Python's repr for traceback entries,\n # but changing it to do so would break certain plugins. 
See\n # https://github.com/pytest-dev/pytest/pull/7535/ for details.\n return \" File %r:%d in %s\\n %s\\n\" % (\n str(self.path),\n self.lineno + 1,\n name,\n line,\n )\n\n @property\n def name(self) -> str:\n \"\"\"co_name of underlying code.\"\"\"\n return self.frame.code.raw.co_name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.__getitem___Traceback.filter.return.Traceback_filter_fn_self": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.__getitem___Traceback.filter.return.Traceback_filter_fn_self", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 373, "end_line": 399, "span_ids": ["Traceback.__getitem__", "Traceback.__getitem___3", "Traceback.__getitem___2", "Traceback.filter"], "tokens": 235}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Traceback(List[TracebackEntry]):\n\n @overload\n def __getitem__(self, key: int) -> TracebackEntry:\n ...\n\n @overload\n def __getitem__(self, key: slice) -> \"Traceback\":\n ...\n\n def __getitem__(self, key: Union[int, slice]) -> Union[TracebackEntry, \"Traceback\"]:\n if isinstance(key, slice):\n return self.__class__(super().__getitem__(key))\n else:\n return super().__getitem__(key)\n\n def filter(\n self, fn: Callable[[TracebackEntry], bool] = lambda x: not x.ishidden()\n ) -> \"Traceback\":\n \"\"\"Return a Traceback instance with certain items removed\n\n fn is a function that gets a single argument, a TracebackEntry\n instance, and should return True when the item should be added\n to the Traceback, False when not.\n\n By default this removes all the TracebackEntries which are hidden\n (see ishidden() above).\n \"\"\"\n return Traceback(filter(fn, self), self._excinfo)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.getcrashentry__E.TypeVar__E_bound_BaseE": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py_Traceback.getcrashentry__E.TypeVar__E_bound_BaseE", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 401, "end_line": 440, "span_ids": ["Traceback.getcrashentry", "impl:4", "Traceback.recursionindex"], "tokens": 353}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Traceback(List[TracebackEntry]):\n\n def getcrashentry(self) -> TracebackEntry:\n \"\"\"Return last non-hidden 
traceback entry that lead to the exception of a traceback.\"\"\"\n for i in range(-1, -len(self) - 1, -1):\n entry = self[i]\n if not entry.ishidden():\n return entry\n return self[-1]\n\n def recursionindex(self) -> Optional[int]:\n \"\"\"Return the index of the frame/TracebackEntry where recursion originates if\n appropriate, None if no recursion occurred.\"\"\"\n cache: Dict[Tuple[Any, int, int], List[Dict[str, Any]]] = {}\n for i, entry in enumerate(self):\n # id for the code.raw is needed to work around\n # the strange metaprogramming in the decorator lib from pypi\n # which generates code objects that have hash/value equality\n # XXX needs a test\n key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno\n # print \"checking for recursion at\", key\n values = cache.setdefault(key, [])\n if values:\n f = entry.frame\n loc = f.f_locals\n for otherloc in values:\n if f.eval(\n co_equal,\n __recursioncache_locals_1=loc,\n __recursioncache_locals_2=otherloc,\n ):\n return i\n values.append(entry.frame.f_locals)\n return None\n\n\nco_equal = compile(\n \"__recursioncache_locals_1 == __recursioncache_locals_2\", \"?\", \"eval\"\n)\n\n\n_E = TypeVar(\"_E\", bound=BaseException, covariant=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py__Relative_paths_that_we___PY_DIR.Path_py___file___parent": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/_code/code.py__Relative_paths_that_we___PY_DIR.Path_py___file___parent", "embedding": null, "metadata": {"file_path": "src/_pytest/_code/code.py", "file_name": "code.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1223, "end_line": 1233, "span_ids": ["impl:8", "getfslineno"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# Relative paths that we use to filter traceback entries from appearing to the user;\n# see filter_traceback.\n# note: if we need to add more paths than what we have now we should probably use a list\n# for better maintenance.\n\n_PLUGGY_DIR = Path(pluggy.__file__.rstrip(\"oc\"))\n# pluggy is either a package or a single module depending on the version\nif _PLUGGY_DIR.name == \"__init__.py\":\n _PLUGGY_DIR = _PLUGGY_DIR.parent\n_PYTEST_DIR = Path(_pytest.__file__).parent\n_PY_DIR = Path(py.__file__).parent", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py__Support_for_presenting_if_TYPE_CHECKING_.from__pytest_main_import_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/__init__.py__Support_for_presenting_if_TYPE_CHECKING_.from__pytest_main_import_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 19, "span_ids": ["impl", "docstring", 
"imports", "imports:15"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Support for presenting detailed information in failing assertions.\"\"\"\nimport sys\nfrom typing import Any\nfrom typing import Generator\nfrom typing import List\nfrom typing import Optional\nfrom typing import TYPE_CHECKING\n\nfrom _pytest.assertion import rewrite\nfrom _pytest.assertion import truncate\nfrom _pytest.assertion import util\nfrom _pytest.assertion.rewrite import assertstate_key\nfrom _pytest.config import Config\nfrom _pytest.config import hookimpl\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.nodes import Item\n\nif TYPE_CHECKING:\n from _pytest.main import Session", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__should_repr_global_name__call_reprcompare.return.expl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/rewrite.py__should_repr_global_name__call_reprcompare.return.expl", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/rewrite.py", "file_name": "rewrite.py", "file_type": "text/x-python", "category": "implementation", "start_line": 455, "end_line": 487, "span_ids": ["_format_boolop", "_call_reprcompare", "_should_repr_global_name"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _should_repr_global_name(obj: object) -> bool:\n if callable(obj):\n return False\n\n try:\n return not hasattr(obj, \"__name__\")\n except Exception:\n return True\n\n\ndef _format_boolop(explanations: Iterable[str], is_or: bool) -> str:\n explanation = \"(\" + (is_or and \" or \" or \" and \").join(explanations) + \")\"\n return explanation.replace(\"%\", \"%%\")\n\n\ndef _call_reprcompare(\n ops: Sequence[str],\n results: Sequence[bool],\n expls: Sequence[str],\n each_obj: Sequence[object],\n) -> str:\n for i, res, expl in zip(range(len(ops)), results, expls):\n try:\n done = not res\n except Exception:\n done = True\n if done:\n break\n if util._reprcompare is not None:\n custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])\n if custom is not None:\n return custom\n return expl", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__Utilities_for_truncati__running_on_ci.return.any_var_in_os_environ_for": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/truncate.py__Utilities_for_truncati__running_on_ci.return.any_var_in_os_environ_for", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/truncate.py", "file_name": "truncate.py", 
"file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 36, "span_ids": ["impl", "_running_on_ci", "docstring", "imports", "_should_truncate_item", "truncate_if_required"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Utilities for truncating assertion output.\n\nCurrent default behaviour is to truncate assertion explanations at\n~8 terminal lines, unless running in \"-vv\" mode or running on CI.\n\"\"\"\nimport os\nfrom typing import List\nfrom typing import Optional\n\nfrom _pytest.nodes import Item\n\n\nDEFAULT_MAX_LINES = 8\nDEFAULT_MAX_CHARS = 8 * 80\nUSAGE_MSG = \"use '-vv' to show\"\n\n\ndef truncate_if_required(\n explanation: List[str], item: Item, max_length: Optional[int] = None\n) -> List[str]:\n \"\"\"Truncate this assertion explanation if the given test item is eligible.\"\"\"\n if _should_truncate_item(item):\n return _truncate_explanation(explanation)\n return explanation\n\n\ndef _should_truncate_item(item: Item) -> bool:\n \"\"\"Whether or not this test item is eligible for truncation.\"\"\"\n verbose = item.config.option.verbose\n return verbose < 2 and not _running_on_ci()\n\n\ndef _running_on_ci() -> bool:\n \"\"\"Check if we're currently running on a CI system.\"\"\"\n env_vars = [\"CI\", \"BUILD_NUMBER\"]\n return any(var in os.environ for var in env_vars)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_format_explanation_format_explanation.return._n_join_result_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/assertion/util.py_format_explanation_format_explanation.return._n_join_result_", "embedding": null, "metadata": {"file_path": "src/_pytest/assertion/util.py", "file_name": "util.py", "file_type": "text/x-python", "category": "implementation", "start_line": 30, "end_line": 42, "span_ids": ["format_explanation"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def format_explanation(explanation: str) -> str:\n r\"\"\"Format an explanation.\n\n Normally all embedded newlines are escaped, however there are\n three exceptions: \\n{, \\n} and \\n~. The first two are intended\n cover nested explanations, see function and attribute explanations\n for examples (.visit_Call(), visit_Attribute()). The last one is\n for when one explanation needs to span multiple lines, e.g. 
when\n displaying diffs.\n \"\"\"\n lines = _split_explanation(explanation)\n result = _format_lines(lines)\n return \"\\n\".join(result)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py__Implementation_of_the__CACHEDIR_TAG_CONTENT.b_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py__Implementation_of_the__CACHEDIR_TAG_CONTENT.b_", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 53, "span_ids": ["impl", "docstring", "imports"], "tokens": 357}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Implementation of the cache provider.\"\"\"\n# This plugin was not named \"cache\" to avoid conflicts with the external\n# pytest-cache version.\nimport json\nimport os\nfrom pathlib import Path\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import Iterable\nfrom typing import List\nfrom typing import Optional\nfrom typing import Set\nfrom typing import Union\n\nimport attr\nimport py\n\nfrom .pathlib import resolve_from_str\nfrom .pathlib import rm_rf\nfrom .reports import CollectReport\nfrom _pytest import nodes\nfrom _pytest._io import TerminalWriter\nfrom _pytest.compat import final\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config import hookimpl\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.fixtures import fixture\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.main import Session\nfrom _pytest.python import Module\nfrom _pytest.python import Package\nfrom _pytest.reports import TestReport\n\n\nREADME_CONTENT = \"\"\"\\\n# pytest cache directory #\n\nThis directory contains data from the pytest's cache plugin,\nwhich provides the `--lf` and `--ff` options, as well as the `cache` fixture.\n\n**Do not** commit this to version control.\n\nSee [the docs](https://docs.pytest.org/en/stable/cache.html) for more information.\n\"\"\"\n\nCACHEDIR_TAG_CONTENT = b\"\"\"\\\nSignature: 8a477f597d28d172789f06886806bc55\n# This file is a cache directory tag created by pytest.\n# For information about cache directory tags, see:\n#\thttp://www.bford.info/cachedir/spec.html\n\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache_Cache.__init__.self._config.config": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache_Cache.__init__.self._config.config", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 56, "end_line": 73, "span_ids": ["Cache"], "tokens": 141}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass Cache:\n _cachedir = attr.ib(type=Path, repr=False)\n _config = attr.ib(type=Config, repr=False)\n\n # sub-directory under cache-dir for directories created by \"makedir\"\n _CACHE_PREFIX_DIRS = \"d\"\n\n # sub-directory under cache-dir for values created by \"set\"\n _CACHE_PREFIX_VALUES = \"v\"\n\n def __init__(\n self, cachedir: Path, config: Config, *, _ispytest: bool = False\n ) -> None:\n check_ispytest(_ispytest)\n self._cachedir = cachedir\n self._config = config", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.for_config_Cache.for_config.return.cls_cachedir_config__is": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.for_config_Cache.for_config.return.cls_cachedir_config__is", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 75, "end_line": 85, "span_ids": ["Cache.for_config"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass Cache:\n\n @classmethod\n def for_config(cls, config: Config, *, _ispytest: bool = False) -> \"Cache\":\n \"\"\"Create the Cache instance for a Config.\n\n :meta private:\n \"\"\"\n check_ispytest(_ispytest)\n cachedir = cls.cache_dir_from_config(config, _ispytest=True)\n if config.getoption(\"cacheclear\") and cachedir.is_dir():\n cls.clear_cache(cachedir, _ispytest=True)\n return cls(cachedir, config, _ispytest=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.clear_cache_Cache.warn.warnings_warn_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/cacheprovider.py_Cache.clear_cache_Cache.warn.warnings_warn_", "embedding": null, "metadata": {"file_path": "src/_pytest/cacheprovider.py", "file_name": "cacheprovider.py", "file_type": "text/x-python", "category": "implementation", "start_line": 87, "end_line": 121, "span_ids": ["Cache.clear_cache", "Cache.warn", "Cache.cache_dir_from_config"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass Cache:\n\n @classmethod\n def 
clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None:\n \"\"\"Clear the sub-directories used to hold cached directories and values.\n\n :meta private:\n \"\"\"\n check_ispytest(_ispytest)\n for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES):\n d = cachedir / prefix\n if d.is_dir():\n rm_rf(d)\n\n @staticmethod\n def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path:\n \"\"\"Get the path to the cache directory for a Config.\n\n :meta private:\n \"\"\"\n check_ispytest(_ispytest)\n return resolve_from_str(config.getini(\"cache_dir\"), config.rootpath)\n\n def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None:\n \"\"\"Issue a cache warning.\n\n :meta private:\n \"\"\"\n check_ispytest(_ispytest)\n import warnings\n from _pytest.warning_types import PytestCacheWarning\n\n warnings.warn(\n PytestCacheWarning(fmt.format(**args) if args else fmt),\n self._config.hook,\n stacklevel=3,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__Per_test_stdout_stderr_if_TYPE_CHECKING_._CaptureMethod.Literal_fd_sys_no_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__Per_test_stdout_stderr_if_TYPE_CHECKING_._CaptureMethod.Literal_fd_sys_no_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 34, "span_ids": ["imports:28", "impl", "impl:2", "docstring", "imports"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Per-test stdout/stderr capturing mechanism.\"\"\"\nimport contextlib\nimport functools\nimport io\nimport os\nimport sys\nfrom io import UnsupportedOperation\nfrom tempfile import TemporaryFile\nfrom typing import Any\nfrom typing import AnyStr\nfrom typing import Generator\nfrom typing import Generic\nfrom typing import Iterator\nfrom typing import Optional\nfrom typing import TextIO\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nfrom _pytest.compat import final\nfrom _pytest.config import Config\nfrom _pytest.config import hookimpl\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.fixtures import fixture\nfrom _pytest.fixtures import SubRequest\nfrom _pytest.nodes import Collector\nfrom _pytest.nodes import File\nfrom _pytest.nodes import Item\n\nif TYPE_CHECKING:\n from typing_extensions import Literal\n\n _CaptureMethod = Literal[\"fd\", \"sys\", \"no\", \"tee-sys\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_SysCaptureBinary_SysCaptureBinary.writeorg.self__old_buffer_flush_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_SysCaptureBinary_SysCaptureBinary.writeorg.self__old_buffer_flush_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 253, "end_line": 332, "span_ids": ["SysCaptureBinary.__repr__", "SysCaptureBinary.writeorg", "SysCaptureBinary.suspend", "SysCaptureBinary.resume", "SysCaptureBinary", "SysCaptureBinary.start", "SysCaptureBinary.repr", "SysCaptureBinary.snap", "SysCaptureBinary._assert_state", "SysCaptureBinary.done"], "tokens": 639}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SysCaptureBinary:\n\n EMPTY_BUFFER = b\"\"\n\n def __init__(self, fd: int, tmpfile=None, *, tee: bool = False) -> None:\n name = patchsysdict[fd]\n self._old = getattr(sys, name)\n self.name = name\n if tmpfile is None:\n if name == \"stdin\":\n tmpfile = DontReadFromInput()\n else:\n tmpfile = CaptureIO() if not tee else TeeCaptureIO(self._old)\n self.tmpfile = tmpfile\n self._state = \"initialized\"\n\n def repr(self, class_name: str) -> str:\n return \"<{} {} _old={} _state={!r} tmpfile={!r}>\".format(\n class_name,\n self.name,\n hasattr(self, \"_old\") and repr(self._old) or \"\",\n self._state,\n self.tmpfile,\n )\n\n def __repr__(self) -> str:\n return \"<{} {} _old={} _state={!r} tmpfile={!r}>\".format(\n self.__class__.__name__,\n self.name,\n hasattr(self, \"_old\") and repr(self._old) or \"\",\n self._state,\n self.tmpfile,\n )\n\n def _assert_state(self, op: str, states: Tuple[str, ...]) -> None:\n assert (\n self._state in states\n ), \"cannot {} in state {!r}: expected one of {}\".format(\n op, self._state, \", \".join(states)\n )\n\n def start(self) -> None:\n self._assert_state(\"start\", (\"initialized\",))\n setattr(sys, self.name, self.tmpfile)\n self._state = \"started\"\n\n def snap(self):\n self._assert_state(\"snap\", (\"started\", \"suspended\"))\n self.tmpfile.seek(0)\n res = self.tmpfile.buffer.read()\n self.tmpfile.seek(0)\n self.tmpfile.truncate()\n return res\n\n def done(self) -> None:\n self._assert_state(\"done\", (\"initialized\", \"started\", \"suspended\", \"done\"))\n if self._state == \"done\":\n return\n setattr(sys, self.name, self._old)\n del self._old\n self.tmpfile.close()\n self._state = \"done\"\n\n def suspend(self) -> None:\n self._assert_state(\"suspend\", (\"started\", \"suspended\"))\n setattr(sys, self.name, self._old)\n self._state = \"suspended\"\n\n def resume(self) -> None:\n self._assert_state(\"resume\", (\"started\", \"suspended\"))\n if self._state == \"started\":\n return\n setattr(sys, self.name, self.tmpfile)\n self._state = \"started\"\n\n def writeorg(self, data) -> None:\n self._assert_state(\"writeorg\", (\"started\", \"suspended\"))\n self._old.flush()\n self._old.buffer.write(data)\n self._old.buffer.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_SysCapture_FDCaptureBinary.snap.return.res": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_SysCapture_FDCaptureBinary.snap.return.res", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 335, "end_line": 428, "span_ids": ["FDCaptureBinary", "SysCapture", "FDCaptureBinary.__repr__", "SysCapture.snap", "FDCaptureBinary._assert_state", "FDCaptureBinary.snap", "SysCapture.writeorg", "FDCaptureBinary.start"], "tokens": 705}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SysCapture(SysCaptureBinary):\n EMPTY_BUFFER = \"\" # type: ignore[assignment]\n\n def snap(self):\n res = self.tmpfile.getvalue()\n self.tmpfile.seek(0)\n self.tmpfile.truncate()\n return res\n\n def writeorg(self, data):\n self._assert_state(\"writeorg\", (\"started\", \"suspended\"))\n self._old.write(data)\n self._old.flush()\n\n\nclass FDCaptureBinary:\n \"\"\"Capture IO to/from a given OS-level file descriptor.\n\n snap() produces `bytes`.\n \"\"\"\n\n EMPTY_BUFFER = b\"\"\n\n def __init__(self, targetfd: int) -> None:\n self.targetfd = targetfd\n\n try:\n os.fstat(targetfd)\n except OSError:\n # FD capturing is conceptually simple -- create a temporary file,\n # redirect the FD to it, redirect back when done. But when the\n # target FD is invalid it throws a wrench into this loveley scheme.\n #\n # Tests themselves shouldn't care if the FD is valid, FD capturing\n # should work regardless of external circumstances. So falling back\n # to just sys capturing is not a good option.\n #\n # Further complications are the need to support suspend() and the\n # possibility of FD reuse (e.g. the tmpfile getting the very same\n # target FD). 
The following approach is robust, I believe.\n self.targetfd_invalid: Optional[int] = os.open(os.devnull, os.O_RDWR)\n os.dup2(self.targetfd_invalid, targetfd)\n else:\n self.targetfd_invalid = None\n self.targetfd_save = os.dup(targetfd)\n\n if targetfd == 0:\n self.tmpfile = open(os.devnull)\n self.syscapture = SysCapture(targetfd)\n else:\n self.tmpfile = EncodedFile(\n TemporaryFile(buffering=0),\n encoding=\"utf-8\",\n errors=\"replace\",\n newline=\"\",\n write_through=True,\n )\n if targetfd in patchsysdict:\n self.syscapture = SysCapture(targetfd, self.tmpfile)\n else:\n self.syscapture = NoCapture()\n\n self._state = \"initialized\"\n\n def __repr__(self) -> str:\n return \"<{} {} oldfd={} _state={!r} tmpfile={!r}>\".format(\n self.__class__.__name__,\n self.targetfd,\n self.targetfd_save,\n self._state,\n self.tmpfile,\n )\n\n def _assert_state(self, op: str, states: Tuple[str, ...]) -> None:\n assert (\n self._state in states\n ), \"cannot {} in state {!r}: expected one of {}\".format(\n op, self._state, \", \".join(states)\n )\n\n def start(self) -> None:\n \"\"\"Start capturing on targetfd using memorized tmpfile.\"\"\"\n self._assert_state(\"start\", (\"initialized\",))\n os.dup2(self.tmpfile.fileno(), self.targetfd)\n self.syscapture.start()\n self._state = \"started\"\n\n def snap(self):\n self._assert_state(\"snap\", (\"started\", \"suspended\"))\n self.tmpfile.seek(0)\n res = self.tmpfile.buffer.read()\n self.tmpfile.seek(0)\n self.tmpfile.truncate()\n return res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCapture_FDCapture.writeorg._XXX_use_encoding_of_ori": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_FDCapture_FDCapture.writeorg._XXX_use_encoding_of_ori", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 468, "end_line": 487, "span_ids": ["FDCapture", "FDCapture.writeorg", "FDCapture.snap"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FDCapture(FDCaptureBinary):\n \"\"\"Capture IO to/from a given OS-level file descriptor.\n\n snap() produces text.\n \"\"\"\n\n # Ignore type because it doesn't match the type in the superclass (bytes).\n EMPTY_BUFFER = \"\" # type: ignore\n\n def snap(self):\n self._assert_state(\"snap\", (\"started\", \"suspended\"))\n self.tmpfile.seek(0)\n res = self.tmpfile.read()\n self.tmpfile.seek(0)\n self.tmpfile.truncate()\n return res\n\n def writeorg(self, data):\n \"\"\"Write to original file descriptor.\"\"\"\n super().writeorg(data.encode(\"utf-8\")) # XXX use encoding of original stream", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__MultiCapture_CaptureResult.__repr__.return.f_CaptureResult_out_self": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__MultiCapture_CaptureResult.__repr__.return.f_CaptureResult_out_self", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 490, "end_line": 545, "span_ids": ["CaptureResult.__len__", "CaptureResult.__hash__", "CaptureResult._replace", "CaptureResult.__getitem__", "CaptureResult.__lt__", "CaptureResult.__eq__", "CaptureResult.__repr__", "CaptureResult.count", "CaptureResult.index", "CaptureResult.__iter__", "CaptureResult", "FDCapture.writeorg"], "tokens": 453}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# MultiCapture\n\n\n# This class was a namedtuple, but due to mypy limitation[0] it could not be\n# made generic, so was replaced by a regular class which tries to emulate the\n# pertinent parts of a namedtuple. If the mypy limitation is ever lifted, can\n# make it a namedtuple again.\n# [0]: https://github.com/python/mypy/issues/685\n@final\n@functools.total_ordering\nclass CaptureResult(Generic[AnyStr]):\n \"\"\"The result of :method:`CaptureFixture.readouterr`.\"\"\"\n\n __slots__ = (\"out\", \"err\")\n\n def __init__(self, out: AnyStr, err: AnyStr) -> None:\n self.out: AnyStr = out\n self.err: AnyStr = err\n\n def __len__(self) -> int:\n return 2\n\n def __iter__(self) -> Iterator[AnyStr]:\n return iter((self.out, self.err))\n\n def __getitem__(self, item: int) -> AnyStr:\n return tuple(self)[item]\n\n def _replace(\n self, *, out: Optional[AnyStr] = None, err: Optional[AnyStr] = None\n ) -> \"CaptureResult[AnyStr]\":\n return CaptureResult(\n out=self.out if out is None else out, err=self.err if err is None else err\n )\n\n def count(self, value: AnyStr) -> int:\n return tuple(self).count(value)\n\n def index(self, value) -> int:\n return tuple(self).index(value)\n\n def __eq__(self, other: object) -> bool:\n if not isinstance(other, (CaptureResult, tuple)):\n return NotImplemented\n return tuple(self) == tuple(other)\n\n def __hash__(self) -> int:\n return hash(tuple(self))\n\n def __lt__(self, other: object) -> bool:\n if not isinstance(other, (CaptureResult, tuple)):\n return NotImplemented\n return tuple(self) < tuple(other)\n\n def __repr__(self) -> str:\n return f\"CaptureResult(out={self.out!r}, err={self.err!r})\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__get_multicapture__get_multicapture.raise_ValueError_f_unknow": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py__get_multicapture__get_multicapture.raise_ValueError_f_unknow", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 632, "end_line": 643, "span_ids": ["_get_multicapture"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_multicapture(method: \"_CaptureMethod\") -> MultiCapture[str]:\n if method == \"fd\":\n return MultiCapture(in_=FDCapture(0), out=FDCapture(1), err=FDCapture(2))\n elif method == \"sys\":\n return MultiCapture(in_=SysCapture(0), out=SysCapture(1), err=SysCapture(2))\n elif method == \"no\":\n return MultiCapture(in_=None, out=None, err=None)\n elif method == \"tee-sys\":\n return MultiCapture(\n in_=None, out=SysCapture(1, tee=True), err=SysCapture(2, tee=True)\n )\n raise ValueError(f\"unknown capturing method: {method!r}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.activate_fixture_CaptureManager._Helper_context_managers": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.activate_fixture_CaptureManager._Helper_context_managers", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 740, "end_line": 759, "span_ids": ["CaptureManager.activate_fixture", "CaptureManager.resume_fixture", "CaptureManager.deactivate_fixture", "CaptureManager.suspend_fixture"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CaptureManager:\n\n def activate_fixture(self) -> None:\n \"\"\"If the current item is using ``capsys`` or ``capfd``, activate\n them so they take precedence over the global capture.\"\"\"\n if self._capture_fixture:\n self._capture_fixture._start()\n\n def deactivate_fixture(self) -> None:\n \"\"\"Deactivate the ``capsys`` or ``capfd`` fixture of this item, if any.\"\"\"\n if self._capture_fixture:\n self._capture_fixture.close()\n\n def suspend_fixture(self) -> None:\n if self._capture_fixture:\n self._capture_fixture._suspend()\n\n def resume_fixture(self) -> None:\n if self._capture_fixture:\n self._capture_fixture._resume()\n\n # Helper context managers", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.global_and_fixture_disabled_CaptureManager.global_and_fixture_disabled.try_.finally_.if_do_fixture_.self_resume_fixture_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.global_and_fixture_disabled_CaptureManager.global_and_fixture_disabled.try_.finally_.if_do_fixture_.self_resume_fixture_", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 761, "end_line": 776, "span_ids": ["CaptureManager.global_and_fixture_disabled"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CaptureManager:\n\n @contextlib.contextmanager\n def global_and_fixture_disabled(self) -> Generator[None, None, None]:\n \"\"\"Context manager to temporarily disable global and current fixture capturing.\"\"\"\n do_fixture = self._capture_fixture and self._capture_fixture._is_started()\n if do_fixture:\n self.suspend_fixture()\n do_global = self._global_capturing and self._global_capturing.is_started()\n if do_global:\n self.suspend_global_capture()\n try:\n yield\n finally:\n if do_global:\n self.resume_global_capture()\n if do_fixture:\n self.resume_fixture()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.item_capture_CaptureManager.pytest_internalerror.self_stop_global_capturin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/capture.py_CaptureManager.item_capture_CaptureManager.pytest_internalerror.self_stop_global_capturin", "embedding": null, "metadata": {"file_path": "src/_pytest/capture.py", "file_name": "capture.py", "file_type": "text/x-python", "category": "implementation", "start_line": 778, "end_line": 830, "span_ids": ["CaptureManager.pytest_runtest_call", "CaptureManager.pytest_internalerror", "CaptureManager.pytest_runtest_setup", "CaptureManager.pytest_keyboard_interrupt", "CaptureManager.pytest_make_collect_report", "CaptureManager.pytest_runtest_teardown", "CaptureManager.item_capture"], "tokens": 395}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CaptureManager:\n\n @contextlib.contextmanager\n def item_capture(self, when: str, item: Item) -> Generator[None, None, None]:\n self.resume_global_capture()\n self.activate_fixture()\n try:\n yield\n finally:\n self.deactivate_fixture()\n self.suspend_global_capture(in_=False)\n\n out, err = self.read_global_capture()\n item.add_report_section(when, \"stdout\", out)\n item.add_report_section(when, \"stderr\", err)\n\n # Hooks\n\n @hookimpl(hookwrapper=True)\n def pytest_make_collect_report(self, collector: Collector):\n if isinstance(collector, File):\n self.resume_global_capture()\n outcome = yield\n self.suspend_global_capture()\n out, err = self.read_global_capture()\n rep = outcome.get_result()\n if out:\n rep.sections.append((\"Captured stdout\", out))\n if err:\n rep.sections.append((\"Captured stderr\", err))\n else:\n yield\n\n @hookimpl(hookwrapper=True)\n def pytest_runtest_setup(self, item: Item) -> Generator[None, None, None]:\n with self.item_capture(\"setup\", item):\n yield\n\n @hookimpl(hookwrapper=True)\n def pytest_runtest_call(self, item: Item) -> Generator[None, None, None]:\n with self.item_capture(\"call\", item):\n yield\n\n @hookimpl(hookwrapper=True)\n def pytest_runtest_teardown(self, item: Item) -> Generator[None, None, None]:\n with self.item_capture(\"teardown\", item):\n yield\n\n 
@hookimpl(tryfirst=True)\n def pytest_keyboard_interrupt(self) -> None:\n self.stop_global_capturing()\n\n @hookimpl(tryfirst=True)\n def pytest_internalerror(self) -> None:\n self.stop_global_capturing()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py__Python_version_compati_is_generator.return.genfunc_and_not_iscorouti": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py__Python_version_compati_is_generator.return.genfunc_and_not_iscorouti", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 58, "span_ids": ["impl", "impl:2", "impl:7", "impl:6", "docstring", "imports:21", "imports", "_format_args", "is_generator", "NotSetType", "impl:13"], "tokens": 327}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Python version compatibility code.\"\"\"\nimport enum\nimport functools\nimport inspect\nimport re\nimport sys\nfrom contextlib import contextmanager\nfrom inspect import Parameter\nfrom inspect import signature\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Generic\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\n\nimport attr\n\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import TEST_OUTCOME\n\nif TYPE_CHECKING:\n from typing import NoReturn\n from typing_extensions import Final\n\n\n_T = TypeVar(\"_T\")\n_S = TypeVar(\"_S\")\n\n\n# fmt: off\n# Singleton type for NOTSET, as described in:\n# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions\nclass NotSetType(enum.Enum):\n token = 0\nNOTSET: \"Final\" = NotSetType.token # noqa: E305\n# fmt: on\n\nif sys.version_info >= (3, 8):\n from importlib import metadata as importlib_metadata\nelse:\n import importlib_metadata # noqa: F401\n\n\ndef _format_args(func: Callable[..., Any]) -> str:\n return str(signature(func))\n\n\n# The type of re.compile objects is not exposed in Python.\nREGEX_TYPE = type(re.compile(\"\"))\n\n\ndef is_generator(func: object) -> bool:\n genfunc = inspect.isgeneratorfunction(func)\n return genfunc and not iscoroutinefunction(func)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_if_sys_version_info_3___bytes_to_ascii.return.val_decode_ascii_back": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_if_sys_version_info_3___bytes_to_ascii.return.val_decode_ascii_back", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 180, "end_line": 219, "span_ids": ["impl:18", "_translate_non_printable", 
"_bytes_to_ascii", "impl:15", "impl:21", "get_default_arg_names"], "tokens": 274}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "if sys.version_info < (3, 7):\n\n @contextmanager\n def nullcontext():\n yield\n\n\nelse:\n from contextlib import nullcontext as nullcontext # noqa: F401\n\n\ndef get_default_arg_names(function: Callable[..., Any]) -> Tuple[str, ...]:\n # Note: this code intentionally mirrors the code at the beginning of\n # getfuncargnames, to get the arguments which were excluded from its result\n # because they had default values.\n return tuple(\n p.name\n for p in signature(function).parameters.values()\n if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)\n and p.default is not Parameter.empty\n )\n\n\n_non_printable_ascii_translate_table = {\n i: f\"\\\\x{i:02x}\" for i in range(128) if i not in range(32, 127)\n}\n_non_printable_ascii_translate_table.update(\n {ord(\"\\t\"): \"\\\\t\", ord(\"\\r\"): \"\\\\r\", ord(\"\\n\"): \"\\\\n\"}\n)\n\n\ndef _translate_non_printable(s: str) -> str:\n return s.translate(_non_printable_ascii_translate_table)\n\n\nSTRING_TYPES = bytes, str\n\n\ndef _bytes_to_ascii(val: bytes) -> str:\n return val.decode(\"ascii\", \"backslashreplace\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_ascii_escaped__PytestWrapper.obj.attr_ib_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_ascii_escaped__PytestWrapper.obj.attr_ib_", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 222, "end_line": 255, "span_ids": ["_PytestWrapper", "ascii_escaped"], "tokens": 290}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def ascii_escaped(val: Union[bytes, str]) -> str:\n r\"\"\"If val is pure ASCII, return it as an str, otherwise, escape\n bytes objects into a sequence of escaped bytes:\n\n b'\\xc3\\xb4\\xc5\\xd6' -> r'\\xc3\\xb4\\xc5\\xd6'\n\n and escapes unicode objects into a sequence of escaped unicode\n ids, e.g.:\n\n r'4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944'\n\n Note:\n The obvious \"v.decode('unicode-escape')\" will return\n valid UTF-8 unicode if it finds them in bytes, but we\n want to return escaped bytes for any byte, even if they match\n a UTF-8 string.\n \"\"\"\n if isinstance(val, bytes):\n ret = _bytes_to_ascii(val)\n else:\n ret = val.encode(\"unicode_escape\").decode(\"ascii\")\n return _translate_non_printable(ret)\n\n\n@attr.s\nclass _PytestWrapper:\n \"\"\"Dummy wrapper around a function object for internal use only.\n\n Used to correctly unwrap the underlying function object when we are\n creating fixtures, because we wrap the function object ourselves with a\n 
decorator to issue warnings when the fixture function is called directly.\n \"\"\"\n\n obj = attr.ib()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_get_real_func_get_real_func.return.obj": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_get_real_func_get_real_func.return.obj", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 258, "end_line": 284, "span_ids": ["get_real_func"], "tokens": 249}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_real_func(obj):\n \"\"\"Get the real function object of the (possibly) wrapped object by\n functools.wraps or functools.partial.\"\"\"\n start_obj = obj\n for i in range(100):\n # __pytest_wrapped__ is set by @pytest.fixture when wrapping the fixture function\n # to trigger a warning if it gets called directly instead of by pytest: we don't\n # want to unwrap further than this otherwise we lose useful wrappings like @mock.patch (#3774)\n new_obj = getattr(obj, \"__pytest_wrapped__\", None)\n if isinstance(new_obj, _PytestWrapper):\n obj = new_obj.obj\n break\n new_obj = getattr(obj, \"__wrapped__\", None)\n if new_obj is None:\n break\n obj = new_obj\n else:\n from _pytest._io.saferepr import saferepr\n\n raise ValueError(\n (\"could not find real function of {start}\\nstopped at {current}\").format(\n start=saferepr(start_obj), current=saferepr(obj)\n )\n )\n if isinstance(obj, functools.partial):\n obj = obj.func\n return obj", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_get_real_method_getimfunc.try_.except_AttributeError_.return.func": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_get_real_method_getimfunc.try_.except_AttributeError_.return.func", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 287, "end_line": 305, "span_ids": ["getimfunc", "get_real_method"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_real_method(obj, holder):\n \"\"\"Attempt to obtain the real function object that might be wrapping\n ``obj``, while at the same time returning a bound method to ``holder`` if\n the original object was a bound method.\"\"\"\n try:\n is_method = hasattr(obj, \"__func__\")\n obj = get_real_func(obj)\n except Exception: # pragma: no cover\n return obj\n if is_method and hasattr(obj, \"__get__\") and 
callable(obj.__get__):\n obj = obj.__get__(holder)\n return obj\n\n\ndef getimfunc(func):\n try:\n return func.__func__\n except AttributeError:\n return func", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_safe_getattr_safe_getattr.try_.except_TEST_OUTCOME_.return.default": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/compat.py_safe_getattr_safe_getattr.try_.except_TEST_OUTCOME_.return.default", "embedding": null, "metadata": {"file_path": "src/_pytest/compat.py", "file_name": "compat.py", "file_type": "text/x-python", "category": "implementation", "start_line": 308, "end_line": 320, "span_ids": ["safe_getattr"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def safe_getattr(object: Any, name: str, default: Any) -> Any:\n \"\"\"Like getattr but return default upon any Exception or any OutcomeException.\n\n Attribute access can potentially fail for 'evil' Python objects.\n See issue #214.\n It catches OutcomeException because of #2490 (issue #580), new outcomes\n are derived from BaseException instead of Exception (for more details\n check #2707).\n \"\"\"\n try:\n return getattr(object, name, default)\n except TEST_OUTCOME:\n return default", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__Command_line_options__hookspec.HookspecMarker_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__Command_line_options__hookspec.HookspecMarker_pytest_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 79, "span_ids": ["impl", "docstring", "imports:55", "imports:40", "impl:3", "imports", "impl:4"], "tokens": 468}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Command line options, ini-file and conftest.py processing.\"\"\"\nimport argparse\nimport collections.abc\nimport contextlib\nimport copy\nimport enum\nimport inspect\nimport os\nimport re\nimport shlex\nimport sys\nimport types\nimport warnings\nfrom functools import lru_cache\nfrom pathlib import Path\nfrom types import TracebackType\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import IO\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import TextIO\nfrom typing import Tuple\nfrom typing 
import Type\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport attr\nimport py\nfrom pluggy import HookimplMarker\nfrom pluggy import HookspecMarker\nfrom pluggy import PluginManager\n\nimport _pytest._code\nimport _pytest.deprecated\nimport _pytest.hookspec\nfrom .exceptions import PrintHelp as PrintHelp\nfrom .exceptions import UsageError as UsageError\nfrom .findpaths import determine_setup\nfrom _pytest._code import ExceptionInfo\nfrom _pytest._code import filter_traceback\nfrom _pytest._io import TerminalWriter\nfrom _pytest.compat import final\nfrom _pytest.compat import importlib_metadata\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import Skipped\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pathlib import bestrelpath\nfrom _pytest.pathlib import import_path\nfrom _pytest.pathlib import ImportMode\nfrom _pytest.pathlib import resolve_package_path\nfrom _pytest.store import Store\nfrom _pytest.warning_types import PytestConfigWarning\n\nif TYPE_CHECKING:\n\n from _pytest._code.code import _TracebackStyle\n from _pytest.terminal import TerminalReporter\n from .argparsing import Argument\n\n\n_PluggyPlugin = object\n\"\"\"A type to represent plugin objects.\n\nPlugins can be any namespace, so we can't narrow it down much, but we use an\nalias to make the intent clear.\n\nIdeally this type would be provided by pluggy itself.\n\"\"\"\n\n\nhookimpl = HookimplMarker(\"pytest\")\nhookspec = HookspecMarker(\"pytest\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_ExitCode_ExitCode.NO_TESTS_COLLECTED.5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_ExitCode_ExitCode.NO_TESTS_COLLECTED.5", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 82, "end_line": 102, "span_ids": ["ExitCode"], "tokens": 132}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass ExitCode(enum.IntEnum):\n \"\"\"Encodes the valid exit codes by pytest.\n\n Currently users and plugins may supply other exit codes as well.\n\n .. 
versionadded:: 5.0\n \"\"\"\n\n #: Tests passed.\n OK = 0\n #: Tests failed.\n TESTS_FAILED = 1\n #: pytest was interrupted.\n INTERRUPTED = 2\n #: An internal error got in the way.\n INTERNAL_ERROR = 3\n #: pytest was misused.\n USAGE_ERROR = 4\n #: pytest couldn't find tests.\n NO_TESTS_COLLECTED = 5", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_cmdline_None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_cmdline_None_1", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 198, "end_line": 262, "span_ids": ["directory_arg", "cmdline", "filename_arg", "impl:10"], "tokens": 382}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class cmdline: # compatibility namespace\n main = staticmethod(main)\n\n\ndef filename_arg(path: str, optname: str) -> str:\n \"\"\"Argparse type validator for filename arguments.\n\n :path: Path of filename.\n :optname: Name of the option.\n \"\"\"\n if os.path.isdir(path):\n raise UsageError(f\"{optname} must be a filename, given: {path}\")\n return path\n\n\ndef directory_arg(path: str, optname: str) -> str:\n \"\"\"Argparse type validator for directory arguments.\n\n :path: Path of directory.\n :optname: Name of the option.\n \"\"\"\n if not os.path.isdir(path):\n raise UsageError(f\"{optname} must be a directory, given: {path}\")\n return path\n\n\n# Plugins that cannot be disabled via \"-p no:X\" currently.\nessential_plugins = (\n \"mark\",\n \"main\",\n \"runner\",\n \"fixtures\",\n \"helpconfig\", # Provides -p.\n)\n\ndefault_plugins = essential_plugins + (\n \"python\",\n \"terminal\",\n \"debugging\",\n \"unittest\",\n \"capture\",\n \"skipping\",\n \"tmpdir\",\n \"monkeypatch\",\n \"recwarn\",\n \"pastebin\",\n \"nose\",\n \"assertion\",\n \"junitxml\",\n \"doctest\",\n \"cacheprovider\",\n \"freeze_support\",\n \"setuponly\",\n \"setupplan\",\n \"stepwise\",\n \"warnings\",\n \"logging\",\n \"reports\",\n *([\"unraisableexception\", \"threadexception\"] if sys.version_info >= (3, 8) else []),\n \"faulthandler\",\n)\n\nbuiltin_plugins = set(default_plugins)\nbuiltin_plugins.add(\"pytester\")\nbuiltin_plugins.add(\"pytester_assertions\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__iter_rewritable_modules__args_converter.return.tuple_args_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__iter_rewritable_modules__args_converter.return.tuple_args_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 771, "end_line": 836, "span_ids": ["_iter_rewritable_modules", 
"_args_converter"], "tokens": 308}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _iter_rewritable_modules(package_files: Iterable[str]) -> Iterator[str]:\n package_files = list(package_files)\n seen_some = False\n for fn in package_files:\n is_simple_module = \"/\" not in fn and fn.endswith(\".py\")\n is_package = fn.count(\"/\") == 1 and fn.endswith(\"__init__.py\")\n if is_simple_module:\n module_name, _ = os.path.splitext(fn)\n # we ignore \"setup.py\" at the root of the distribution\n if module_name != \"setup\":\n seen_some = True\n yield module_name\n elif is_package:\n package_name = os.path.dirname(fn)\n seen_some = True\n yield package_name\n\n if not seen_some:\n # At this point we did not find any packages or modules suitable for assertion\n # rewriting, so we try again by stripping the first path component (to account for\n # \"src\" based source trees for example).\n # This approach lets us have the common case continue to be fast, as egg-distributions\n # are rarer.\n new_package_files = []\n for fn in package_files:\n parts = fn.split(\"/\")\n new_fn = \"/\".join(parts[1:])\n if new_fn:\n new_package_files.append(new_fn)\n if new_package_files:\n yield from _iter_rewritable_modules(new_package_files)\n\n\ndef _args_converter(args: Iterable[str]) -> Tuple[str, ...]:\n return tuple(args)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config_Config.InvocationParams._The_directory_from_whi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config_Config.InvocationParams._The_directory_from_whi", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 839, "end_line": 881, "span_ids": ["Config", "Config.InvocationParams"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n \"\"\"Access to configuration values, pluginmanager and plugin hooks.\n\n :param PytestPluginManager pluginmanager:\n\n :param InvocationParams invocation_params:\n Object containing parameters regarding the :func:`pytest.main`\n invocation.\n \"\"\"\n\n @final\n @attr.s(frozen=True)\n class InvocationParams:\n \"\"\"Holds parameters passed during :func:`pytest.main`.\n\n The object attributes are read-only.\n\n .. versionadded:: 5.1\n\n .. 
note::\n\n Note that the environment variable ``PYTEST_ADDOPTS`` and the ``addopts``\n ini option are handled by pytest, not being included in the ``args`` attribute.\n\n Plugins accessing ``InvocationParams`` must be aware of that.\n \"\"\"\n\n args = attr.ib(type=Tuple[str, ...], converter=_args_converter)\n \"\"\"The command-line arguments as passed to :func:`pytest.main`.\n\n :type: Tuple[str, ...]\n \"\"\"\n plugins = attr.ib(type=Optional[Sequence[Union[str, _PluggyPlugin]]])\n \"\"\"Extra plugins, might be `None`.\n\n :type: Optional[Sequence[Union[str, plugin]]]\n \"\"\"\n dir = attr.ib(type=Path)\n \"\"\"The directory from which :func:`pytest.main` was invoked.\n\n :type: pathlib.Path\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.__init___Config.add_cleanup.self__cleanup_append_func": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.__init___Config.add_cleanup.self__cleanup_append_func", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 883, "end_line": 993, "span_ids": ["Config.invocation_dir", "Config.inifile", "Config.inipath", "Config.rootpath", "Config.rootdir", "Config.__init__", "Config.__init__:2", "Config.add_cleanup"], "tokens": 800}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def __init__(\n self,\n pluginmanager: PytestPluginManager,\n *,\n invocation_params: Optional[InvocationParams] = None,\n ) -> None:\n from .argparsing import Parser, FILE_OR_DIR\n\n if invocation_params is None:\n invocation_params = self.InvocationParams(\n args=(), plugins=None, dir=Path.cwd()\n )\n\n self.option = argparse.Namespace()\n \"\"\"Access to command line option as attributes.\n\n :type: argparse.Namespace\n \"\"\"\n\n self.invocation_params = invocation_params\n \"\"\"The parameters with which pytest was invoked.\n\n :type: InvocationParams\n \"\"\"\n\n _a = FILE_OR_DIR\n self._parser = Parser(\n usage=f\"%(prog)s [options] [{_a}] [{_a}] [...]\",\n processopt=self._processopt,\n )\n self.pluginmanager = pluginmanager\n \"\"\"The plugin manager handles plugin registration and hook invocation.\n\n :type: PytestPluginManager\n \"\"\"\n\n self.trace = self.pluginmanager.trace.root.get(\"config\")\n self.hook = self.pluginmanager.hook\n self._inicache: Dict[str, Any] = {}\n self._override_ini: Sequence[str] = ()\n self._opt2dest: Dict[str, str] = {}\n self._cleanup: List[Callable[[], None]] = []\n # A place where plugins can store information on the config for their\n # own use. 
Currently only intended for internal plugins.\n self._store = Store()\n self.pluginmanager.register(self, \"pytestconfig\")\n self._configured = False\n self.hook.pytest_addoption.call_historic(\n kwargs=dict(parser=self._parser, pluginmanager=self.pluginmanager)\n )\n\n if TYPE_CHECKING:\n from _pytest.cacheprovider import Cache\n\n self.cache: Optional[Cache] = None\n\n @property\n def invocation_dir(self) -> py.path.local:\n \"\"\"The directory from which pytest was invoked.\n\n Prefer to use :attr:`invocation_params.dir `,\n which is a :class:`pathlib.Path`.\n\n :type: py.path.local\n \"\"\"\n return py.path.local(str(self.invocation_params.dir))\n\n @property\n def rootpath(self) -> Path:\n \"\"\"The path to the :ref:`rootdir `.\n\n :type: pathlib.Path\n\n .. versionadded:: 6.1\n \"\"\"\n return self._rootpath\n\n @property\n def rootdir(self) -> py.path.local:\n \"\"\"The path to the :ref:`rootdir `.\n\n Prefer to use :attr:`rootpath`, which is a :class:`pathlib.Path`.\n\n :type: py.path.local\n \"\"\"\n return py.path.local(str(self.rootpath))\n\n @property\n def inipath(self) -> Optional[Path]:\n \"\"\"The path to the :ref:`configfile `.\n\n :type: Optional[pathlib.Path]\n\n .. versionadded:: 6.1\n \"\"\"\n return self._inipath\n\n @property\n def inifile(self) -> Optional[py.path.local]:\n \"\"\"The path to the :ref:`configfile `.\n\n Prefer to use :attr:`inipath`, which is a :class:`pathlib.Path`.\n\n :type: Optional[py.path.local]\n \"\"\"\n return py.path.local(str(self.inipath)) if self.inipath else None\n\n def add_cleanup(self, func: Callable[[], None]) -> None:\n \"\"\"Add a function to be called when the config object gets out of\n use (usually coninciding with pytest_unconfigure).\"\"\"\n self._cleanup.append(func)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._do_configure_Config.get_terminal_writer.return.terminalreporter__tw": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._do_configure_Config.get_terminal_writer.return.terminalreporter__tw", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 995, "end_line": 1015, "span_ids": ["Config._ensure_unconfigure", "Config.get_terminal_writer", "Config._do_configure"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _do_configure(self) -> None:\n assert not self._configured\n self._configured = True\n with warnings.catch_warnings():\n warnings.simplefilter(\"default\")\n self.hook.pytest_configure.call_historic(kwargs=dict(config=self))\n\n def _ensure_unconfigure(self) -> None:\n if self._configured:\n self._configured = False\n self.hook.pytest_unconfigure(config=self)\n self.hook.pytest_configure._call_history = []\n while self._cleanup:\n fin = self._cleanup.pop()\n fin()\n\n def get_terminal_writer(self) -> TerminalWriter:\n terminalreporter: TerminalReporter = 
self.pluginmanager.get_plugin(\n \"terminalreporter\"\n )\n return terminalreporter._tw", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._preparse_Config._preparse.try_.except_ConftestImportFail.if_self_known_args_namesp.else_.raise": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._preparse_Config._preparse.try_.except_ConftestImportFail.if_self_known_args_namesp.else_.raise", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1167, "end_line": 1221, "span_ids": ["Config._preparse"], "tokens": 491}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _preparse(self, args: List[str], addopts: bool = True) -> None:\n if addopts:\n env_addopts = os.environ.get(\"PYTEST_ADDOPTS\", \"\")\n if len(env_addopts):\n args[:] = (\n self._validate_args(shlex.split(env_addopts), \"via PYTEST_ADDOPTS\")\n + args\n )\n self._initini(args)\n if addopts:\n args[:] = (\n self._validate_args(self.getini(\"addopts\"), \"via addopts config\") + args\n )\n\n self.known_args_namespace = self._parser.parse_known_args(\n args, namespace=copy.copy(self.option)\n )\n self._checkversion()\n self._consider_importhook(args)\n self.pluginmanager.consider_preparse(args, exclude_only=False)\n if not os.environ.get(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\"):\n # Don't autoload from setuptools entry point. 
Only explicitly specified\n # plugins are going to be loaded.\n self.pluginmanager.load_setuptools_entrypoints(\"pytest11\")\n self.pluginmanager.consider_env()\n\n self.known_args_namespace = self._parser.parse_known_args(\n args, namespace=copy.copy(self.known_args_namespace)\n )\n\n self._validate_plugins()\n self._warn_about_skipped_plugins()\n\n if self.known_args_namespace.strict:\n self.issue_config_time_warning(\n _pytest.deprecated.STRICT_OPTION, stacklevel=2\n )\n\n if self.known_args_namespace.confcutdir is None and self.inipath is not None:\n confcutdir = str(self.inipath.parent)\n self.known_args_namespace.confcutdir = confcutdir\n try:\n self.hook.pytest_load_initial_conftests(\n early_config=self, args=args, parser=self._parser\n )\n except ConftestImportFailure as e:\n if self.known_args_namespace.help or self.known_args_namespace.version:\n # we don't want to prevent --help/--version to work\n # so just let is pass and print a warning at the end\n self.issue_config_time_warning(\n PytestConfigWarning(f\"could not load initial conftests: {e.path}\"),\n stacklevel=2,\n )\n else:\n raise", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.pytest_collection_Config._checkversion.if_minver_.if_Version_minver_Vers.raise_pytest_UsageError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.pytest_collection_Config._checkversion.if_minver_.if_Version_minver_Vers.raise_pytest_UsageError_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1223, "end_line": 1251, "span_ids": ["Config.pytest_collection", "Config._checkversion"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n @hookimpl(hookwrapper=True)\n def pytest_collection(self) -> Generator[None, None, None]:\n \"\"\"Validate invalid ini keys after collection is done so we take in account\n options added by late-loading conftest files.\"\"\"\n yield\n self._validate_config_options()\n\n def _checkversion(self) -> None:\n import pytest\n\n minver = self.inicfg.get(\"minversion\", None)\n if minver:\n # Imported lazily to improve start-up time.\n from packaging.version import Version\n\n if not isinstance(minver, str):\n raise pytest.UsageError(\n \"%s: 'minversion' must be a single value\" % self.inipath\n )\n\n if Version(minver) > Version(pytest.__version__):\n raise pytest.UsageError(\n \"%s: 'minversion' requires pytest-%s, actual pytest-%s'\"\n % (\n self.inipath,\n minver,\n pytest.__version__,\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._validate_config_options_Config._validate_plugins.if_missing_plugins_.raise_UsageError_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._validate_config_options_Config._validate_plugins.if_missing_plugins_.raise_UsageError_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1253, "end_line": 1285, "span_ids": ["Config._validate_config_options", "Config._validate_plugins"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _validate_config_options(self) -> None:\n for key in sorted(self._get_unknown_ini_keys()):\n self._warn_or_fail_if_strict(f\"Unknown config option: {key}\\n\")\n\n def _validate_plugins(self) -> None:\n required_plugins = sorted(self.getini(\"required_plugins\"))\n if not required_plugins:\n return\n\n # Imported lazily to improve start-up time.\n from packaging.version import Version\n from packaging.requirements import InvalidRequirement, Requirement\n\n plugin_info = self.pluginmanager.list_plugin_distinfo()\n plugin_dist_info = {dist.project_name: dist.version for _, dist in plugin_info}\n\n missing_plugins = []\n for required_plugin in required_plugins:\n try:\n spec = Requirement(required_plugin)\n except InvalidRequirement:\n missing_plugins.append(required_plugin)\n continue\n\n if spec.name not in plugin_dist_info:\n missing_plugins.append(required_plugin)\n elif Version(plugin_dist_info[spec.name]) not in spec.specifier:\n missing_plugins.append(required_plugin)\n\n if missing_plugins:\n raise UsageError(\n \"Missing required plugins: {}\".format(\", \".join(missing_plugins)),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._warn_or_fail_if_strict_Config.parse.try_.except_PrintHelp_.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._warn_or_fail_if_strict_Config.parse.try_.except_PrintHelp_.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1287, "end_line": 1320, "span_ids": ["Config.parse", "Config._get_unknown_ini_keys", "Config._warn_or_fail_if_strict"], "tokens": 315}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _warn_or_fail_if_strict(self, message: str) -> None:\n if self.known_args_namespace.strict_config:\n raise UsageError(message)\n\n self.issue_config_time_warning(PytestConfigWarning(message), stacklevel=3)\n\n def _get_unknown_ini_keys(self) -> List[str]:\n parser_inicfg = self._parser._inidict\n return [name for name in self.inicfg if name not in parser_inicfg]\n\n def parse(self, args: List[str], addopts: bool = 
True) -> None:\n # Parse given cmdline arguments into this config object.\n assert not hasattr(\n self, \"args\"\n ), \"can only parse cmdline args at most once per Config object\"\n self.hook.pytest_addhooks.call_historic(\n kwargs=dict(pluginmanager=self.pluginmanager)\n )\n self._preparse(args, addopts=addopts)\n # XXX deprecated hook:\n self.hook.pytest_cmdline_preparse(config=self, args=args)\n self._parser.after_preparse = True # type: ignore\n try:\n args = self._parser.parse_setoption(\n args, self.option, namespace=self.option\n )\n if not args:\n if self.invocation_params.dir == self.rootpath:\n args = self.getini(\"testpaths\")\n if not args:\n args = [str(self.invocation_params.dir)]\n self.args = args\n except PrintHelp:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.issue_config_time_warning_Config.issue_config_time_warning.if_records_.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.issue_config_time_warning_Config.issue_config_time_warning.if_records_.None_1", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1322, "end_line": 1363, "span_ids": ["Config.issue_config_time_warning"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def issue_config_time_warning(self, warning: Warning, stacklevel: int) -> None:\n \"\"\"Issue and handle a warning during the \"configure\" stage.\n\n During ``pytest_configure`` we can't capture warnings using the ``catch_warnings_for_item``\n function because it is not possible to have hookwrappers around ``pytest_configure``.\n\n This function is mainly intended for plugins that need to issue warnings during\n ``pytest_configure`` (or similar stages).\n\n :param warning: The warning instance.\n :param stacklevel: stacklevel forwarded to warnings.warn.\n \"\"\"\n if self.pluginmanager.is_blocked(\"warnings\"):\n return\n\n cmdline_filters = self.known_args_namespace.pythonwarnings or []\n config_filters = self.getini(\"filterwarnings\")\n\n with warnings.catch_warnings(record=True) as records:\n warnings.simplefilter(\"always\", type(warning))\n apply_warning_filters(config_filters, cmdline_filters)\n warnings.warn(warning, stacklevel=stacklevel)\n\n if records:\n frame = sys._getframe(stacklevel - 1)\n location = frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name\n self.hook.pytest_warning_captured.call_historic(\n kwargs=dict(\n warning_message=records[0],\n when=\"config\",\n item=None,\n location=location,\n )\n )\n self.hook.pytest_warning_recorded.call_historic(\n kwargs=dict(\n warning_message=records[0],\n when=\"config\",\n nodeid=\"\",\n location=location,\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.getoption_Config.getvalueorskip.return.self_getoption_name_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config.getoption_Config.getvalueorskip.return.self_getoption_name_skip", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1476, "end_line": 1506, "span_ids": ["Config.getvalueorskip", "Config.getvalue", "Config.getoption"], "tokens": 274}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def getoption(self, name: str, default=notset, skip: bool = False):\n \"\"\"Return command line option value.\n\n :param name: Name of the option. You may also specify\n the literal ``--OPT`` option instead of the \"dest\" option name.\n :param default: Default value if no option of that name exists.\n :param skip: If True, raise pytest.skip if option does not exists\n or has a None value.\n \"\"\"\n name = self._opt2dest.get(name, name)\n try:\n val = getattr(self.option, name)\n if val is None and skip:\n raise AttributeError(name)\n return val\n except AttributeError as e:\n if default is not notset:\n return default\n if skip:\n import pytest\n\n pytest.skip(f\"no {name!r} option found\")\n raise ValueError(f\"no option named {name!r}\") from e\n\n def getvalue(self, name: str, path=None):\n \"\"\"Deprecated, use getoption() instead.\"\"\"\n return self.getoption(name)\n\n def getvalueorskip(self, name: str, path=None):\n \"\"\"Deprecated, use getoption(skip=True) instead.\"\"\"\n return self.getoption(name, skip=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._warn_about_missing_assertion_Config._warn_about_skipped_plugins.for_module_name_msg_in_s.self_issue_config_time_wa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_Config._warn_about_missing_assertion_Config._warn_about_skipped_plugins.for_module_name_msg_in_s.self_issue_config_time_wa", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1508, "end_line": 1534, "span_ids": ["Config._warn_about_skipped_plugins", "Config._warn_about_missing_assertion"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Config:\n\n def _warn_about_missing_assertion(self, mode: str) -> None:\n if not _assertion_supported():\n if mode == \"plain\":\n warning_text = (\n \"ASSERTIONS ARE NOT EXECUTED\"\n \" and FAILING TESTS WILL PASS. 
Are you\"\n \" using python -O?\"\n )\n else:\n warning_text = (\n \"assertions not in test modules or\"\n \" plugins will be ignored\"\n \" because assert statements are not executed \"\n \"by the underlying Python interpreter \"\n \"(are you using python -O?)\\n\"\n )\n self.issue_config_time_warning(\n PytestConfigWarning(warning_text),\n stacklevel=3,\n )\n\n def _warn_about_skipped_plugins(self) -> None:\n for module_name, msg in self.pluginmanager.skipped_plugins:\n self.issue_config_time_warning(\n PytestConfigWarning(f\"skipped plugin {module_name!r}: {msg}\"),\n stacklevel=2,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__strtobool__strtobool.if_val_in_y_yes_t.else_.raise_ValueError_f_invali": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py__strtobool__strtobool.if_val_in_y_yes_t.else_.raise_ValueError_f_invali", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1570, "end_line": 1585, "span_ids": ["_strtobool"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _strtobool(val: str) -> bool:\n \"\"\"Convert a string representation of truth to True or False.\n\n True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values\n are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if\n 'val' is anything else.\n\n .. 
note:: Copied from distutils.util.\n \"\"\"\n val = val.lower()\n if val in (\"y\", \"yes\", \"t\", \"true\", \"on\", \"1\"):\n return True\n elif val in (\"n\", \"no\", \"f\", \"false\", \"off\", \"0\"):\n return False\n else:\n raise ValueError(f\"invalid truth value {val!r}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_parse_warning_filter_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/__init__.py_parse_warning_filter_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1588, "end_line": 1632, "span_ids": ["parse_warning_filter", "apply_warning_filters"], "tokens": 400}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@lru_cache(maxsize=50)\ndef parse_warning_filter(\n arg: str, *, escape: bool\n) -> Tuple[str, str, Type[Warning], str, int]:\n \"\"\"Parse a warnings filter string.\n\n This is copied from warnings._setoption, but does not apply the filter,\n only parses it, and makes the escaping optional.\n \"\"\"\n parts = arg.split(\":\")\n if len(parts) > 5:\n raise warnings._OptionError(f\"too many fields (max 5): {arg!r}\")\n while len(parts) < 5:\n parts.append(\"\")\n action_, message, category_, module, lineno_ = [s.strip() for s in parts]\n action: str = warnings._getaction(action_) # type: ignore[attr-defined]\n category: Type[Warning] = warnings._getcategory(category_) # type: ignore[attr-defined]\n if message and escape:\n message = re.escape(message)\n if module and escape:\n module = re.escape(module) + r\"\\Z\"\n if lineno_:\n try:\n lineno = int(lineno_)\n if lineno < 0:\n raise ValueError\n except (ValueError, OverflowError) as e:\n raise warnings._OptionError(f\"invalid lineno {lineno_!r}\") from e\n else:\n lineno = 0\n return action, message, category, module, lineno\n\n\ndef apply_warning_filters(\n config_filters: Iterable[str], cmdline_filters: Iterable[str]\n) -> None:\n \"\"\"Applies pytest-configured filters to the warnings module\"\"\"\n # Filters should have this precedence: cmdline options, config.\n # Filters should be applied in the inverse order of precedence.\n for arg in config_filters:\n warnings.filterwarnings(*parse_warning_filter(arg, escape=False))\n\n for arg in cmdline_filters:\n warnings.filterwarnings(*parse_warning_filter(arg, escape=True))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_argparse_FILE_OR_DIR._file_or_dir_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_argparse_FILE_OR_DIR._file_or_dir_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, 
"end_line": 29, "span_ids": ["impl:2", "impl", "imports", "imports:23"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import argparse\nimport os\nimport sys\nimport warnings\nfrom gettext import gettext\nfrom typing import Any\nfrom typing import Callable\nfrom typing import cast\nfrom typing import Dict\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport _pytest._io\nfrom _pytest.compat import final\nfrom _pytest.config.exceptions import UsageError\nfrom _pytest.deprecated import ARGUMENT_PERCENT_DEFAULT\nfrom _pytest.deprecated import ARGUMENT_TYPE_STR\nfrom _pytest.deprecated import ARGUMENT_TYPE_STR_CHOICE\n\nif TYPE_CHECKING:\n from typing import NoReturn\n from typing_extensions import Literal\n\nFILE_OR_DIR = \"file_or_dir\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser_Parser.processoption.if_self__processopt_.if_option_dest_.self__processopt_option_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/config/argparsing.py_Parser_Parser.processoption.if_self__processopt_.if_option_dest_.self__processopt_option_", "embedding": null, "metadata": {"file_path": "src/_pytest/config/argparsing.py", "file_name": "argparsing.py", "file_type": "text/x-python", "category": "implementation", "start_line": 32, "end_line": 58, "span_ids": ["Parser", "Parser.processoption"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Parser:\n \"\"\"Parser for command line arguments and ini-file values.\n\n :ivar extra_info: Dict of generic param -> value to display in case\n there's an error processing the command line arguments.\n \"\"\"\n\n prog: Optional[str] = None\n\n def __init__(\n self,\n usage: Optional[str] = None,\n processopt: Optional[Callable[[\"Argument\"], None]] = None,\n ) -> None:\n self._anonymous = OptionGroup(\"custom options\", parser=self)\n self._groups: List[OptionGroup] = []\n self._processopt = processopt\n self._usage = usage\n self._inidict: Dict[str, Tuple[str, Optional[str], Any]] = {}\n self._ininames: List[str] = []\n self.extra_info: Dict[str, Any] = {}\n\n def processoption(self, option: \"Argument\") -> None:\n if self._processopt:\n if option.dest:\n self._processopt(option)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__Interactive_debugging___validate_usepdb_cls.return._modname_classname_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py__Interactive_debugging___validate_usepdb_cls.return._modname_classname_", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 40, "span_ids": ["impl", "docstring", "imports:24", "imports", "_validate_usepdb_cls"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Interactive debugging with PDB, the Python Debugger.\"\"\"\nimport argparse\nimport functools\nimport sys\nimport types\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Generator\nfrom typing import List\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nfrom _pytest import outcomes\nfrom _pytest._code import ExceptionInfo\nfrom _pytest.config import Config\nfrom _pytest.config import ConftestImportFailure\nfrom _pytest.config import hookimpl\nfrom _pytest.config import PytestPluginManager\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.config.exceptions import UsageError\nfrom _pytest.nodes import Node\nfrom _pytest.reports import BaseReport\n\nif TYPE_CHECKING:\n from _pytest.capture import CaptureManager\n from _pytest.runner import CallInfo\n\n\ndef _validate_usepdb_cls(value: str) -> Tuple[str, str]:\n \"\"\"Validate syntax of --pdbcls option.\"\"\"\n try:\n modname, classname = value.split(\":\")\n except ValueError as e:\n raise argparse.ArgumentTypeError(\n f\"{value!r} is not in the format 'modname:classname'\"\n ) from e\n return (modname, classname)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB_pytestPDB._is_capturing.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB_pytestPDB._is_capturing.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 94, "end_line": 109, "span_ids": ["pytestPDB", "pytestPDB._is_capturing"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class pytestPDB:\n \"\"\"Pseudo PDB that defers to the real pdb.\"\"\"\n\n _pluginmanager: Optional[PytestPluginManager] = None\n _config: Optional[Config] = None\n _saved: List[\n Tuple[Callable[..., None], Optional[PytestPluginManager], Optional[Config]]\n ] = []\n _recursive_debug = 0\n _wrapped_pdb_cls: Optional[Tuple[Type[Any], Type[Any]]] = None\n\n @classmethod\n def _is_capturing(cls, capman: Optional[\"CaptureManager\"]) -> Union[str, bool]:\n if capman:\n return capman.is_capturing()\n return False", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._import_pdb_cls_pytestPDB._import_pdb_cls.return.wrapped_cls": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/debugging.py_pytestPDB._import_pdb_cls_pytestPDB._import_pdb_cls.return.wrapped_cls", "embedding": null, "metadata": {"file_path": "src/_pytest/debugging.py", "file_name": "debugging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 111, "end_line": 148, "span_ids": ["pytestPDB._import_pdb_cls"], "tokens": 298}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class pytestPDB:\n\n @classmethod\n def _import_pdb_cls(cls, capman: Optional[\"CaptureManager\"]):\n if not cls._config:\n import pdb\n\n # Happens when using pytest.set_trace outside of a test.\n return pdb.Pdb\n\n usepdb_cls = cls._config.getvalue(\"usepdb_cls\")\n\n if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls:\n return cls._wrapped_pdb_cls[1]\n\n if usepdb_cls:\n modname, classname = usepdb_cls\n\n try:\n __import__(modname)\n mod = sys.modules[modname]\n\n # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp).\n parts = classname.split(\".\")\n pdb_cls = getattr(mod, parts[0])\n for part in parts[1:]:\n pdb_cls = getattr(pdb_cls, part)\n except Exception as exc:\n value = \":\".join((modname, classname))\n raise UsageError(\n f\"--pdbcls: could not import {value!r}: {exc}\"\n ) from exc\n else:\n import pdb\n\n pdb_cls = pdb.Pdb\n\n wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman)\n cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls)\n return wrapped_cls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/deprecated.py__Deprecation_messages_a_ARGUMENT_TYPE_STR_CHOICE.UnformattedWarning_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/deprecated.py__Deprecation_messages_a_ARGUMENT_TYPE_STR_CHOICE.UnformattedWarning_", "embedding": null, "metadata": {"file_path": "src/_pytest/deprecated.py", "file_name": "deprecated.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 82, "span_ids": ["impl", "docstring", "imports"], "tokens": 693}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Deprecation messages and bits of code used elsewhere in the codebase that\nis planned to be removed in the next pytest release.\n\nKeeping it in a central location makes it easy to track what is deprecated and should\nbe removed when the time comes.\n\nAll constants defined in this module should be either instances of\n:class:`PytestWarning`, or 
:class:`UnformattedWarning`\nin case of warnings which need to format their messages.\n\"\"\"\nfrom warnings import warn\n\nfrom _pytest.warning_types import PytestDeprecationWarning\nfrom _pytest.warning_types import UnformattedWarning\n\n# set of plugins which have been integrated into the core; we use this list to ignore\n# them during registration to avoid conflicts\nDEPRECATED_EXTERNAL_PLUGINS = {\n \"pytest_catchlog\",\n \"pytest_capturelog\",\n \"pytest_faulthandler\",\n}\n\n\nFILLFUNCARGS = UnformattedWarning(\n PytestDeprecationWarning,\n \"{name} is deprecated, use \"\n \"function._request._fillfixtures() instead if you cannot avoid reaching into internals.\",\n)\n\nPYTEST_COLLECT_MODULE = UnformattedWarning(\n PytestDeprecationWarning,\n \"pytest.collect.{name} was moved to pytest.{name}\\n\"\n \"Please update to the new name.\",\n)\n\nYIELD_FIXTURE = PytestDeprecationWarning(\n \"@pytest.yield_fixture is deprecated.\\n\"\n \"Use @pytest.fixture instead; they are the same.\"\n)\n\nMINUS_K_DASH = PytestDeprecationWarning(\n \"The `-k '-expr'` syntax to -k is deprecated.\\nUse `-k 'not expr'` instead.\"\n)\n\nMINUS_K_COLON = PytestDeprecationWarning(\n \"The `-k 'expr:'` syntax to -k is deprecated.\\n\"\n \"Please open an issue if you use this and want a replacement.\"\n)\n\nWARNING_CAPTURED_HOOK = PytestDeprecationWarning(\n \"The pytest_warning_captured is deprecated and will be removed in a future release.\\n\"\n \"Please use pytest_warning_recorded instead.\"\n)\n\nFSCOLLECTOR_GETHOOKPROXY_ISINITPATH = PytestDeprecationWarning(\n \"The gethookproxy() and isinitpath() methods of FSCollector and Package are deprecated; \"\n \"use self.session.gethookproxy() and self.session.isinitpath() instead. \"\n)\n\nSTRICT_OPTION = PytestDeprecationWarning(\n \"The --strict option is deprecated, use --strict-markers instead.\"\n)\n\nPRIVATE = PytestDeprecationWarning(\"A private pytest class or function was used.\")\n\nUNITTEST_SKIP_DURING_COLLECTION = PytestDeprecationWarning(\n \"Raising unittest.SkipTest to skip tests during collection is deprecated. \"\n \"Use pytest.skip() instead.\"\n)\n\nARGUMENT_PERCENT_DEFAULT = PytestDeprecationWarning(\n 'pytest now uses argparse. 
\"%default\" should be changed to \"%(default)s\"',\n)\n\nARGUMENT_TYPE_STR_CHOICE = UnformattedWarning(\n PytestDeprecationWarning,\n \"`type` argument to addoption() is the string {typ!r}.\"\n \" For choices this is optional and can be omitted, \"\n \" but when supplied should be a type (for example `str` or `int`).\"\n \" (options: {names})\",\n)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/deprecated.py_ARGUMENT_TYPE_STR_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/deprecated.py_ARGUMENT_TYPE_STR_", "embedding": null, "metadata": {"file_path": "src/_pytest/deprecated.py", "file_name": "deprecated.py", "file_type": "text/x-python", "category": "implementation", "start_line": 84, "end_line": 112, "span_ids": ["check_ispytest", "impl:27"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "ARGUMENT_TYPE_STR = UnformattedWarning(\n PytestDeprecationWarning,\n \"`type` argument to addoption() is the string {typ!r}, \"\n \" but when supplied should be a type (for example `str` or `int`).\"\n \" (options: {names})\",\n)\n\n\n# You want to make some `__init__` or function \"private\".\n#\n# def my_private_function(some, args):\n# ...\n#\n# Do this:\n#\n# def my_private_function(some, args, *, _ispytest: bool = False):\n# check_ispytest(_ispytest)\n# ...\n#\n# Change all internal/allowed calls to\n#\n# my_private_function(some, args, _ispytest=True)\n#\n# All other calls will get the default _ispytest=False and trigger\n# the warning (possibly error in the future).\ndef check_ispytest(ispytest: bool) -> None:\n if not ispytest:\n warn(PRIVATE, stacklevel=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__Discover_and_run_docte_CHECKER_CLASS.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__Discover_and_run_docte_CHECKER_CLASS.None", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 64, "span_ids": ["impl", "impl:2", "docstring", "imports:40", "imports"], "tokens": 426}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Discover and run doctests in modules and test files.\"\"\"\nimport bdb\nimport inspect\nimport platform\nimport sys\nimport traceback\nimport types\nimport warnings\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import 
Iterable\nfrom typing import List\nfrom typing import Optional\nfrom typing import Pattern\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport py.path\n\nimport pytest\nfrom _pytest import outcomes\nfrom _pytest._code.code import ExceptionInfo\nfrom _pytest._code.code import ReprFileLocation\nfrom _pytest._code.code import TerminalRepr\nfrom _pytest._io import TerminalWriter\nfrom _pytest.compat import safe_getattr\nfrom _pytest.config import Config\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.nodes import Collector\nfrom _pytest.outcomes import OutcomeException\nfrom _pytest.pathlib import fnmatch_ex\nfrom _pytest.pathlib import import_path\nfrom _pytest.python_api import approx\nfrom _pytest.warning_types import PytestWarning\n\nif TYPE_CHECKING:\n import doctest\n\nDOCTEST_REPORT_CHOICE_NONE = \"none\"\nDOCTEST_REPORT_CHOICE_CDIFF = \"cdiff\"\nDOCTEST_REPORT_CHOICE_NDIFF = \"ndiff\"\nDOCTEST_REPORT_CHOICE_UDIFF = \"udiff\"\nDOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = \"only_first_failure\"\n\nDOCTEST_REPORT_CHOICES = (\n DOCTEST_REPORT_CHOICE_NONE,\n DOCTEST_REPORT_CHOICE_CDIFF,\n DOCTEST_REPORT_CHOICE_NDIFF,\n DOCTEST_REPORT_CHOICE_UDIFF,\n DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,\n)\n\n# Lazy definition of runner class\nRUNNER_CLASS = None\n# Lazy definition of output checker class\nCHECKER_CLASS: Optional[Type[\"doctest.OutputChecker\"]] = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_pytest_addoption_pytest_addoption.None_6": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_pytest_addoption_pytest_addoption.None_6", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 67, "end_line": 114, "span_ids": ["pytest_addoption"], "tokens": 326}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n parser.addini(\n \"doctest_optionflags\",\n \"option flags for doctests\",\n type=\"args\",\n default=[\"ELLIPSIS\"],\n )\n parser.addini(\n \"doctest_encoding\", \"encoding used for doctest files\", default=\"utf-8\"\n )\n group = parser.getgroup(\"collect\")\n group.addoption(\n \"--doctest-modules\",\n action=\"store_true\",\n default=False,\n help=\"run doctests in all .py modules\",\n dest=\"doctestmodules\",\n )\n group.addoption(\n \"--doctest-report\",\n type=str.lower,\n default=\"udiff\",\n help=\"choose another output format for diffs on doctest failure\",\n choices=DOCTEST_REPORT_CHOICES,\n dest=\"doctestreport\",\n )\n group.addoption(\n \"--doctest-glob\",\n action=\"append\",\n default=[],\n metavar=\"pat\",\n help=\"doctests file matching pattern, default: test*.txt\",\n dest=\"doctestglob\",\n )\n group.addoption(\n \"--doctest-ignore-import-errors\",\n action=\"store_true\",\n default=False,\n help=\"ignore doctest 
ImportErrors\",\n dest=\"doctest_ignore_import_errors\",\n )\n group.addoption(\n \"--doctest-continue-on-failure\",\n action=\"store_true\",\n default=False,\n help=\"for a given doctest, continue to run after the first failure\",\n dest=\"doctest_continue_on_failure\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem_DoctestItem.setup.if_self_dtest_is_not_None.self_dtest_globs_update_g": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem_DoctestItem.setup.if_self_dtest_is_not_None.self_dtest_globs_update_g", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 250, "end_line": 285, "span_ids": ["DoctestItem.from_parent", "DoctestItem", "DoctestItem.setup"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DoctestItem(pytest.Item):\n def __init__(\n self,\n name: str,\n parent: \"Union[DoctestTextfile, DoctestModule]\",\n runner: Optional[\"doctest.DocTestRunner\"] = None,\n dtest: Optional[\"doctest.DocTest\"] = None,\n ) -> None:\n super().__init__(name, parent)\n self.runner = runner\n self.dtest = dtest\n self.obj = None\n self.fixture_request: Optional[FixtureRequest] = None\n\n @classmethod\n def from_parent( # type: ignore\n cls,\n parent: \"Union[DoctestTextfile, DoctestModule]\",\n *,\n name: str,\n runner: \"doctest.DocTestRunner\",\n dtest: \"doctest.DocTest\",\n ):\n # incompatible signature due to to imposed limits on sublcass\n \"\"\"The public named constructor.\"\"\"\n return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest)\n\n def setup(self) -> None:\n if self.dtest is not None:\n self.fixture_request = _setup_fixtures(self)\n globs = dict(getfixture=self.fixture_request.getfixturevalue)\n for name, value in self.fixture_request.getfixturevalue(\n \"doctest_namespace\"\n ).items():\n globs[name] = value\n self.dtest.globs.update(globs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem.runtest_DoctestItem.runtest.if_failures_.raise_MultipleDoctestFail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py_DoctestItem.runtest_DoctestItem.runtest.if_failures_.raise_MultipleDoctestFail", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 287, "end_line": 297, "span_ids": ["DoctestItem.runtest"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DoctestItem(pytest.Item):\n\n def runtest(self) -> None:\n assert self.dtest is not None\n assert self.runner is not None\n _check_all_skipped(self.dtest)\n self._disable_output_capturing_for_darwin()\n failures: List[\"doctest.DocTestFailure\"] = []\n # Type ignored because we change the type of `out` from what\n # doctest expects.\n self.runner.run(self.dtest, out=failures) # type: ignore[arg-type]\n if failures:\n raise MultipleDoctestFailures(failures)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_checker__get_number_flag.return.doctest_register_optionfl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/doctest.py__get_checker__get_number_flag.return.doctest_register_optionfl", "embedding": null, "metadata": {"file_path": "src/_pytest/doctest.py", "file_name": "doctest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 673, "end_line": 711, "span_ids": ["_get_allow_unicode_flag", "_get_checker", "_get_number_flag", "_get_allow_bytes_flag"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_checker() -> \"doctest.OutputChecker\":\n \"\"\"Return a doctest.OutputChecker subclass that supports some\n additional options:\n\n * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b''\n prefixes (respectively) in string literals. 
Useful when the same\n doctest should run in Python 2 and Python 3.\n\n * NUMBER to ignore floating-point differences smaller than the\n precision of the literal number in the doctest.\n\n An inner class is used to avoid importing \"doctest\" at the module\n level.\n \"\"\"\n global CHECKER_CLASS\n if CHECKER_CLASS is None:\n CHECKER_CLASS = _init_checker_class()\n return CHECKER_CLASS()\n\n\ndef _get_allow_unicode_flag() -> int:\n \"\"\"Register and return the ALLOW_UNICODE flag.\"\"\"\n import doctest\n\n return doctest.register_optionflag(\"ALLOW_UNICODE\")\n\n\ndef _get_allow_bytes_flag() -> int:\n \"\"\"Register and return the ALLOW_BYTES flag.\"\"\"\n import doctest\n\n return doctest.register_optionflag(\"ALLOW_BYTES\")\n\n\ndef _get_number_flag() -> int:\n \"\"\"Register and return the NUMBER flag.\"\"\"\n import doctest\n\n return doctest.register_optionflag(\"NUMBER\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_io_pytest_configure.faulthandler_enable_file_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_io_pytest_configure.faulthandler_enable_file_", "embedding": null, "metadata": {"file_path": "src/_pytest/faulthandler.py", "file_name": "faulthandler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 32, "span_ids": ["impl", "pytest_configure", "imports", "pytest_addoption"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import io\nimport os\nimport sys\nfrom typing import Generator\nfrom typing import TextIO\n\nimport pytest\nfrom _pytest.config import Config\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.nodes import Item\nfrom _pytest.store import StoreKey\n\n\nfault_handler_stderr_key = StoreKey[TextIO]()\nfault_handler_originally_enabled_key = StoreKey[bool]()\n\n\ndef pytest_addoption(parser: Parser) -> None:\n help = (\n \"Dump the traceback of all threads if a test takes \"\n \"more than TIMEOUT seconds to finish.\"\n )\n parser.addini(\"faulthandler_timeout\", help, default=0.0)\n\n\ndef pytest_configure(config: Config) -> None:\n import faulthandler\n\n stderr_fd_copy = os.dup(get_stderr_fileno())\n config._store[fault_handler_stderr_key] = open(stderr_fd_copy, \"w\")\n config._store[fault_handler_originally_enabled_key] = faulthandler.is_enabled()\n faulthandler.enable(file=config._store[fault_handler_stderr_key])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_pytest_unconfigure_pytest_unconfigure.if_config__store_get_faul.faulthandler_enable_file_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_pytest_unconfigure_pytest_unconfigure.if_config__store_get_faul.faulthandler_enable_file_", "embedding": null, "metadata": {"file_path": "src/_pytest/faulthandler.py", "file_name": 
"faulthandler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 35, "end_line": 45, "span_ids": ["pytest_unconfigure"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_unconfigure(config: Config) -> None:\n import faulthandler\n\n faulthandler.disable()\n # Close the dup file installed during pytest_configure.\n if fault_handler_stderr_key in config._store:\n config._store[fault_handler_stderr_key].close()\n del config._store[fault_handler_stderr_key]\n if config._store.get(fault_handler_originally_enabled_key, False):\n # Re-enable the faulthandler if it was originally enabled.\n faulthandler.enable(file=get_stderr_fileno())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_get_stderr_fileno_get_timeout_config_value.return.float_config_getini_faul": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_get_stderr_fileno_get_timeout_config_value.return.float_config_getini_faul", "embedding": null, "metadata": {"file_path": "src/_pytest/faulthandler.py", "file_name": "faulthandler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 48, "end_line": 64, "span_ids": ["get_timeout_config_value", "get_stderr_fileno"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_stderr_fileno() -> int:\n try:\n fileno = sys.stderr.fileno()\n # The Twisted Logger will return an invalid file descriptor since it is not backed\n # by an FD. 
So, let's also forward this to the same code path as with pytest-xdist.\n if fileno == -1:\n raise AttributeError()\n return fileno\n except (AttributeError, io.UnsupportedOperation):\n # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file.\n # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors\n # This is potentially dangerous, but the best we can do.\n return sys.__stderr__.fileno()\n\n\ndef get_timeout_config_value(config: Config) -> float:\n return float(config.getini(\"faulthandler_timeout\") or 0.0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_pytest_runtest_protocol_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/faulthandler.py_pytest_runtest_protocol_", "embedding": null, "metadata": {"file_path": "src/_pytest/faulthandler.py", "file_name": "faulthandler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 67, "end_line": 98, "span_ids": ["pytest_runtest_protocol", "pytest_enter_pdb", "pytest_exception_interact"], "tokens": 219}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.hookimpl(hookwrapper=True, trylast=True)\ndef pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:\n timeout = get_timeout_config_value(item.config)\n stderr = item.config._store[fault_handler_stderr_key]\n if timeout > 0 and stderr is not None:\n import faulthandler\n\n faulthandler.dump_traceback_later(timeout, file=stderr)\n try:\n yield\n finally:\n faulthandler.cancel_dump_traceback_later()\n else:\n yield\n\n\n@pytest.hookimpl(tryfirst=True)\ndef pytest_enter_pdb() -> None:\n \"\"\"Cancel any traceback dumping due to timeout before entering pdb.\"\"\"\n import faulthandler\n\n faulthandler.cancel_dump_traceback_later()\n\n\n@pytest.hookimpl(tryfirst=True)\ndef pytest_exception_interact() -> None:\n \"\"\"Cancel any traceback dumping due to an interactive exception being\n raised.\"\"\"\n import faulthandler\n\n faulthandler.cancel_dump_traceback_later()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_functools_pytest_sessionstart.session._fixturemanager.FixtureManager_session_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_functools_pytest_sessionstart.session._fixturemanager.FixtureManager_session_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 113, "span_ids": ["impl", "pytest_sessionstart", "impl:2", "PseudoFixtureDef", "imports:63", "imports"], "tokens": 761}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import functools\nimport inspect\nimport os\nimport sys\nimport warnings\nfrom collections import defaultdict\nfrom collections import deque\nfrom pathlib import Path\nfrom types import TracebackType\nfrom typing import Any\nfrom typing import Callable\nfrom typing import cast\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import Generic\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import List\nfrom typing import MutableMapping\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\n\nimport attr\nimport py\n\nimport _pytest\nfrom _pytest import nodes\nfrom _pytest._code import getfslineno\nfrom _pytest._code.code import FormattedExcinfo\nfrom _pytest._code.code import TerminalRepr\nfrom _pytest._io import TerminalWriter\nfrom _pytest.compat import _format_args\nfrom _pytest.compat import _PytestWrapper\nfrom _pytest.compat import assert_never\nfrom _pytest.compat import final\nfrom _pytest.compat import get_real_func\nfrom _pytest.compat import get_real_method\nfrom _pytest.compat import getfuncargnames\nfrom _pytest.compat import getimfunc\nfrom _pytest.compat import getlocation\nfrom _pytest.compat import is_generator\nfrom _pytest.compat import NOTSET\nfrom _pytest.compat import safe_getattr\nfrom _pytest.config import _PluggyPlugin\nfrom _pytest.config import Config\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.deprecated import FILLFUNCARGS\nfrom _pytest.deprecated import YIELD_FIXTURE\nfrom _pytest.mark import Mark\nfrom _pytest.mark import ParameterSet\nfrom _pytest.mark.structures import MarkDecorator\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import TEST_OUTCOME\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pathlib import bestrelpath\nfrom _pytest.store import StoreKey\n\nif TYPE_CHECKING:\n from typing import Deque\n from typing import NoReturn\n from typing_extensions import Literal\n\n from _pytest.main import Session\n from _pytest.python import CallSpec2\n from _pytest.python import Function\n from _pytest.python import Metafunc\n\n _Scope = Literal[\"session\", \"package\", \"module\", \"class\", \"function\"]\n\n\n# The value of the fixture -- return/yield of the fixture function (type variable).\n_FixtureValue = TypeVar(\"_FixtureValue\")\n# The type of the fixture function (type variable).\n_FixtureFunction = TypeVar(\"_FixtureFunction\", bound=Callable[..., object])\n# The type of a fixture function (type alias generic in fixture value).\n_FixtureFunc = Union[\n Callable[..., _FixtureValue], Callable[..., Generator[_FixtureValue, None, None]]\n]\n# The type of FixtureDef.cached_result (type alias generic in fixture value).\n_FixtureCachedResult = Union[\n Tuple[\n # The result.\n _FixtureValue,\n # Cache key.\n object,\n None,\n ],\n Tuple[\n None,\n # Cache key.\n object,\n # Exc info if raised.\n Tuple[Type[BaseException], BaseException, TracebackType],\n ],\n]\n\n\n@attr.s(frozen=True)\nclass PseudoFixtureDef(Generic[_FixtureValue]):\n cached_result = attr.ib(type=\"_FixtureCachedResult[_FixtureValue]\")\n scope = attr.ib(type=\"_Scope\")\n\n\ndef pytest_sessionstart(session: \"Session\") -> None:\n 
session._fixturemanager = FixtureManager(session)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_get_scope_package_name2pseudofixturedef_key.StoreKey_Dict_str_Fixtu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_get_scope_package_name2pseudofixturedef_key.StoreKey_Dict_str_Fixtu", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 116, "end_line": 151, "span_ids": ["get_scope_package", "impl:12", "get_scope_node"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_scope_package(node, fixturedef: \"FixtureDef[object]\"):\n import pytest\n\n cls = pytest.Package\n current = node\n fixture_package_name = \"{}/{}\".format(fixturedef.baseid, \"__init__.py\")\n while current and (\n type(current) is not cls or fixture_package_name != current.nodeid\n ):\n current = current.parent\n if current is None:\n return node.session\n return current\n\n\ndef get_scope_node(\n node: nodes.Node, scope: \"_Scope\"\n) -> Optional[Union[nodes.Item, nodes.Collector]]:\n import _pytest.python\n\n if scope == \"function\":\n return node.getparent(nodes.Item)\n elif scope == \"class\":\n return node.getparent(_pytest.python.Class)\n elif scope == \"module\":\n return node.getparent(_pytest.python.Module)\n elif scope == \"package\":\n return node.getparent(_pytest.python.Package)\n elif scope == \"session\":\n return node.getparent(_pytest.main.Session)\n else:\n assert_never(scope)\n\n\n# Used for storing artificial fixturedefs for direct parametrization.\nname2pseudofixturedef_key = StoreKey[Dict[str, \"FixtureDef[Any]\"]]()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_add_funcarg_pseudo_fixture_def_add_funcarg_pseudo_fixture_def.for_argname_valuelist_in.if_name2pseudofixturedef_.else_.if_name2pseudofixturedef_.name2pseudofixturedef_arg": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_add_funcarg_pseudo_fixture_def_add_funcarg_pseudo_fixture_def.for_argname_valuelist_in.if_name2pseudofixturedef_.else_.if_name2pseudofixturedef_.name2pseudofixturedef_arg", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 154, "end_line": 219, "span_ids": ["add_funcarg_pseudo_fixture_def"], "tokens": 725}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def 
add_funcarg_pseudo_fixture_def(\n collector: nodes.Collector, metafunc: \"Metafunc\", fixturemanager: \"FixtureManager\"\n) -> None:\n # This function will transform all collected calls to functions\n # if they use direct funcargs (i.e. direct parametrization)\n # because we want later test execution to be able to rely on\n # an existing FixtureDef structure for all arguments.\n # XXX we can probably avoid this algorithm if we modify CallSpec2\n # to directly care for creating the fixturedefs within its methods.\n if not metafunc._calls[0].funcargs:\n # This function call does not have direct parametrization.\n return\n # Collect funcargs of all callspecs into a list of values.\n arg2params: Dict[str, List[object]] = {}\n arg2scope: Dict[str, _Scope] = {}\n for callspec in metafunc._calls:\n for argname, argvalue in callspec.funcargs.items():\n assert argname not in callspec.params\n callspec.params[argname] = argvalue\n arg2params_list = arg2params.setdefault(argname, [])\n callspec.indices[argname] = len(arg2params_list)\n arg2params_list.append(argvalue)\n if argname not in arg2scope:\n scopenum = callspec._arg2scopenum.get(argname, scopenum_function)\n arg2scope[argname] = scopes[scopenum]\n callspec.funcargs.clear()\n\n # Register artificial FixtureDef's so that later at test execution\n # time we can rely on a proper FixtureDef to exist for fixture setup.\n arg2fixturedefs = metafunc._arg2fixturedefs\n for argname, valuelist in arg2params.items():\n # If we have a scope that is higher than function, we need\n # to make sure we only ever create an according fixturedef on\n # a per-scope basis. We thus store and cache the fixturedef on the\n # node related to the scope.\n scope = arg2scope[argname]\n node = None\n if scope != \"function\":\n node = get_scope_node(collector, scope)\n if node is None:\n assert scope == \"class\" and isinstance(collector, _pytest.python.Module)\n # Use module-level collector for class-scope (for now).\n node = collector\n if node is None:\n name2pseudofixturedef = None\n else:\n default: Dict[str, FixtureDef[Any]] = {}\n name2pseudofixturedef = node._store.setdefault(\n name2pseudofixturedef_key, default\n )\n if name2pseudofixturedef is not None and argname in name2pseudofixturedef:\n arg2fixturedefs[argname] = [name2pseudofixturedef[argname]]\n else:\n fixturedef = FixtureDef(\n fixturemanager=fixturemanager,\n baseid=\"\",\n argname=argname,\n func=get_direct_param_fixture_func,\n scope=arg2scope[argname],\n params=valuelist,\n unittest=False,\n ids=None,\n )\n arg2fixturedefs[argname] = [fixturedef]\n if name2pseudofixturedef is not None:\n name2pseudofixturedef[argname] = fixturedef", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__Algorithm_for_sorting_o_reorder_items.return.list_reorder_items_atscop": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__Algorithm_for_sorting_o_reorder_items.return.list_reorder_items_atscop", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 268, "end_line": 289, "span_ids": ["get_parametrized_fixture_keys", "reorder_items"], "tokens": 276}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# Algorithm for sorting on a per-parametrized resource setup basis.\n# It is called for scopenum==0 (session) first and performs sorting\n# down to the lower scopes such as to minimize number of \"high scope\"\n# setups and teardowns.\n\n\ndef reorder_items(items: Sequence[nodes.Item]) -> List[nodes.Item]:\n argkeys_cache: Dict[int, Dict[nodes.Item, Dict[_Key, None]]] = {}\n items_by_argkey: Dict[int, Dict[_Key, Deque[nodes.Item]]] = {}\n for scopenum in range(0, scopenum_function):\n d: Dict[nodes.Item, Dict[_Key, None]] = {}\n argkeys_cache[scopenum] = d\n item_d: Dict[_Key, Deque[nodes.Item]] = defaultdict(deque)\n items_by_argkey[scopenum] = item_d\n for item in items:\n keys = dict.fromkeys(get_parametrized_fixture_keys(item, scopenum), None)\n if keys:\n d[item] = keys\n for key in keys:\n item_d[key].append(item)\n items_dict = dict.fromkeys(items, None)\n return list(reorder_items_atscope(items_dict, argkeys_cache, items_by_argkey, 0))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__fillfuncargs_fillfixtures._fill_fixtures_impl_funct": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__fillfuncargs_fillfixtures._fill_fixtures_impl_funct", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 348, "end_line": 359, "span_ids": ["fillfixtures", "_fillfuncargs"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _fillfuncargs(function: \"Function\") -> None:\n \"\"\"Fill missing fixtures for a test function, old public API (deprecated).\"\"\"\n warnings.warn(FILLFUNCARGS.format(name=\"pytest._fillfuncargs()\"), stacklevel=2)\n _fill_fixtures_impl(function)\n\n\ndef fillfixtures(function: \"Function\") -> None:\n \"\"\"Fill missing fixtures for a test function (deprecated).\"\"\"\n warnings.warn(\n FILLFUNCARGS.format(name=\"_pytest.fixtures.fillfixtures()\"), stacklevel=2\n )\n _fill_fixtures_impl(function)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__fill_fixtures_impl_get_direct_param_fixture_func.return.request_param": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py__fill_fixtures_impl_get_direct_param_fixture_func.return.request_param", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 362, "end_line": 387, "span_ids": ["_fill_fixtures_impl", "get_direct_param_fixture_func"], "tokens": 
224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _fill_fixtures_impl(function: \"Function\") -> None:\n \"\"\"Internal implementation to fill fixtures on the given function object.\"\"\"\n try:\n request = function._request\n except AttributeError:\n # XXX this special code path is only expected to execute\n # with the oejskit plugin. It uses classes with funcargs\n # and we thus have to work a bit to allow this.\n fm = function.session._fixturemanager\n assert function.parent is not None\n fi = fm.getfixtureinfo(function.parent, function.obj, None)\n function._fixtureinfo = fi\n request = function._request = FixtureRequest(function, _ispytest=True)\n fm.session._setupstate.setup(function)\n request._fillfixtures()\n # Prune out funcargs for jstests.\n newfuncargs = {}\n for name in fi.argnames:\n newfuncargs[name] = function.funcargs[name]\n function.funcargs = newfuncargs\n else:\n request._fillfixtures()\n\n\ndef get_direct_param_fixture_func(request):\n return request.param", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._get_active_fixturedef_FixtureRequest._get_fixturestack.return.values": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._get_active_fixturedef_FixtureRequest._get_fixturestack.return.values", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 588, "end_line": 615, "span_ids": ["FixtureRequest._get_fixturestack", "FixtureRequest._get_active_fixturedef"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n def _get_active_fixturedef(\n self, argname: str\n ) -> Union[\"FixtureDef[object]\", PseudoFixtureDef[object]]:\n try:\n return self._fixture_defs[argname]\n except KeyError:\n try:\n fixturedef = self._getnextfixturedef(argname)\n except FixtureLookupError:\n if argname == \"request\":\n cached_result = (self, [0], None)\n scope: _Scope = \"function\"\n return PseudoFixtureDef(cached_result, scope)\n raise\n # Remove indent to prevent the python3 exception\n # from leaking into the call.\n self._compute_fixture_value(fixturedef)\n self._fixture_defs[argname] = fixturedef\n return fixturedef\n\n def _get_fixturestack(self) -> List[\"FixtureDef[Any]\"]:\n current = self\n values: List[FixtureDef[Any]] = []\n while isinstance(current, SubRequest):\n values.append(current._fixturedef) # type: ignore[has-type]\n current = current._parent_request\n values.reverse()\n return values", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._schedule_finalizers_FixtureRequest._check_scope.if_scopemismatch_invoking.fail_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._schedule_finalizers_FixtureRequest._check_scope.if_scopemismatch_invoking.fail_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 692, "end_line": 715, "span_ids": ["FixtureRequest._check_scope", "FixtureRequest._schedule_finalizers"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n def _schedule_finalizers(\n self, fixturedef: \"FixtureDef[object]\", subrequest: \"SubRequest\"\n ) -> None:\n # If fixture function failed it might have registered finalizers.\n subrequest.node.addfinalizer(lambda: fixturedef.finish(request=subrequest))\n\n def _check_scope(\n self,\n argname: str,\n invoking_scope: \"_Scope\",\n requested_scope: \"_Scope\",\n ) -> None:\n if argname == \"request\":\n return\n if scopemismatch(invoking_scope, requested_scope):\n # Try to report something helpful.\n lines = self._factorytraceback()\n fail(\n \"ScopeMismatch: You tried to access the %r scoped \"\n \"fixture %r with a %r scoped request object, \"\n \"involved factories\\n%s\"\n % ((requested_scope, argname, invoking_scope, \"\\n\".join(lines))),\n pytrace=False,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._factorytraceback_FixtureRequest._factorytraceback.return.lines": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureRequest._factorytraceback_FixtureRequest._factorytraceback.return.lines", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 717, "end_line": 729, "span_ids": ["FixtureRequest._factorytraceback"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureRequest:\n\n def _factorytraceback(self) -> List[str]:\n lines = []\n for fixturedef in self._get_fixturestack():\n factory = fixturedef.func\n fs, lineno = getfslineno(factory)\n if isinstance(fs, Path):\n session: Session = self._pyfuncitem.session\n p = bestrelpath(Path(session.fspath), fs)\n else:\n p = fs\n args = _format_args(factory)\n lines.append(\"%s:%d: def %s%s\" % (p, lineno + 1, factory.__name__, args))\n return lines", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_SubRequest._schedule_finalizers_SubRequest._schedule_finalizers.super__schedule_finaliz": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_SubRequest._schedule_finalizers_SubRequest._schedule_finalizers.super__schedule_finaliz", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 789, "end_line": 799, "span_ids": ["SubRequest._schedule_finalizers"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass SubRequest(FixtureRequest):\n\n def _schedule_finalizers(\n self, fixturedef: \"FixtureDef[object]\", subrequest: \"SubRequest\"\n ) -> None:\n # If the executing fixturedef was not explicitly requested in the argument list (via\n # getfixturevalue inside the fixture call) then ensure this fixture def will be finished\n # first.\n if fixturedef.argname not in self.fixturenames:\n fixturedef.addfinalizer(\n functools.partial(self._fixturedef.finish, request=self)\n )\n super()._schedule_finalizers(fixturedef, subrequest)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_scopes_scope2index.try_.except_ValueError_.fail_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_scopes_scope2index.try_.except_ValueError_.fail_", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 802, "end_line": 822, "span_ids": ["impl:16", "scope2index", "scopemismatch"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "scopes: List[\"_Scope\"] = [\"session\", \"package\", \"module\", \"class\", \"function\"]\nscopenum_function = scopes.index(\"function\")\n\n\ndef scopemismatch(currentscope: \"_Scope\", newscope: \"_Scope\") -> bool:\n return scopes.index(newscope) > scopes.index(currentscope)\n\n\ndef scope2index(scope: str, descr: str, where: Optional[str] = None) -> int:\n \"\"\"Look up the index of ``scope`` and raise a descriptive value error\n if not defined.\"\"\"\n strscopes: Sequence[str] = scopes\n try:\n return strscopes.index(scope)\n except ValueError:\n fail(\n \"{} {}got an unexpected scope value '{}'\".format(\n descr, f\"from {where} \" if where else \"\", scope\n ),\n pytrace=False,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureLookupErrorRepr_FixtureLookupErrorRepr.toterminal.tw_line_s_d_os_fsp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureLookupErrorRepr_FixtureLookupErrorRepr.toterminal.tw_line_s_d_os_fsp", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 882, "end_line": 913, "span_ids": ["FixtureLookupErrorRepr", "FixtureLookupErrorRepr.toterminal"], "tokens": 254}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureLookupErrorRepr(TerminalRepr):\n def __init__(\n self,\n filename: Union[str, \"os.PathLike[str]\"],\n firstlineno: int,\n tblines: Sequence[str],\n errorstring: str,\n argname: Optional[str],\n ) -> None:\n self.tblines = tblines\n self.errorstring = errorstring\n self.filename = filename\n self.firstlineno = firstlineno\n self.argname = argname\n\n def toterminal(self, tw: TerminalWriter) -> None:\n # tw.line(\"FixtureLookupError: %s\" %(self.argname), red=True)\n for tbline in self.tblines:\n tw.line(tbline.rstrip())\n lines = self.errorstring.split(\"\\n\")\n if lines:\n tw.line(\n \"{} {}\".format(FormattedExcinfo.fail_marker, lines[0].strip()),\n red=True,\n )\n for line in lines[1:]:\n tw.line(\n f\"{FormattedExcinfo.flow_marker} {line.strip()}\",\n red=True,\n )\n tw.line()\n tw.line(\"%s:%d\" % (os.fspath(self.filename), self.firstlineno + 1))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_wrap_function_to_error_out_if_called_directly_wrap_function_to_error_out_if_called_directly.return.cast__FixtureFunction_re": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_wrap_function_to_error_out_if_called_directly_wrap_function_to_error_out_if_called_directly.return.cast__FixtureFunction_re", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1169, "end_line": 1190, "span_ids": ["wrap_function_to_error_out_if_called_directly"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def wrap_function_to_error_out_if_called_directly(\n function: _FixtureFunction,\n fixture_marker: \"FixtureFunctionMarker\",\n) -> _FixtureFunction:\n \"\"\"Wrap the given fixture function so we can raise an error about it being called directly,\n instead of used as an argument in a test function.\"\"\"\n message = (\n 'Fixture \"{name}\" called directly. 
Fixtures are not meant to be called directly,\\n'\n \"but are created automatically when test functions request them as parameters.\\n\"\n \"See https://docs.pytest.org/en/stable/fixture.html for more information about fixtures, and\\n\"\n \"https://docs.pytest.org/en/stable/deprecations.html#calling-fixtures-directly about how to update your code.\"\n ).format(name=fixture_marker.name or function.__name__)\n\n @functools.wraps(function)\n def result(*args, **kwargs):\n fail(message, pytrace=False)\n\n # Keep reference to the original function in our own custom attribute so we don't unwrap\n # further than this point and lose useful wrappings like @mock.patch (#3774).\n result.__pytest_wrapped__ = _PytestWrapper(function) # type: ignore[attr-defined]\n\n return cast(_FixtureFunction, result)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_fixture._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_fixture._", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1235, "end_line": 1250, "span_ids": ["fixture"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@overload\ndef fixture(\n fixture_function: _FixtureFunction,\n *,\n scope: \"Union[_Scope, Callable[[str, Config], _Scope]]\" = ...,\n params: Optional[Iterable[object]] = ...,\n autouse: bool = ...,\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ] = ...,\n name: Optional[str] = ...,\n) -> _FixtureFunction:\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_25_fixture_25._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_25_fixture_25._", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1253, "end_line": 1268, "span_ids": ["fixture_25"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@overload\ndef fixture(\n fixture_function: None = ...,\n *,\n scope: \"Union[_Scope, Callable[[str, Config], _Scope]]\" = ...,\n params: Optional[Iterable[object]] = ...,\n autouse: bool = ...,\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ] = ...,\n name: Optional[str] = None,\n) -> FixtureFunctionMarker:\n ...", "start_char_idx": 
null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_26_fixture_26.return.fixture_marker": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_fixture_26_fixture_26.return.fixture_marker", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1271, "end_line": 1347, "span_ids": ["fixture_26"], "tokens": 662}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def fixture(\n fixture_function: Optional[_FixtureFunction] = None,\n *,\n scope: \"Union[_Scope, Callable[[str, Config], _Scope]]\" = \"function\",\n params: Optional[Iterable[object]] = None,\n autouse: bool = False,\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ] = None,\n name: Optional[str] = None,\n) -> Union[FixtureFunctionMarker, _FixtureFunction]:\n \"\"\"Decorator to mark a fixture factory function.\n\n This decorator can be used, with or without parameters, to define a\n fixture function.\n\n The name of the fixture function can later be referenced to cause its\n invocation ahead of running tests: test modules or classes can use the\n ``pytest.mark.usefixtures(fixturename)`` marker.\n\n Test functions can directly use fixture names as input arguments in which\n case the fixture instance returned from the fixture function will be\n injected.\n\n Fixtures can provide their values to test functions using ``return`` or\n ``yield`` statements. When using ``yield`` the code block after the\n ``yield`` statement is executed as teardown code regardless of the test\n outcome, and must yield exactly once.\n\n :param scope:\n The scope for which this fixture is shared; one of ``\"function\"``\n (default), ``\"class\"``, ``\"module\"``, ``\"package\"`` or ``\"session\"``.\n\n This parameter may also be a callable which receives ``(fixture_name, config)``\n as parameters, and must return a ``str`` with one of the values mentioned above.\n\n See :ref:`dynamic scope` in the docs for more information.\n\n :param params:\n An optional list of parameters which will cause multiple invocations\n of the fixture function and all of the tests using it. The current\n parameter is available in ``request.param``.\n\n :param autouse:\n If True, the fixture func is activated for all tests that can see it.\n If False (the default), an explicit reference is needed to activate\n the fixture.\n\n :param ids:\n List of string ids each corresponding to the params so that they are\n part of the test id. If no ids are provided they will be generated\n automatically from the params.\n\n :param name:\n The name of the fixture. This defaults to the name of the decorated\n function. 
If a fixture is used in the same module in which it is\n defined, the function name of the fixture will be shadowed by the\n function arg that requests the fixture; one way to resolve this is to\n name the decorated function ``fixture_`` and then use\n ``@pytest.fixture(name='')``.\n \"\"\"\n fixture_marker = FixtureFunctionMarker(\n scope=scope,\n params=params,\n autouse=autouse,\n ids=ids,\n name=name,\n )\n\n # Direct decoration.\n if fixture_function:\n return fixture_marker(fixture_function)\n\n return fixture_marker", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.pytest_plugin_registered_FixtureManager._getautousenames.for_parentnodeid_in_nodes.if_basenames_.yield_from_basenames": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.pytest_plugin_registered_FixtureManager._getautousenames.for_parentnodeid_in_nodes.if_basenames_.yield_from_basenames", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1481, "end_line": 1508, "span_ids": ["FixtureManager._getautousenames", "FixtureManager.pytest_plugin_registered"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n\n def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None:\n nodeid = None\n try:\n p = absolutepath(plugin.__file__) # type: ignore[attr-defined]\n except AttributeError:\n pass\n else:\n # Construct the base nodeid which is later used to check\n # what fixtures are visible for particular tests (as denoted\n # by their test id).\n if p.name.startswith(\"conftest.py\"):\n try:\n nodeid = str(p.parent.relative_to(self.config.rootpath))\n except ValueError:\n nodeid = \"\"\n if nodeid == \".\":\n nodeid = \"\"\n if os.sep != nodes.SEP:\n nodeid = nodeid.replace(os.sep, nodes.SEP)\n\n self.parsefactories(plugin, nodeid)\n\n def _getautousenames(self, nodeid: str) -> Iterator[str]:\n \"\"\"Return the names of autouse fixtures applicable to nodeid.\"\"\"\n for parentnodeid in nodes.iterparentnodeids(nodeid):\n basenames = self._nodeid_autousenames.get(parentnodeid)\n if basenames:\n yield from basenames", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.parsefactories_FixtureManager.parsefactories.if_autousenames_.self__nodeid_autousenames": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/fixtures.py_FixtureManager.parsefactories_FixtureManager.parsefactories.if_autousenames_.self__nodeid_autousenames", "embedding": null, "metadata": {"file_path": "src/_pytest/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1612, "end_line": 1669, "span_ids": 
["FixtureManager.parsefactories"], "tokens": 459}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FixtureManager:\n\n def parsefactories(\n self, node_or_obj, nodeid=NOTSET, unittest: bool = False\n ) -> None:\n if nodeid is not NOTSET:\n holderobj = node_or_obj\n else:\n holderobj = node_or_obj.obj\n nodeid = node_or_obj.nodeid\n if holderobj in self._holderobjseen:\n return\n\n self._holderobjseen.add(holderobj)\n autousenames = []\n for name in dir(holderobj):\n # The attribute can be an arbitrary descriptor, so the attribute\n # access below can raise. safe_getatt() ignores such exceptions.\n obj = safe_getattr(holderobj, name, None)\n marker = getfixturemarker(obj)\n if not isinstance(marker, FixtureFunctionMarker):\n # Magic globals with __getattr__ might have got us a wrong\n # fixture attribute.\n continue\n\n if marker.name:\n name = marker.name\n\n # During fixture definition we wrap the original fixture function\n # to issue a warning if called directly, so here we unwrap it in\n # order to not emit the warning when pytest itself calls the\n # fixture function.\n obj = get_real_method(obj, holderobj)\n\n fixture_def = FixtureDef(\n fixturemanager=self,\n baseid=nodeid,\n argname=name,\n func=obj,\n scope=marker.scope,\n params=marker.params,\n unittest=unittest,\n ids=marker.ids,\n )\n\n faclist = self._arg2fixturedefs.setdefault(name, [])\n if fixture_def.has_location:\n faclist.append(fixture_def)\n else:\n # fixturedefs with no location are at the front\n # so this inserts the current fixturedef after the\n # existing fixturedefs from external plugins but\n # before the fixturedefs provided in conftests.\n i = len([f for f in faclist if not f.has_location])\n faclist.insert(i, fixture_def)\n if marker.autouse:\n autousenames.append(name)\n\n if autousenames:\n self._nodeid_autousenames.setdefault(nodeid or \"\", []).extend(autousenames)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/freeze_support.py__Provides_a_function_to_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/freeze_support.py__Provides_a_function_to_", "embedding": null, "metadata": {"file_path": "src/_pytest/freeze_support.py", "file_name": "freeze_support.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 47, "span_ids": ["freeze_includes", "docstring", "imports", "_iter_all_modules"], "tokens": 316}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Provides a function to report all internal modules for using freezing\ntools.\"\"\"\nimport types\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Union\n\n\ndef freeze_includes() -> List[str]:\n \"\"\"Return a list of module names used by pytest that should be\n included by 
cx_freeze.\"\"\"\n import py\n import _pytest\n\n result = list(_iter_all_modules(py))\n result += list(_iter_all_modules(_pytest))\n return result\n\n\ndef _iter_all_modules(\n package: Union[str, types.ModuleType],\n prefix: str = \"\",\n) -> Iterator[str]:\n \"\"\"Iterate over the names of all modules that can be found in the given\n package, recursively.\n\n >>> import _pytest\n >>> list(_iter_all_modules(_pytest))\n ['_pytest._argcomplete', '_pytest._code.code', ...]\n \"\"\"\n import os\n import pkgutil\n\n if isinstance(package, str):\n path = package\n else:\n # Type ignored because typeshed doesn't define ModuleType.__path__\n # (only defined on packages).\n package_path = package.__path__ # type: ignore[attr-defined]\n path, prefix = package_path[0], package.__name__ + \".\"\n for _, name, is_package in pkgutil.iter_modules([path]):\n if is_package:\n for m in _iter_all_modules(os.path.join(path, name), prefix=name + \".\"):\n yield prefix + m\n else:\n yield prefix + name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py__Version_info_help_mes_HelpAction.__call__.if_getattr_parser__parser.raise_PrintHelp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/helpconfig.py__Version_info_help_mes_HelpAction.__call__.if_getattr_parser__parser.raise_PrintHelp", "embedding": null, "metadata": {"file_path": "src/_pytest/helpconfig.py", "file_name": "helpconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 43, "span_ids": ["HelpAction", "docstring", "imports", "HelpAction.__call__"], "tokens": 278}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Version info, help messages, tracing configuration.\"\"\"\nimport os\nimport sys\nfrom argparse import Action\nfrom typing import List\nfrom typing import Optional\nfrom typing import Union\n\nimport py\n\nimport pytest\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config import PrintHelp\nfrom _pytest.config.argparsing import Parser\n\n\nclass HelpAction(Action):\n \"\"\"An argparse Action that will raise an exception in order to skip the\n rest of the argument parsing when --help is passed.\n\n This prevents argparse from quitting due to missing required arguments\n when any are defined, for example by ``pytest_addoption``.\n This is similar to the way that the builtin argparse --help option is\n implemented by raising SystemExit.\n \"\"\"\n\n def __init__(self, option_strings, dest=None, default=False, help=None):\n super().__init__(\n option_strings=option_strings,\n dest=dest,\n const=True,\n default=default,\n nargs=0,\n help=help,\n )\n\n def __call__(self, parser, namespace, values, option_string=None):\n setattr(namespace, self.dest, self.const)\n\n # We should only skip the rest of the parsing after preparse is done.\n if getattr(parser._parser, \"after_preparse\", False):\n raise PrintHelp", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py__Hook_specifications_fo_pytest_plugin_registered._A_new_pytest_plugin_go": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py__Hook_specifications_fo_pytest_plugin_registered._A_new_pytest_plugin_go", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 77, "span_ids": ["impl", "impl:2", "docstring", "pytest_addhooks", "imports:14", "imports", "pytest_plugin_registered"], "tokens": 513}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Hook specifications for pytest plugins which are invoked by pytest itself\nand by builtin plugins.\"\"\"\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Dict\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport py.path\nfrom pluggy import HookspecMarker\n\nfrom _pytest.deprecated import WARNING_CAPTURED_HOOK\n\nif TYPE_CHECKING:\n import pdb\n import warnings\n from typing_extensions import Literal\n\n from _pytest._code.code import ExceptionRepr\n from _pytest.code import ExceptionInfo\n from _pytest.config import Config\n from _pytest.config import ExitCode\n from _pytest.config import PytestPluginManager\n from _pytest.config import _PluggyPlugin\n from _pytest.config.argparsing import Parser\n from _pytest.fixtures import FixtureDef\n from _pytest.fixtures import SubRequest\n from _pytest.main import Session\n from _pytest.nodes import Collector\n from _pytest.nodes import Item\n from _pytest.outcomes import Exit\n from _pytest.python import Function\n from _pytest.python import Metafunc\n from _pytest.python import Module\n from _pytest.python import PyCollector\n from _pytest.reports import CollectReport\n from _pytest.reports import TestReport\n from _pytest.runner import CallInfo\n from _pytest.terminal import TerminalReporter\n\n\nhookspec = HookspecMarker(\"pytest\")\n\n# -------------------------------------------------------------------------\n# Initialization hooks called for every plugin\n# -------------------------------------------------------------------------\n\n\n@hookspec(historic=True)\ndef pytest_addhooks(pluginmanager: \"PytestPluginManager\") -> None:\n \"\"\"Called at plugin registration time to allow adding new hooks via a call to\n ``pluginmanager.add_hookspecs(module_or_class, prefix)``.\n\n :param _pytest.config.PytestPluginManager pluginmanager: pytest plugin manager.\n\n .. note::\n This hook is incompatible with ``hookwrapper=True``.\n \"\"\"\n\n\n@hookspec(historic=True)\ndef pytest_plugin_registered(\n plugin: \"_PluggyPlugin\", manager: \"PytestPluginManager\"\n) -> None:\n \"\"\"A new pytest plugin got registered.\n\n :param plugin: The plugin module or instance.\n :param _pytest.config.PytestPluginManager manager: pytest plugin manager.\n\n .. 
note::\n This hook is incompatible with ``hookwrapper=True``.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_addoption_pytest_addoption._Register_argparse_styl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_addoption_pytest_addoption._Register_argparse_styl", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 80, "end_line": 116, "span_ids": ["pytest_addoption"], "tokens": 379}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(historic=True)\ndef pytest_addoption(parser: \"Parser\", pluginmanager: \"PytestPluginManager\") -> None:\n \"\"\"Register argparse-style options and ini-style config values,\n called once at the beginning of a test run.\n\n .. note::\n\n This function should be implemented only in plugins or ``conftest.py``\n files situated at the tests root directory due to how pytest\n :ref:`discovers plugins during startup `.\n\n :param _pytest.config.argparsing.Parser parser:\n To add command line options, call\n :py:func:`parser.addoption(...) <_pytest.config.argparsing.Parser.addoption>`.\n To add ini-file values call :py:func:`parser.addini(...)\n <_pytest.config.argparsing.Parser.addini>`.\n\n :param _pytest.config.PytestPluginManager pluginmanager:\n pytest plugin manager, which can be used to install :py:func:`hookspec`'s\n or :py:func:`hookimpl`'s and allow one plugin to call another plugin's hooks\n to change how command line options are added.\n\n Options can later be accessed through the\n :py:class:`config <_pytest.config.Config>` object, respectively:\n\n - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to\n retrieve the value of a command line option.\n\n - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve\n a value read from an ini-style file.\n\n The config object is passed around on many internal objects via the ``.config``\n attribute or can be retrieved as the ``pytestconfig`` fixture.\n\n .. 
note::\n This hook is incompatible with ``hookwrapper=True``.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_configure_pytest_configure._Allow_plugins_and_conf": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_configure_pytest_configure._Allow_plugins_and_conf", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 119, "end_line": 133, "span_ids": ["pytest_configure"], "tokens": 110}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(historic=True)\ndef pytest_configure(config: \"Config\") -> None:\n \"\"\"Allow plugins and conftest files to perform initial configuration.\n\n This hook is called for every plugin and initial conftest file\n after command line options have been parsed.\n\n After that, the hook is called for other conftest files as they are\n imported.\n\n .. note::\n This hook is incompatible with ``hookwrapper=True``.\n\n :param _pytest.config.Config config: The pytest config object.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_4_pytest_cmdline_parse._Return_an_initialized_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_4_pytest_cmdline_parse._Return_an_initialized_", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 136, "end_line": 157, "span_ids": ["pytest_configure", "pytest_cmdline_parse"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# Bootstrapping hooks called for plugins registered early enough:\n# internal and 3rd party plugins.\n# -------------------------------------------------------------------------\n\n\n@hookspec(firstresult=True)\ndef pytest_cmdline_parse(\n pluginmanager: \"PytestPluginManager\", args: List[str]\n) -> Optional[\"Config\"]:\n \"\"\"Return an initialized config object, parsing the specified args.\n\n Stops at first non-None result, see :ref:`firstresult`.\n\n .. 
note::\n This hook will only be called for plugin classes passed to the\n ``plugins`` arg when using `pytest.main`_ to perform an in-process\n test run.\n\n :param _pytest.config.PytestPluginManager pluginmanager: Pytest plugin manager.\n :param List[str] args: List of arguments passed on the command line.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_cmdline_main_pytest_cmdline_main._Called_for_performing_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_cmdline_main_pytest_cmdline_main._Called_for_performing_", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 174, "end_line": 185, "span_ids": ["pytest_cmdline_main"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(firstresult=True)\ndef pytest_cmdline_main(config: \"Config\") -> Optional[Union[\"ExitCode\", int]]:\n \"\"\"Called for performing the main command line action. The default\n implementation will invoke the configure hooks and runtest_mainloop.\n\n .. note::\n This hook will not be called for ``conftest.py`` files, only for setuptools plugins.\n\n Stops at first non-None result, see :ref:`firstresult`.\n\n :param _pytest.config.Config config: The pytest config object.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_load_initial_conftests_pytest_load_initial_conftests._Called_to_implement_th": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_load_initial_conftests_pytest_load_initial_conftests._Called_to_implement_th", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 188, "end_line": 200, "span_ids": ["pytest_load_initial_conftests"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_load_initial_conftests(\n early_config: \"Config\", parser: \"Parser\", args: List[str]\n) -> None:\n \"\"\"Called to implement the loading of initial conftest files ahead\n of command line option parsing.\n\n .. 
note::\n This hook will not be called for ``conftest.py`` files, only for setuptools plugins.\n\n :param _pytest.config.Config early_config: The pytest config object.\n :param List[str] args: Arguments passed on the command line.\n :param _pytest.config.argparsing.Parser parser: To add command line options.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_collect_file_pytest_collect_file._Create_a_Collector_for": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_collect_file_pytest_collect_file._Create_a_Collector_for", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 285, "end_line": 298, "span_ids": ["pytest_collect_file"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_collect_file(\n fspath: Path, path: py.path.local, parent: \"Collector\"\n) -> \"Optional[Collector]\":\n \"\"\"Create a Collector for the given path, or None if not relevant.\n\n The new node needs to have the specified ``parent`` as a parent.\n\n :param pathlib.Path fspath: The path to analyze.\n :param py.path.local path: The path to collect.\n\n .. versionchanged:: 6.3.0\n The ``fspath`` parameter was added as a :class:`pathlib.Path`\n equivalent of the ``path`` parameter.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py__logging_hooks_for_colle_pytest_make_collect_report._Perform_collector_co": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py__logging_hooks_for_colle_pytest_make_collect_report._Perform_collector_co", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 301, "end_line": 328, "span_ids": ["pytest_itemcollected", "pytest_collectreport", "pytest_make_collect_report", "pytest_collect_file", "pytest_collectstart", "pytest_deselected"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# logging hooks for collection\n\n\ndef pytest_collectstart(collector: \"Collector\") -> None:\n \"\"\"Collector starts collecting.\"\"\"\n\n\ndef pytest_itemcollected(item: \"Item\") -> None:\n \"\"\"We just collected a test item.\"\"\"\n\n\ndef pytest_collectreport(report: \"CollectReport\") -> None:\n \"\"\"Collector finished collecting.\"\"\"\n\n\ndef pytest_deselected(items: Sequence[\"Item\"]) -> None:\n \"\"\"Called for deselected test 
items, e.g. by keyword.\n\n May be called multiple times.\n \"\"\"\n\n\n@hookspec(firstresult=True)\ndef pytest_make_collect_report(collector: \"Collector\") -> \"Optional[CollectReport]\":\n \"\"\"Perform ``collector.collect()`` and return a CollectReport.\n\n Stops at first non-None result, see :ref:`firstresult`.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_pycollect_makeitem_pytest_generate_tests._Generate_multiple_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_pycollect_makeitem_pytest_generate_tests._Generate_multiple_pa", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 357, "end_line": 376, "span_ids": ["pytest_pyfunc_call", "pytest_pycollect_makeitem", "pytest_generate_tests"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(firstresult=True)\ndef pytest_pycollect_makeitem(\n collector: \"PyCollector\", name: str, obj: object\n) -> Union[None, \"Item\", \"Collector\", List[Union[\"Item\", \"Collector\"]]]:\n \"\"\"Return a custom item/collector for a Python object in a module, or None.\n\n Stops at first non-None result, see :ref:`firstresult`.\n \"\"\"\n\n\n@hookspec(firstresult=True)\ndef pytest_pyfunc_call(pyfuncitem: \"Function\") -> Optional[object]:\n \"\"\"Call underlying test function.\n\n Stops at first non-None result, see :ref:`firstresult`.\n \"\"\"\n\n\ndef pytest_generate_tests(metafunc: \"Metafunc\") -> None:\n \"\"\"Generate (multiple) parametrized calls to a test function.\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_15_pytest_runtestloop._Perform_the_main_runte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_15_pytest_runtestloop._Perform_the_main_runte", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 397, "end_line": 420, "span_ids": ["pytest_make_parametrize_id", "pytest_runtestloop"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# runtest related hooks\n# -------------------------------------------------------------------------\n\n\n@hookspec(firstresult=True)\ndef pytest_runtestloop(session: \"Session\") -> Optional[object]:\n \"\"\"Perform the main runtest loop 
(after collection finished).\n\n The default hook implementation performs the runtest protocol for all items\n collected in the session (``session.items``), unless the collection failed\n or the ``collectonly`` pytest option is set.\n\n If at any point :py:func:`pytest.exit` is called, the loop is\n terminated immediately.\n\n If at any point ``session.shouldfail`` or ``session.shouldstop`` are set, the\n loop is terminated after the runtest protocol for the current item is finished.\n\n :param pytest.Session session: The pytest session object.\n\n Stops at first non-None result, see :ref:`firstresult`.\n The return value is not used, but only stops further processing.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_protocol_pytest_runtest_protocol._Perform_the_runtest_pr": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_protocol_pytest_runtest_protocol._Perform_the_runtest_pr", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 423, "end_line": 458, "span_ids": ["pytest_runtest_protocol"], "tokens": 415}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(firstresult=True)\ndef pytest_runtest_protocol(\n item: \"Item\", nextitem: \"Optional[Item]\"\n) -> Optional[object]:\n \"\"\"Perform the runtest protocol for a single test item.\n\n The default runtest protocol is this (see individual hooks for full details):\n\n - ``pytest_runtest_logstart(nodeid, location)``\n\n - Setup phase:\n - ``call = pytest_runtest_setup(item)`` (wrapped in ``CallInfo(when=\"setup\")``)\n - ``report = pytest_runtest_makereport(item, call)``\n - ``pytest_runtest_logreport(report)``\n - ``pytest_exception_interact(call, report)`` if an interactive exception occurred\n\n - Call phase, if the the setup passed and the ``setuponly`` pytest option is not set:\n - ``call = pytest_runtest_call(item)`` (wrapped in ``CallInfo(when=\"call\")``)\n - ``report = pytest_runtest_makereport(item, call)``\n - ``pytest_runtest_logreport(report)``\n - ``pytest_exception_interact(call, report)`` if an interactive exception occurred\n\n - Teardown phase:\n - ``call = pytest_runtest_teardown(item, nextitem)`` (wrapped in ``CallInfo(when=\"teardown\")``)\n - ``report = pytest_runtest_makereport(item, call)``\n - ``pytest_runtest_logreport(report)``\n - ``pytest_exception_interact(call, report)`` if an interactive exception occurred\n\n - ``pytest_runtest_logfinish(nodeid, location)``\n\n :param item: Test item for which the runtest protocol is performed.\n :param nextitem: The scheduled-to-be-next test item (or None if this is the end my friend).\n\n Stops at first non-None result, see :ref:`firstresult`.\n The return value is not used, but only stops further processing.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": 
"\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_logreport_pytest_report_from_serializable._Restore_a_report_objec": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_runtest_logreport_pytest_report_from_serializable._Restore_a_report_objec", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 533, "end_line": 555, "span_ids": ["pytest_runtest_logreport", "pytest_report_to_serializable", "pytest_report_from_serializable"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_runtest_logreport(report: \"TestReport\") -> None:\n \"\"\"Process the :py:class:`_pytest.reports.TestReport` produced for each\n of the setup, call and teardown runtest phases of an item.\n\n See :func:`pytest_runtest_protocol` for a description of the runtest protocol.\n \"\"\"\n\n\n@hookspec(firstresult=True)\ndef pytest_report_to_serializable(\n config: \"Config\",\n report: Union[\"CollectReport\", \"TestReport\"],\n) -> Optional[Dict[str, Any]]:\n \"\"\"Serialize the given report object into a data structure suitable for\n sending over the wire, e.g. converted to JSON.\"\"\"\n\n\n@hookspec(firstresult=True)\ndef pytest_report_from_serializable(\n config: \"Config\",\n data: Dict[str, Any],\n) -> Optional[Union[\"CollectReport\", \"TestReport\"]]:\n \"\"\"Restore a report object previously serialized with pytest_report_to_serializable().\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_18_pytest_fixture_setup._Perform_fixture_setup_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_18_pytest_fixture_setup._Perform_fixture_setup_", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 558, "end_line": 577, "span_ids": ["pytest_fixture_setup", "pytest_report_from_serializable"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# Fixture related hooks\n# -------------------------------------------------------------------------\n\n\n@hookspec(firstresult=True)\ndef pytest_fixture_setup(\n fixturedef: \"FixtureDef[Any]\", request: \"SubRequest\"\n) -> Optional[object]:\n \"\"\"Perform fixture setup execution.\n\n :returns: The return value of the call to the fixture function.\n\n Stops at first non-None result, see :ref:`firstresult`.\n\n .. 
note::\n If the fixture function returns None, other implementations of\n this hook function will continue to be called, according to the\n behavior of the :ref:`firstresult` option.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_assertion_pass_pytest_assertion_pass._Experimental_Cal": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_assertion_pass_pytest_assertion_pass._Experimental_Cal", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 638, "end_line": 670, "span_ids": ["pytest_assertion_pass"], "tokens": 270}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_assertion_pass(item: \"Item\", lineno: int, orig: str, expl: str) -> None:\n \"\"\"**(Experimental)** Called whenever an assertion passes.\n\n .. versionadded:: 5.0\n\n Use this hook to do some processing after a passing assertion.\n The original assertion information is available in the `orig` string\n and the pytest introspected assertion information is available in the\n `expl` string.\n\n This hook must be explicitly enabled by the ``enable_assertion_pass_hook``\n ini-file option:\n\n .. code-block:: ini\n\n [pytest]\n enable_assertion_pass_hook=true\n\n You need to **clean the .pyc** files in your project directory and interpreter libraries\n when enabling this option, as assertions will require to be re-written.\n\n :param pytest.Item item: pytest item object of current test.\n :param int lineno: Line number of the assert statement.\n :param str orig: String with the original assertion.\n :param str expl: String with the assert explanation.\n\n .. 
note::\n\n This hook is **experimental**, so its parameters or even the hook itself might\n be changed/removed without warning in any future pytest release.\n\n If you find this hook useful, please share your feedback in an issue.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_27_pytest_report_header._Return_a_string_or_lis": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_27_pytest_report_header._Return_a_string_or_lis", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 673, "end_line": 703, "span_ids": ["pytest_assertion_pass", "pytest_report_header"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# Hooks for influencing reporting (invoked from _pytest_terminal).\n# -------------------------------------------------------------------------\n\n\ndef pytest_report_header(\n config: \"Config\", startpath: Path, startdir: py.path.local\n) -> Union[str, List[str]]:\n \"\"\"Return a string or list of strings to be displayed as header info for terminal reporting.\n\n :param _pytest.config.Config config: The pytest config object.\n :param Path startpath: The starting dir.\n :param py.path.local startdir: The starting dir.\n\n .. note::\n\n Lines returned by a plugin are displayed before those of plugins which\n ran before it.\n If you want to have your line(s) displayed first, use\n :ref:`trylast=True `.\n\n .. note::\n\n This function should be implemented only in plugins or ``conftest.py``\n files situated at the tests root directory due to how pytest\n :ref:`discovers plugins during startup `.\n\n .. 
versionchanged:: 6.3.0\n The ``startpath`` parameter was added as a :class:`pathlib.Path`\n equivalent of the ``startdir`` parameter.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_report_collectionfinish_pytest_report_collectionfinish._Return_a_string_or_lis": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_report_collectionfinish_pytest_report_collectionfinish._Return_a_string_or_lis", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 706, "end_line": 734, "span_ids": ["pytest_report_collectionfinish"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_report_collectionfinish(\n config: \"Config\",\n startpath: Path,\n startdir: py.path.local,\n items: Sequence[\"Item\"],\n) -> Union[str, List[str]]:\n \"\"\"Return a string or list of strings to be displayed after collection\n has finished successfully.\n\n These strings will be displayed after the standard \"collected X items\" message.\n\n .. versionadded:: 3.2\n\n :param _pytest.config.Config config: The pytest config object.\n :param Path startpath: The starting path.\n :param py.path.local startdir: The starting dir.\n :param items: List of pytest items that are going to be executed; this list should not be modified.\n\n .. note::\n\n Lines returned by a plugin are displayed before those of plugins which\n ran before it.\n If you want to have your line(s) displayed first, use\n :ref:`trylast=True `.\n\n .. 
versionchanged:: 6.3.0\n The ``startpath`` parameter was added as a :class:`pathlib.Path`\n equivalent of the ``startdir`` parameter.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_warning_recorded_pytest_warning_recorded._Process_a_warning_capt": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_pytest_warning_recorded_pytest_warning_recorded._Process_a_warning_capt", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 815, "end_line": 844, "span_ids": ["pytest_warning_recorded"], "tokens": 259}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookspec(historic=True)\ndef pytest_warning_recorded(\n warning_message: \"warnings.WarningMessage\",\n when: \"Literal['config', 'collect', 'runtest']\",\n nodeid: str,\n location: Optional[Tuple[str, int, str]],\n) -> None:\n \"\"\"Process a warning captured by the internal pytest warnings plugin.\n\n :param warnings.WarningMessage warning_message:\n The captured warning. This is the same object produced by :py:func:`warnings.catch_warnings`, and contains\n the same attributes as the parameters of :py:func:`warnings.showwarning`.\n\n :param str when:\n Indicates when the warning was captured. Possible values:\n\n * ``\"config\"``: during pytest configuration/initialization stage.\n * ``\"collect\"``: during test collection.\n * ``\"runtest\"``: during test execution.\n\n :param str nodeid:\n Full id of the item.\n\n :param tuple|None location:\n When available, holds information about the execution context of the captured\n warning (filename, linenumber, function). ``function`` evaluates to \n when the execution context is at the module level.\n\n .. 
versionadded:: 6.0\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_30_pytest_markeval_namespace._Called_when_constructi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_30_pytest_markeval_namespace._Called_when_constructi", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 847, "end_line": 865, "span_ids": ["pytest_warning_recorded", "pytest_markeval_namespace"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# Hooks for influencing skipping\n# -------------------------------------------------------------------------\n\n\ndef pytest_markeval_namespace(config: \"Config\") -> Dict[str, Any]:\n \"\"\"Called when constructing the globals dictionary used for\n evaluating string conditions in xfail/skipif markers.\n\n This is useful when the condition for a marker requires\n objects that are expensive or impossible to obtain during\n collection time, which is required by normal boolean\n conditions.\n\n .. versionadded:: 6.2\n\n :param _pytest.config.Config config: The pytest config object.\n :returns: A dictionary of additional globals to add.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_33_pytest_keyboard_interrupt._Called_for_keyboard_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/hookspec.py_None_33_pytest_keyboard_interrupt._Called_for_keyboard_in", "embedding": null, "metadata": {"file_path": "src/_pytest/hookspec.py", "file_name": "hookspec.py", "file_type": "text/x-python", "category": "implementation", "start_line": 868, "end_line": 887, "span_ids": ["pytest_markeval_namespace", "pytest_internalerror", "pytest_keyboard_interrupt"], "tokens": 108}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# -------------------------------------------------------------------------\n# error handling and internal debugging hooks\n# -------------------------------------------------------------------------\n\n\ndef pytest_internalerror(\n excrepr: \"ExceptionRepr\",\n excinfo: \"ExceptionInfo[BaseException]\",\n) -> Optional[bool]:\n \"\"\"Called for internal errors.\n\n Return True to suppress the fallback handling of printing an\n INTERNALERROR message directly to sys.stderr.\n \"\"\"\n\n\ndef pytest_keyboard_interrupt(\n excinfo: \"ExceptionInfo[Union[KeyboardInterrupt, Exit]]\",\n) 
-> None:\n \"\"\"Called for keyboard interrupt.\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__Report_test_results_in_xml_key.StoreKey_LogXML_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__Report_test_results_in_xml_key.StoreKey_LogXML_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 37, "span_ids": ["impl", "docstring", "imports"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Report test results in JUnit-XML format, for use with Jenkins and build\nintegration servers.\n\nBased on initial code from Ross Lawley.\n\nOutput conforms to\nhttps://github.com/jenkinsci/xunit-plugin/blob/master/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd\n\"\"\"\nimport functools\nimport os\nimport platform\nimport re\nimport xml.etree.ElementTree as ET\nfrom datetime import datetime\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import List\nfrom typing import Match\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import Union\n\nimport pytest\nfrom _pytest import nodes\nfrom _pytest import timing\nfrom _pytest._code.code import ExceptionRepr\nfrom _pytest._code.code import ReprFileLocation\nfrom _pytest.config import Config\nfrom _pytest.config import filename_arg\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.reports import TestReport\nfrom _pytest.store import StoreKey\nfrom _pytest.terminal import TerminalReporter\n\n\nxml_key = StoreKey[\"LogXML\"]()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_bin_xml_escape_bin_xml_escape.return.re_sub_illegal_xml_re_re": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_bin_xml_escape_bin_xml_escape.return.re_sub_illegal_xml_re_re", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 40, "end_line": 64, "span_ids": ["bin_xml_escape"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def bin_xml_escape(arg: object) -> str:\n r\"\"\"Visually escape invalid XML characters.\n\n For example, transforms\n 'hello\\aworld\\b'\n into\n 'hello#x07world#x08'\n Note that the #xABs are *not* XML escapes - missing the ampersand «.\n The idea is to escape visually for the 
user rather than for XML itself.\n \"\"\"\n\n def repl(matchobj: Match[str]) -> str:\n i = ord(matchobj.group())\n if i <= 0xFF:\n return \"#x%02X\" % i\n else:\n return \"#x%04X\" % i\n\n # The spec range of valid chars is:\n # Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]\n # For an unknown(?) reason, we disallow #x7F (DEL) as well.\n illegal_xml_re = (\n \"[^\\u0009\\u000A\\u000D\\u0020-\\u007E\\u0080-\\uD7FF\\uE000-\\uFFFD\\u10000-\\u10FFFF]\"\n )\n return re.sub(illegal_xml_re, repl, str(arg))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_merge_family_families_xunit2_fami": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py_merge_family_families_xunit2_fami", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 67, "end_line": 86, "span_ids": ["impl:3", "merge_family"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def merge_family(left, right) -> None:\n result = {}\n for kl, vl in left.items():\n for kr, vr in right.items():\n if not isinstance(vl, list):\n raise TypeError(type(vl))\n result[kl] = vl + vr\n left.update(result)\n\n\nfamilies = {}\nfamilies[\"_base\"] = {\"testcase\": [\"classname\", \"name\"]}\nfamilies[\"_base_legacy\"] = {\"testcase\": [\"file\", \"line\", \"url\"]}\n\n# xUnit 1.x inherits legacy attributes.\nfamilies[\"xunit1\"] = families[\"_base\"].copy()\nmerge_family(families[\"xunit1\"], families[\"_base_legacy\"])\n\n# xUnit 2.x uses strict base attributes.\nfamilies[\"xunit2\"] = families[\"_base\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter__NodeReporter.make_properties_node.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter__NodeReporter.make_properties_node.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 89, "end_line": 117, "span_ids": ["_NodeReporter.add_attribute", "_NodeReporter", "_NodeReporter.add_property", "_NodeReporter.make_properties_node", "_NodeReporter.append"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _NodeReporter:\n def __init__(self, nodeid: Union[str, TestReport], xml: \"LogXML\") -> None:\n self.id = nodeid\n self.xml = xml\n self.add_stats = self.xml.add_stats\n self.family = 
self.xml.family\n self.duration = 0\n self.properties: List[Tuple[str, str]] = []\n self.nodes: List[ET.Element] = []\n self.attrs: Dict[str, str] = {}\n\n def append(self, node: ET.Element) -> None:\n self.xml.add_stats(node.tag)\n self.nodes.append(node)\n\n def add_property(self, name: str, value: object) -> None:\n self.properties.append((str(name), bin_xml_escape(value)))\n\n def add_attribute(self, name: str, value: object) -> None:\n self.attrs[str(name)] = bin_xml_escape(value)\n\n def make_properties_node(self) -> Optional[ET.Element]:\n \"\"\"Return a Junit node containing custom properties, if any.\"\"\"\n if self.properties:\n properties = ET.Element(\"properties\")\n for name, value in self.properties:\n properties.append(ET.Element(\"property\", name=name, value=value))\n return properties\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.to_xml__NodeReporter._add_simple.self_append_node_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.to_xml__NodeReporter._add_simple.self_append_node_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 149, "end_line": 160, "span_ids": ["_NodeReporter._add_simple", "_NodeReporter.to_xml"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _NodeReporter:\n\n def to_xml(self) -> ET.Element:\n testcase = ET.Element(\"testcase\", self.attrs, time=\"%.3f\" % self.duration)\n properties = self.make_properties_node()\n if properties is not None:\n testcase.append(properties)\n testcase.extend(self.nodes)\n return testcase\n\n def _add_simple(self, tag: str, message: str, data: Optional[str] = None) -> None:\n node = ET.Element(tag, message=message)\n node.text = bin_xml_escape(data)\n self.append(node)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.write_captured_output__NodeReporter.write_captured_output.if_content_all_.self__write_content_repor": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.write_captured_output__NodeReporter.write_captured_output.if_content_all_.self__write_content_repor", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 162, "end_line": 183, "span_ids": ["_NodeReporter.write_captured_output"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class _NodeReporter:\n\n def write_captured_output(self, report: TestReport) -> None:\n if not self.xml.log_passing_tests and report.passed:\n return\n\n content_out = report.capstdout\n content_log = report.caplog\n content_err = report.capstderr\n if self.xml.logging == \"no\":\n return\n content_all = \"\"\n if self.xml.logging in [\"log\", \"all\"]:\n content_all = self._prepare_content(content_log, \" Captured Log \")\n if self.xml.logging in [\"system-out\", \"out-err\", \"all\"]:\n content_all += self._prepare_content(content_out, \" Captured Out \")\n self._write_content(report, content_all, \"system-out\")\n content_all = \"\"\n if self.xml.logging in [\"system-err\", \"out-err\", \"all\"]:\n content_all += self._prepare_content(content_err, \" Captured Err \")\n self._write_content(report, content_all, \"system-err\")\n content_all = \"\"\n if content_all:\n self._write_content(report, content_all, \"system-out\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter._prepare_content__NodeReporter.append_failure.if_hasattr_report_wasxf.else_.self__add_simple_failure": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter._prepare_content__NodeReporter.append_failure.if_hasattr_report_wasxf.else_.self__add_simple_failure", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 185, "end_line": 210, "span_ids": ["_NodeReporter.append_failure", "_NodeReporter._write_content", "_NodeReporter.append_pass", "_NodeReporter._prepare_content"], "tokens": 259}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _NodeReporter:\n\n def _prepare_content(self, content: str, header: str) -> str:\n return \"\\n\".join([header.center(80, \"-\"), content, \"\"])\n\n def _write_content(self, report: TestReport, content: str, jheader: str) -> None:\n tag = ET.Element(jheader)\n tag.text = bin_xml_escape(content)\n self.append(tag)\n\n def append_pass(self, report: TestReport) -> None:\n self.add_stats(\"passed\")\n\n def append_failure(self, report: TestReport) -> None:\n # msg = str(report.longrepr.reprtraceback.extraline)\n if hasattr(report, \"wasxfail\"):\n self._add_simple(\"skipped\", \"xfail-marked test passes unexpectedly\")\n else:\n assert report.longrepr is not None\n reprcrash: Optional[ReprFileLocation] = getattr(\n report.longrepr, \"reprcrash\", None\n )\n if reprcrash is not None:\n message = reprcrash.message\n else:\n message = str(report.longrepr)\n message = bin_xml_escape(message)\n self._add_simple(\"failure\", message, str(report.longrepr))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.append_collect_error__NodeReporter.append_error.self__add_simple_error_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.append_collect_error__NodeReporter.append_error.self__add_simple_error_", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 212, "end_line": 234, "span_ids": ["_NodeReporter.append_collect_skipped", "_NodeReporter.append_error", "_NodeReporter.append_collect_error"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _NodeReporter:\n\n def append_collect_error(self, report: TestReport) -> None:\n # msg = str(report.longrepr.reprtraceback.extraline)\n assert report.longrepr is not None\n self._add_simple(\"error\", \"collection failure\", str(report.longrepr))\n\n def append_collect_skipped(self, report: TestReport) -> None:\n self._add_simple(\"skipped\", \"collection skipped\", str(report.longrepr))\n\n def append_error(self, report: TestReport) -> None:\n assert report.longrepr is not None\n reprcrash: Optional[ReprFileLocation] = getattr(\n report.longrepr, \"reprcrash\", None\n )\n if reprcrash is not None:\n reason = reprcrash.message\n else:\n reason = str(report.longrepr)\n\n if report.when == \"teardown\":\n msg = f'failed on teardown with \"{reason}\"'\n else:\n msg = f'failed on setup with \"{reason}\"'\n self._add_simple(\"error\", msg, str(report.longrepr))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.append_skipped__NodeReporter.finalize._type_ignore_assignment": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/junitxml.py__NodeReporter.append_skipped__NodeReporter.finalize._type_ignore_assignment", "embedding": null, "metadata": {"file_path": "src/_pytest/junitxml.py", "file_name": "junitxml.py", "file_type": "text/x-python", "category": "implementation", "start_line": 236, "end_line": 261, "span_ids": ["_NodeReporter.finalize", "_NodeReporter.append_skipped"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _NodeReporter:\n\n def append_skipped(self, report: TestReport) -> None:\n if hasattr(report, \"wasxfail\"):\n xfailreason = report.wasxfail\n if xfailreason.startswith(\"reason: \"):\n xfailreason = xfailreason[8:]\n xfailreason = bin_xml_escape(xfailreason)\n skipped = ET.Element(\"skipped\", type=\"pytest.xfail\", message=xfailreason)\n self.append(skipped)\n else:\n assert isinstance(report.longrepr, tuple)\n filename, lineno, skipreason = report.longrepr\n if skipreason.startswith(\"Skipped: \"):\n skipreason = skipreason[9:]\n details = 
f\"{filename}:{lineno}: {skipreason}\"\n\n skipped = ET.Element(\"skipped\", type=\"pytest.skip\", message=skipreason)\n skipped.text = bin_xml_escape(details)\n self.append(skipped)\n self.write_captured_output(report)\n\n def finalize(self) -> None:\n data = self.to_xml()\n self.__dict__.clear()\n # Type ignored becuase mypy doesn't like overriding a method.\n # Also the return value doesn't match...\n self.to_xml = lambda: data # type: ignore[assignment]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__Access_and_control_log__remove_ansi_escape_sequences.return._ANSI_ESCAPE_SEQ_sub_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/logging.py__Access_and_control_log__remove_ansi_escape_sequences.return._ANSI_ESCAPE_SEQ_sub_", "embedding": null, "metadata": {"file_path": "src/_pytest/logging.py", "file_name": "logging.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 46, "span_ids": ["impl", "docstring", "imports", "_remove_ansi_escape_sequences"], "tokens": 338}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Access and control log capturing.\"\"\"\nimport logging\nimport os\nimport re\nimport sys\nfrom contextlib import contextmanager\nfrom io import StringIO\nfrom pathlib import Path\nfrom typing import AbstractSet\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import TypeVar\nfrom typing import Union\n\nfrom _pytest import nodes\nfrom _pytest._io import TerminalWriter\nfrom _pytest.capture import CaptureManager\nfrom _pytest.compat import final\nfrom _pytest.compat import nullcontext\nfrom _pytest.config import _strtobool\nfrom _pytest.config import Config\nfrom _pytest.config import create_terminal_writer\nfrom _pytest.config import hookimpl\nfrom _pytest.config import UsageError\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.fixtures import fixture\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.main import Session\nfrom _pytest.store import StoreKey\nfrom _pytest.terminal import TerminalReporter\n\n\nDEFAULT_LOG_FORMAT = \"%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s\"\nDEFAULT_LOG_DATE_FORMAT = \"%H:%M:%S\"\n_ANSI_ESCAPE_SEQ = re.compile(r\"\\x1b\\[[\\d;]+m\")\ncaplog_handler_key = StoreKey[\"LogCaptureHandler\"]()\ncaplog_records_key = StoreKey[Dict[str, List[logging.LogRecord]]]()\n\n\ndef _remove_ansi_escape_sequences(text: str) -> str:\n return _ANSI_ESCAPE_SEQ.sub(\"\", text)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py__Core_implementation_of_if_TYPE_CHECKING_.from_typing_extensions_im": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py__Core_implementation_of_if_TYPE_CHECKING_.from_typing_extensions_im", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 49, "span_ids": ["impl", "imports:43", "docstring", "imports"], "tokens": 294}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Core implementation of the testing process: init, session, runtest loop.\"\"\"\nimport argparse\nimport fnmatch\nimport functools\nimport importlib\nimport os\nimport sys\nfrom pathlib import Path\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import FrozenSet\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport attr\nimport py\n\nimport _pytest._code\nfrom _pytest import nodes\nfrom _pytest.compat import final\nfrom _pytest.config import Config\nfrom _pytest.config import directory_arg\nfrom _pytest.config import ExitCode\nfrom _pytest.config import hookimpl\nfrom _pytest.config import PytestPluginManager\nfrom _pytest.config import UsageError\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.fixtures import FixtureManager\nfrom _pytest.outcomes import exit\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pathlib import bestrelpath\nfrom _pytest.pathlib import fnmatch_ex\nfrom _pytest.pathlib import visit\nfrom _pytest.reports import CollectReport\nfrom _pytest.reports import TestReport\nfrom _pytest.runner import collect_one_node\nfrom _pytest.runner import SetupState\n\n\nif TYPE_CHECKING:\n from typing_extensions import Literal", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_addoption_pytest_addoption.None_15": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_addoption_pytest_addoption.None_15", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 52, "end_line": 171, "span_ids": ["pytest_addoption"], "tokens": 771}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n parser.addini(\n \"norecursedirs\",\n \"directory patterns to avoid for recursion\",\n type=\"args\",\n default=[\n \"*.egg\",\n \".*\",\n \"_darcs\",\n \"build\",\n \"CVS\",\n \"dist\",\n \"node_modules\",\n \"venv\",\n \"{arch}\",\n ],\n )\n parser.addini(\n \"testpaths\",\n \"directories to search for tests when no files or directories are 
given in the \"\n \"command line.\",\n type=\"args\",\n default=[],\n )\n group = parser.getgroup(\"general\", \"running and selection options\")\n group._addoption(\n \"-x\",\n \"--exitfirst\",\n action=\"store_const\",\n dest=\"maxfail\",\n const=1,\n help=\"exit instantly on first error or failed test.\",\n )\n group = parser.getgroup(\"pytest-warnings\")\n group.addoption(\n \"-W\",\n \"--pythonwarnings\",\n action=\"append\",\n help=\"set which warnings to report, see -W option of python itself.\",\n )\n parser.addini(\n \"filterwarnings\",\n type=\"linelist\",\n help=\"Each line specifies a pattern for \"\n \"warnings.filterwarnings. \"\n \"Processed after -W/--pythonwarnings.\",\n )\n group._addoption(\n \"--maxfail\",\n metavar=\"num\",\n action=\"store\",\n type=int,\n dest=\"maxfail\",\n default=0,\n help=\"exit after first num failures or errors.\",\n )\n group._addoption(\n \"--strict-config\",\n action=\"store_true\",\n help=\"any warnings encountered while parsing the `pytest` section of the configuration file raise errors.\",\n )\n group._addoption(\n \"--strict-markers\",\n action=\"store_true\",\n help=\"markers not registered in the `markers` section of the configuration file raise errors.\",\n )\n group._addoption(\n \"--strict\",\n action=\"store_true\",\n help=\"(deprecated) alias to --strict-markers.\",\n )\n group._addoption(\n \"-c\",\n metavar=\"file\",\n type=str,\n dest=\"inifilename\",\n help=\"load configuration from `file` instead of trying to locate one of the implicit \"\n \"configuration files.\",\n )\n group._addoption(\n \"--continue-on-collection-errors\",\n action=\"store_true\",\n default=False,\n dest=\"continue_on_collection_errors\",\n help=\"Force test execution even if collection errors occur.\",\n )\n group._addoption(\n \"--rootdir\",\n action=\"store\",\n dest=\"rootdir\",\n help=\"Define root directory for tests. Can be relative path: 'root_dir', './root_dir', \"\n \"'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: \"\n \"'$HOME/root_dir'.\",\n )\n\n group = parser.getgroup(\"collect\", \"collection\")\n group.addoption(\n \"--collectonly\",\n \"--collect-only\",\n \"--co\",\n action=\"store_true\",\n help=\"only collect tests, don't execute them.\",\n )\n group.addoption(\n \"--pyargs\",\n action=\"store_true\",\n help=\"try to interpret all arguments as python packages.\",\n )\n group.addoption(\n \"--ignore\",\n action=\"append\",\n metavar=\"path\",\n help=\"ignore path during collection (multi-allowed).\",\n )\n group.addoption(\n \"--ignore-glob\",\n action=\"append\",\n metavar=\"path\",\n help=\"ignore path pattern during collection (multi-allowed).\",\n )\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_addoption.None_16_pytest_addoption.None_22": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_pytest_addoption.None_16_pytest_addoption.None_22", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 172, "end_line": 228, "span_ids": ["pytest_addoption"], "tokens": 400}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_addoption(parser: Parser) -> None:\n # ... other code\n group.addoption(\n \"--deselect\",\n action=\"append\",\n metavar=\"nodeid_prefix\",\n help=\"deselect item (via node id prefix) during collection (multi-allowed).\",\n )\n group.addoption(\n \"--confcutdir\",\n dest=\"confcutdir\",\n default=None,\n metavar=\"dir\",\n type=functools.partial(directory_arg, optname=\"--confcutdir\"),\n help=\"only load conftest.py's relative to specified dir.\",\n )\n group.addoption(\n \"--noconftest\",\n action=\"store_true\",\n dest=\"noconftest\",\n default=False,\n help=\"Don't load any conftest.py files.\",\n )\n group.addoption(\n \"--keepduplicates\",\n \"--keep-duplicates\",\n action=\"store_true\",\n dest=\"keepduplicates\",\n default=False,\n help=\"Keep duplicate tests.\",\n )\n group.addoption(\n \"--collect-in-virtualenv\",\n action=\"store_true\",\n dest=\"collect_in_virtualenv\",\n default=False,\n help=\"Don't ignore tests in a local virtualenv directory\",\n )\n group.addoption(\n \"--import-mode\",\n default=\"prepend\",\n choices=[\"prepend\", \"append\", \"importlib\"],\n dest=\"importmode\",\n help=\"prepend/append to sys.path when importing test modules and conftest files, \"\n \"default is to prepend.\",\n )\n\n group = parser.getgroup(\"debugconfig\", \"test session debugging and configuration\")\n group.addoption(\n \"--basetemp\",\n dest=\"basetemp\",\n default=None,\n type=validate_basetemp,\n metavar=\"dir\",\n help=(\n \"base temporary directory for this test run.\"\n \"(warning: this directory is removed if it exists)\"\n ),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py__in_venv__in_venv.return.any_fname_name_in_activat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py__in_venv__in_venv.return.any_fname_name_in_activat", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 359, "end_line": 376, "span_ids": ["_in_venv"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _in_venv(path: Path) -> bool:\n \"\"\"Attempt to detect if ``path`` is the root of a Virtual Environment by\n checking for the existence of the appropriate activate script.\"\"\"\n bindir = path.joinpath(\"Scripts\" if sys.platform.startswith(\"win\") else \"bin\")\n try:\n if not bindir.is_dir():\n return False\n except OSError:\n return False\n activates = (\n \"activate\",\n \"activate.csh\",\n \"activate.fish\",\n \"Activate\",\n \"Activate.bat\",\n \"Activate.ps1\",\n )\n return any(fname.name in activates for fname in bindir.iterdir())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_FSHookProxy__bestrelpath_cache.__missing__.return.r": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_FSHookProxy__bestrelpath_cache.__missing__.return.r", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 424, "end_line": 452, "span_ids": ["_bestrelpath_cache.__missing__", "_bestrelpath_cache", "Interrupted", "FSHookProxy.__getattr__", "FSHookProxy", "Failed"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FSHookProxy:\n def __init__(self, pm: PytestPluginManager, remove_mods) -> None:\n self.pm = pm\n self.remove_mods = remove_mods\n\n def __getattr__(self, name: str):\n x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)\n self.__dict__[name] = x\n return x\n\n\nclass Interrupted(KeyboardInterrupt):\n \"\"\"Signals that the test run was interrupted.\"\"\"\n\n __module__ = \"builtins\" # For py3.\n\n\nclass Failed(Exception):\n \"\"\"Signals a stop as failed test run.\"\"\"\n\n\n@attr.s\nclass _bestrelpath_cache(Dict[Path, str]):\n path = attr.ib(type=Path)\n\n def __missing__(self, path: Path) -> str:\n r = bestrelpath(self.path, path)\n self[path] = r\n return r", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session_Session.isinitpath.return.Path_path_in_self__initi": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session_Session.isinitpath.return.Path_path_in_self__initi", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 455, "end_line": 519, "span_ids": ["Session.isinitpath", "Session.pytest_runtest_logreport", "Session.pytest_collectstart", "Session._node_location_to_relpath", "Session:9", "Session.from_config", "Session.__repr__", "Session"], "tokens": 563}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n Interrupted = Interrupted\n Failed = Failed\n # Set on the session by runner.pytest_sessionstart.\n _setupstate: SetupState\n # Set on the session by fixtures.pytest_sessionstart.\n _fixturemanager: FixtureManager\n exitstatus: Union[int, ExitCode]\n\n def __init__(self, config: Config) -> None:\n super().__init__(\n config.rootdir, parent=None, config=config, session=self, nodeid=\"\"\n )\n self.testsfailed = 0\n self.testscollected = 0\n self.shouldstop: Union[bool, str] = False\n self.shouldfail: Union[bool, str] = False\n self.trace = config.trace.root.get(\"collection\")\n self.startdir = config.invocation_dir\n self._initialpaths: FrozenSet[Path] = frozenset()\n\n self._bestrelpathcache: Dict[Path, str] = _bestrelpath_cache(config.rootpath)\n\n self.config.pluginmanager.register(self, name=\"session\")\n\n @classmethod\n def from_config(cls, config: Config) -> \"Session\":\n session: Session = cls._create(config)\n return session\n\n def __repr__(self) -> str:\n return \"<%s %s exitstatus=%r testsfailed=%d testscollected=%d>\" % (\n self.__class__.__name__,\n self.name,\n getattr(self, \"exitstatus\", \"\"),\n self.testsfailed,\n self.testscollected,\n )\n\n def _node_location_to_relpath(self, node_path: Path) -> str:\n # bestrelpath is a quite slow function.\n return self._bestrelpathcache[node_path]\n\n @hookimpl(tryfirst=True)\n def pytest_collectstart(self) -> None:\n if self.shouldfail:\n raise self.Failed(self.shouldfail)\n if self.shouldstop:\n raise self.Interrupted(self.shouldstop)\n\n @hookimpl(tryfirst=True)\n def pytest_runtest_logreport(\n self, report: Union[TestReport, CollectReport]\n ) -> None:\n if report.failed and not hasattr(report, \"wasxfail\"):\n self.testsfailed += 1\n maxfail = self.config.getvalue(\"maxfail\")\n if maxfail and self.testsfailed >= maxfail:\n self.shouldfail = \"stopping after %d failures\" % (self.testsfailed)\n\n pytest_collectreport = pytest_runtest_logreport\n\n def isinitpath(self, path: Union[str, \"os.PathLike[str]\"]) -> bool:\n return Path(path) in self._initialpaths", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.gethookproxy_Session.gethookproxy.return.proxy": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.gethookproxy_Session.gethookproxy.return.proxy", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 521, "end_line": 535, "span_ids": ["Session.gethookproxy"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n\n def gethookproxy(self, fspath: \"os.PathLike[str]\"):\n # Check if we have the common case of running\n # hooks with all conftest.py files.\n pm = self.config.pluginmanager\n my_conftestmodules = 
pm._getconftestmodules(\n Path(fspath), self.config.getoption(\"importmode\")\n )\n remove_mods = pm._conftest_plugins.difference(my_conftestmodules)\n if remove_mods:\n # One or more conftests are not in use at this fspath.\n proxy = FSHookProxy(pm, remove_mods)\n else:\n # All plugins are active for this fspath.\n proxy = self.config.hook\n return proxy", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session._recurse_Session._recurse.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session._recurse_Session._recurse.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 537, "end_line": 548, "span_ids": ["Session._recurse"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n\n def _recurse(self, direntry: \"os.DirEntry[str]\") -> bool:\n if direntry.name == \"__pycache__\":\n return False\n fspath = Path(direntry.path)\n path = py.path.local(fspath)\n ihook = self.gethookproxy(fspath.parent)\n if ihook.pytest_ignore_collect(fspath=fspath, path=path, config=self.config):\n return False\n norecursepatterns = self.config.getini(\"norecursedirs\")\n if any(fnmatch_ex(pat, fspath) for pat in norecursepatterns):\n return False\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session._collectfile_Session.perform_collect_10._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session._collectfile_Session.perform_collect_10._", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 550, "end_line": 587, "span_ids": ["Session.perform_collect", "Session._collectfile", "Session.perform_collect_10"], "tokens": 334}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n\n def _collectfile(\n self, fspath: Path, handle_dupes: bool = True\n ) -> Sequence[nodes.Collector]:\n path = py.path.local(fspath)\n assert (\n fspath.is_file()\n ), \"{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})\".format(\n fspath, fspath.is_dir(), fspath.exists(), fspath.is_symlink()\n )\n ihook = self.gethookproxy(fspath)\n if not self.isinitpath(fspath):\n if ihook.pytest_ignore_collect(\n fspath=fspath, path=path, config=self.config\n ):\n return ()\n\n if handle_dupes:\n keepduplicates = 
self.config.getoption(\"keepduplicates\")\n if not keepduplicates:\n duplicate_paths = self.config.pluginmanager._duplicatepaths\n if fspath in duplicate_paths:\n return ()\n else:\n duplicate_paths.add(fspath)\n\n return ihook.pytest_collect_file(fspath=fspath, path=path, parent=self) # type: ignore[no-any-return]\n\n @overload\n def perform_collect(\n self, args: Optional[Sequence[str]] = ..., genitems: \"Literal[True]\" = ...\n ) -> Sequence[nodes.Item]:\n ...\n\n @overload\n def perform_collect(\n self, args: Optional[Sequence[str]] = ..., genitems: bool = ...\n ) -> Sequence[Union[nodes.Item, nodes.Collector]]:\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.perform_collect_11_Session.perform_collect_11.return.items": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.perform_collect_11_Session.perform_collect_11.return.items", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 589, "end_line": 654, "span_ids": ["Session.perform_collect_11"], "tokens": 565}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n\n def perform_collect(\n self, args: Optional[Sequence[str]] = None, genitems: bool = True\n ) -> Sequence[Union[nodes.Item, nodes.Collector]]:\n \"\"\"Perform the collection phase for this session.\n\n This is called by the default\n :func:`pytest_collection <_pytest.hookspec.pytest_collection>` hook\n implementation; see the documentation of this hook for more details.\n For testing purposes, it may also be called directly on a fresh\n ``Session``.\n\n This function normally recursively expands any collectors collected\n from the session to their items, and only items are returned. 
For\n testing purposes, this may be suppressed by passing ``genitems=False``,\n in which case the return value contains these collectors unexpanded,\n and ``session.items`` is empty.\n \"\"\"\n if args is None:\n args = self.config.args\n\n self.trace(\"perform_collect\", self, args)\n self.trace.root.indent += 1\n\n self._notfound: List[Tuple[str, Sequence[nodes.Collector]]] = []\n self._initial_parts: List[Tuple[Path, List[str]]] = []\n self.items: List[nodes.Item] = []\n\n hook = self.config.hook\n\n items: Sequence[Union[nodes.Item, nodes.Collector]] = self.items\n try:\n initialpaths: List[Path] = []\n for arg in args:\n fspath, parts = resolve_collection_argument(\n self.config.invocation_params.dir,\n arg,\n as_pypath=self.config.option.pyargs,\n )\n self._initial_parts.append((fspath, parts))\n initialpaths.append(fspath)\n self._initialpaths = frozenset(initialpaths)\n rep = collect_one_node(self)\n self.ihook.pytest_collectreport(report=rep)\n self.trace.root.indent -= 1\n if self._notfound:\n errors = []\n for arg, cols in self._notfound:\n line = f\"(no name {arg!r} in any of {cols!r})\"\n errors.append(f\"not found: {arg}\\n{line}\")\n raise UsageError(*errors)\n if not genitems:\n items = rep.result\n else:\n if rep.passed:\n for node in rep.result:\n self.items.extend(self.genitems(node))\n\n self.config.pluginmanager.check_pending()\n hook.pytest_collection_modifyitems(\n session=self, config=self.config, items=items\n )\n finally:\n hook.pytest_collection_finish(session=self)\n\n self.testscollected = len(items)\n return items", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.collect_Session.collect.pkg_roots._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.collect_Session.collect.pkg_roots._", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 656, "end_line": 668, "span_ids": ["Session.collect"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n\n def collect(self) -> Iterator[Union[nodes.Item, nodes.Collector]]:\n from _pytest.python import Package\n\n # Keep track of any collected nodes in here, so we don't duplicate fixtures.\n node_cache1: Dict[Path, Sequence[nodes.Collector]] = {}\n node_cache2: Dict[Tuple[Type[nodes.Collector], Path], nodes.Collector] = {}\n\n # Keep track of any collected collectors in matchnodes paths, so they\n # are not collected more than once.\n matchnodes_cache: Dict[Tuple[Type[nodes.Collector], str], CollectReport] = {}\n\n # Dirnames of pkgs with dunder-init files.\n pkg_roots: Dict[str, Package] = {}\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.collect.for_argpath_names_in_sel_Session.collect.for_argpath_names_in_sel.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.collect.for_argpath_names_in_sel_Session.collect.for_argpath_names_in_sel.None_1", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 670, "end_line": 804, "span_ids": ["Session.collect"], "tokens": 1176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n\n def collect(self) -> Iterator[Union[nodes.Item, nodes.Collector]]:\n # ... other code\n\n for argpath, names in self._initial_parts:\n self.trace(\"processing argument\", (argpath, names))\n self.trace.root.indent += 1\n\n # Start with a Session root, and delve to argpath item (dir or file)\n # and stack all Packages found on the way.\n # No point in finding packages when collecting doctests.\n if not self.config.getoption(\"doctestmodules\", False):\n pm = self.config.pluginmanager\n confcutdir = pm._confcutdir\n for parent in (argpath, *argpath.parents):\n if confcutdir and parent in confcutdir.parents:\n break\n\n if parent.is_dir():\n pkginit = parent / \"__init__.py\"\n if pkginit.is_file() and pkginit not in node_cache1:\n col = self._collectfile(pkginit, handle_dupes=False)\n if col:\n if isinstance(col[0], Package):\n pkg_roots[str(parent)] = col[0]\n node_cache1[Path(col[0].fspath)] = [col[0]]\n\n # If it's a directory argument, recurse and look for any Subpackages.\n # Let the Package collector deal with subnodes, don't collect here.\n if argpath.is_dir():\n assert not names, \"invalid arg {!r}\".format((argpath, names))\n\n seen_dirs: Set[Path] = set()\n for direntry in visit(str(argpath), self._recurse):\n if not direntry.is_file():\n continue\n\n path = Path(direntry.path)\n dirpath = path.parent\n\n if dirpath not in seen_dirs:\n # Collect packages first.\n seen_dirs.add(dirpath)\n pkginit = dirpath / \"__init__.py\"\n if pkginit.exists():\n for x in self._collectfile(pkginit):\n yield x\n if isinstance(x, Package):\n pkg_roots[str(dirpath)] = x\n if str(dirpath) in pkg_roots:\n # Do not collect packages here.\n continue\n\n for x in self._collectfile(path):\n key2 = (type(x), Path(x.fspath))\n if key2 in node_cache2:\n yield node_cache2[key2]\n else:\n node_cache2[key2] = x\n yield x\n else:\n assert argpath.is_file()\n\n if argpath in node_cache1:\n col = node_cache1[argpath]\n else:\n collect_root = pkg_roots.get(str(argpath.parent), self)\n col = collect_root._collectfile(argpath, handle_dupes=False)\n if col:\n node_cache1[argpath] = col\n\n matching = []\n work: List[\n Tuple[Sequence[Union[nodes.Item, nodes.Collector]], Sequence[str]]\n ] = [(col, names)]\n while work:\n self.trace(\"matchnodes\", col, names)\n self.trace.root.indent += 1\n\n matchnodes, matchnames = work.pop()\n for node in matchnodes:\n if not matchnames:\n 
matching.append(node)\n continue\n if not isinstance(node, nodes.Collector):\n continue\n key = (type(node), node.nodeid)\n if key in matchnodes_cache:\n rep = matchnodes_cache[key]\n else:\n rep = collect_one_node(node)\n matchnodes_cache[key] = rep\n if rep.passed:\n submatchnodes = []\n for r in rep.result:\n # TODO: Remove parametrized workaround once collection structure contains\n # parametrization.\n if (\n r.name == matchnames[0]\n or r.name.split(\"[\")[0] == matchnames[0]\n ):\n submatchnodes.append(r)\n if submatchnodes:\n work.append((submatchnodes, matchnames[1:]))\n # XXX Accept IDs that don't have \"()\" for class instances.\n elif len(rep.result) == 1 and rep.result[0].name == \"()\":\n work.append((rep.result, matchnames))\n else:\n # Report collection failures here to avoid failing to run some test\n # specified in the command line because the module could not be\n # imported (#134).\n node.ihook.pytest_collectreport(report=rep)\n\n self.trace(\"matchnodes finished -> \", len(matching), \"nodes\")\n self.trace.root.indent -= 1\n\n if not matching:\n report_arg = \"::\".join((str(argpath), *names))\n self._notfound.append((report_arg, col))\n continue\n\n # If __init__.py was the only file requested, then the matched\n # node will be the corresponding Package (by default), and the\n # first yielded item will be the __init__ Module itself, so\n # just use that. If this special case isn't taken, then all the\n # files in the package will be yielded.\n if argpath.name == \"__init__.py\" and isinstance(matching[0], Package):\n try:\n yield next(iter(matching[0].collect()))\n except StopIteration:\n # The package collects nothing with only an __init__.py\n # file in it, which gets ignored by the default\n # \"python_files\" option.\n pass\n continue\n\n yield from matching\n\n self.trace.root.indent -= 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.genitems_Session.genitems.if_isinstance_node_nodes.else_.node_ihook_pytest_collect": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_Session.genitems_Session.genitems.if_isinstance_node_nodes.else_.node_ihook_pytest_collect", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 806, "end_line": 819, "span_ids": ["Session.genitems"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Session(nodes.FSCollector):\n\n def genitems(\n self, node: Union[nodes.Item, nodes.Collector]\n ) -> Iterator[nodes.Item]:\n self.trace(\"genitems\", node)\n if isinstance(node, nodes.Item):\n node.ihook.pytest_itemcollected(item=node)\n yield node\n else:\n assert isinstance(node, nodes.Collector)\n rep = collect_one_node(node)\n if rep.passed:\n for subnode in rep.result:\n yield from self.genitems(subnode)\n node.ihook.pytest_collectreport(report=rep)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_search_pypath_search_pypath.if_spec_is_None_or_spec_o.else_.return.spec_origin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_search_pypath_search_pypath.if_spec_is_None_or_spec_o.else_.return.spec_origin", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 822, "end_line": 836, "span_ids": ["search_pypath"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def search_pypath(module_name: str) -> str:\n \"\"\"Search sys.path for the given a dotted module name, and return its file system path.\"\"\"\n try:\n spec = importlib.util.find_spec(module_name)\n # AttributeError: looks like package module, but actually filename\n # ImportError: module does not exist\n # ValueError: not a module name\n except (AttributeError, ImportError, ValueError):\n return module_name\n if spec is None or spec.origin is None or spec.origin == \"namespace\":\n return module_name\n elif spec.submodule_search_locations:\n return os.path.dirname(spec.origin)\n else:\n return spec.origin", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_resolve_collection_argument_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/main.py_resolve_collection_argument_", "embedding": null, "metadata": {"file_path": "src/_pytest/main.py", "file_name": "main.py", "file_type": "text/x-python", "category": "implementation", "start_line": 839, "end_line": 884, "span_ids": ["resolve_collection_argument"], "tokens": 404}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def resolve_collection_argument(\n invocation_path: Path, arg: str, *, as_pypath: bool = False\n) -> Tuple[Path, List[str]]:\n \"\"\"Parse path arguments optionally containing selection parts and return (fspath, names).\n\n Command-line arguments can point to files and/or directories, and optionally contain\n parts for specific tests selection, for example:\n\n \"pkg/tests/test_foo.py::TestClass::test_foo\"\n\n This function ensures the path exists, and returns a tuple:\n\n (Path(\"/full/path/to/pkg/tests/test_foo.py\"), [\"TestClass\", \"test_foo\"])\n\n When as_pypath is True, expects that the command-line argument actually contains\n module paths instead of file-system paths:\n\n \"pkg.tests.test_foo::TestClass::test_foo\"\n\n In which case we search sys.path for a matching module, and then return the *path* to the\n found module.\n\n If the path doesn't exist, raise UsageError.\n If the path is a directory and selection parts are present, raise UsageError.\n \"\"\"\n 
strpath, *parts = str(arg).split(\"::\")\n if as_pypath:\n strpath = search_pypath(strpath)\n fspath = invocation_path / strpath\n fspath = absolutepath(fspath)\n if not fspath.exists():\n msg = (\n \"module or package not found: {arg} (missing __init__.py?)\"\n if as_pypath\n else \"file or directory not found: {arg}\"\n )\n raise UsageError(msg.format(arg=arg))\n if parts and fspath.is_dir():\n msg = (\n \"package argument cannot contain :: selection parts: {arg}\"\n if as_pypath\n else \"directory argument cannot contain :: selection parts: {arg}\"\n )\n raise UsageError(msg.format(arg=arg))\n return fspath, parts", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py__Generic_mechanism_for__old_mark_config_key.StoreKey_Optional_Config_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/__init__.py__Generic_mechanism_for__old_mark_config_key.StoreKey_Optional_Config_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 44, "span_ids": ["impl", "imports:26", "impl:2", "docstring", "imports"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Generic mechanism for marking and selecting python functions.\"\"\"\nimport warnings\nfrom typing import AbstractSet\nfrom typing import Collection\nfrom typing import List\nfrom typing import Optional\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport attr\n\nfrom .expression import Expression\nfrom .expression import ParseError\nfrom .structures import EMPTY_PARAMETERSET_OPTION\nfrom .structures import get_empty_parameterset_mark\nfrom .structures import Mark\nfrom .structures import MARK_GEN\nfrom .structures import MarkDecorator\nfrom .structures import MarkGenerator\nfrom .structures import ParameterSet\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config import hookimpl\nfrom _pytest.config import UsageError\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import MINUS_K_COLON\nfrom _pytest.deprecated import MINUS_K_DASH\nfrom _pytest.store import StoreKey\n\nif TYPE_CHECKING:\n from _pytest.nodes import Item\n\n\n__all__ = [\n \"MARK_GEN\",\n \"Mark\",\n \"MarkDecorator\",\n \"MarkGenerator\",\n \"ParameterSet\",\n \"get_empty_parameterset_mark\",\n]\n\n\nold_mark_config_key = StoreKey[Optional[Config]]()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_r_Evaluate_match_expres_ParseError.__str__.return.f_at_column_self_column_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/expression.py_r_Evaluate_match_expres_ParseError.__str__.return.f_at_column_self_column_", "embedding": null, "metadata": {"file_path": 
"src/_pytest/mark/expression.py", "file_name": "expression.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 69, "span_ids": ["Token", "imports:12", "impl", "ParseError", "impl:2", "ParseError.__str__", "docstring", "imports", "TokenType"], "tokens": 386}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "r\"\"\"Evaluate match expressions, as used by `-k` and `-m`.\n\nThe grammar is:\n\nexpression: expr? EOF\nexpr: and_expr ('or' and_expr)*\nand_expr: not_expr ('and' not_expr)*\nnot_expr: 'not' not_expr | '(' expr ')' | ident\nident: (\\w|:|\\+|-|\\.|\\[|\\])+\n\nThe semantics are:\n\n- Empty expression evaluates to False.\n- ident evaluates to True of False according to a provided matcher function.\n- or/and/not evaluate according to the usual boolean semantics.\n\"\"\"\nimport ast\nimport enum\nimport re\nimport types\nfrom typing import Callable\nfrom typing import Iterator\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import TYPE_CHECKING\n\nimport attr\n\nif TYPE_CHECKING:\n from typing import NoReturn\n\n\n__all__ = [\n \"Expression\",\n \"ParseError\",\n]\n\n\nclass TokenType(enum.Enum):\n LPAREN = \"left parenthesis\"\n RPAREN = \"right parenthesis\"\n OR = \"or\"\n AND = \"and\"\n NOT = \"not\"\n IDENT = \"identifier\"\n EOF = \"end of input\"\n\n\n@attr.s(frozen=True, slots=True)\nclass Token:\n type = attr.ib(type=TokenType)\n value = attr.ib(type=str)\n pos = attr.ib(type=int)\n\n\nclass ParseError(Exception):\n \"\"\"The expression contains invalid syntax.\n\n :param column: The column in the line where the error occurred (1-based).\n :param message: A description of the error.\n \"\"\"\n\n def __init__(self, column: int, message: str) -> None:\n self.column = column\n self.message = message\n\n def __str__(self) -> str:\n return f\"at column {self.column}: {self.message}\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_Mark.combined_with_Markable.TypeVar_Markable_bound": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_Mark.combined_with_Markable.TypeVar_Markable_bound", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 240, "end_line": 271, "span_ids": ["Mark.combined_with", "impl:4"], "tokens": 258}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(frozen=True, init=False, auto_attribs=True)\nclass Mark:\n\n def combined_with(self, other: \"Mark\") -> \"Mark\":\n \"\"\"Return a new Mark which is a combination of this\n Mark and another Mark.\n\n Combines by appending args and merging kwargs.\n\n :param Mark 
other: The mark to combine with.\n :rtype: Mark\n \"\"\"\n assert self.name == other.name\n\n # Remember source of ids with parametrize Marks.\n param_ids_from: Optional[Mark] = None\n if self.name == \"parametrize\":\n if other._has_param_ids():\n param_ids_from = other\n elif self._has_param_ids():\n param_ids_from = self\n\n return Mark(\n self.name,\n self.args + other.args,\n dict(self.kwargs, **other.kwargs),\n param_ids_from=param_ids_from,\n _ispytest=True,\n )\n\n\n# A generic parameter designating an object to which a Mark may\n# be applied -- a test function (callable) or class.\n# Note: a lambda is not allowed, but this can't be represented.\nMarkable = TypeVar(\"Markable\", bound=Union[Callable[..., object], type])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_store_mark_None_1._FilterwarningsMarkDecorator.__call__._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_store_mark_None_1._FilterwarningsMarkDecorator.__call__._", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 394, "end_line": 468, "span_ids": ["None_1._UsefixturesMarkDecorator.__call__", "store_mark", "None_1._XfailMarkDecorator", "None_1._ParametrizeMarkDecorator.__call__", "None_1._SkipMarkDecorator", "None_1._FilterwarningsMarkDecorator", "None_1._UsefixturesMarkDecorator", "impl:6", "None_1._XfailMarkDecorator.__call__", "None_1._FilterwarningsMarkDecorator.__call__", "None_1._SkipifMarkDecorator.__call__", "None_1._SkipMarkDecorator.__call__", "None_1._ParametrizeMarkDecorator", "None_1._SkipifMarkDecorator"], "tokens": 591}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def store_mark(obj, mark: Mark) -> None:\n \"\"\"Store a Mark on an object.\n\n This is used to implement the Mark declarations/decorators correctly.\n \"\"\"\n assert isinstance(mark, Mark), mark\n # Always reassign name to avoid updating pytestmark in a reference that\n # was only borrowed.\n obj.pytestmark = get_unpacked_marks(obj) + [mark]\n\n\n# Typing for builtin pytest marks. This is cheating; it gives builtin marks\n# special privilege, and breaks modularity. But practicality beats purity...\nif TYPE_CHECKING:\n from _pytest.fixtures import _Scope\n\n class _SkipMarkDecorator(MarkDecorator):\n @overload # type: ignore[override,misc]\n def __call__(self, arg: Markable) -> Markable:\n ...\n\n @overload\n def __call__(self, reason: str = ...) 
-> \"MarkDecorator\":\n ...\n\n class _SkipifMarkDecorator(MarkDecorator):\n def __call__( # type: ignore[override]\n self,\n condition: Union[str, bool] = ...,\n *conditions: Union[str, bool],\n reason: str = ...,\n ) -> MarkDecorator:\n ...\n\n class _XfailMarkDecorator(MarkDecorator):\n @overload # type: ignore[override,misc]\n def __call__(self, arg: Markable) -> Markable:\n ...\n\n @overload\n def __call__(\n self,\n condition: Union[str, bool] = ...,\n *conditions: Union[str, bool],\n reason: str = ...,\n run: bool = ...,\n raises: Union[Type[BaseException], Tuple[Type[BaseException], ...]] = ...,\n strict: bool = ...,\n ) -> MarkDecorator:\n ...\n\n class _ParametrizeMarkDecorator(MarkDecorator):\n def __call__( # type: ignore[override]\n self,\n argnames: Union[str, List[str], Tuple[str, ...]],\n argvalues: Iterable[Union[ParameterSet, Sequence[object], object]],\n *,\n indirect: Union[bool, Sequence[str]] = ...,\n ids: Optional[\n Union[\n Iterable[Union[None, str, float, int, bool]],\n Callable[[Any], Optional[object]],\n ]\n ] = ...,\n scope: Optional[_Scope] = ...,\n ) -> MarkDecorator:\n ...\n\n class _UsefixturesMarkDecorator(MarkDecorator):\n def __call__(self, *fixtures: str) -> MarkDecorator: # type: ignore[override]\n ...\n\n class _FilterwarningsMarkDecorator(MarkDecorator):\n def __call__(self, *filters: str) -> MarkDecorator: # type: ignore[override]\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkGenerator_MarkGenerator.__init__.self._markers.set_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/mark/structures.py_MarkGenerator_MarkGenerator.__init__.self._markers.set_", "embedding": null, "metadata": {"file_path": "src/_pytest/mark/structures.py", "file_name": "structures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 471, "end_line": 499, "span_ids": ["MarkGenerator"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MarkGenerator:\n \"\"\"Factory for :class:`MarkDecorator` objects - exposed as\n a ``pytest.mark`` singleton instance.\n\n Example::\n\n import pytest\n\n @pytest.mark.slowtest\n def test_function():\n pass\n\n applies a 'slowtest' :class:`Mark` on ``test_function``.\n \"\"\"\n\n # See TYPE_CHECKING above.\n if TYPE_CHECKING:\n skip: _SkipMarkDecorator\n skipif: _SkipifMarkDecorator\n xfail: _XfailMarkDecorator\n parametrize: _ParametrizeMarkDecorator\n usefixtures: _UsefixturesMarkDecorator\n filterwarnings: _FilterwarningsMarkDecorator\n\n def __init__(self, *, _ispytest: bool = False) -> None:\n check_ispytest(_ispytest)\n self._config: Optional[Config] = None\n self._markers: Set[str] = set()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py__Monkeypatching_and_moc_V.TypeVar_V_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py__Monkeypatching_and_moc_V.TypeVar_V_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 25, "span_ids": ["impl", "docstring", "imports"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Monkeypatching and mocking functionality.\"\"\"\nimport os\nimport re\nimport sys\nimport warnings\nfrom contextlib import contextmanager\nfrom typing import Any\nfrom typing import Generator\nfrom typing import List\nfrom typing import MutableMapping\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Tuple\nfrom typing import TypeVar\nfrom typing import Union\n\nfrom _pytest.compat import final\nfrom _pytest.fixtures import fixture\nfrom _pytest.warning_types import PytestWarning\n\nRE_IMPORT_ERROR_NAME = re.compile(r\"^No module named (.*)$\")\n\n\nK = TypeVar(\"K\")\nV = TypeVar(\"V\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch_MonkeyPatch.__init__.self._savesyspath.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch_MonkeyPatch.__init__.self._savesyspath.None", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 111, "end_line": 129, "span_ids": ["MonkeyPatch"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n \"\"\"Helper to conveniently monkeypatch attributes/items/environment\n variables/syspath.\n\n Returned by the :fixture:`monkeypatch` fixture.\n\n :versionchanged:: 6.2\n Can now also be used directly as `pytest.MonkeyPatch()`, for when\n the fixture is not available. 
In this case, use\n :meth:`with MonkeyPatch.context() as mp: ` or remember to call\n :meth:`undo` explicitly.\n \"\"\"\n\n def __init__(self) -> None:\n self._setattr: List[Tuple[object, str, object]] = []\n self._setitem: List[Tuple[MutableMapping[Any, Any], object, object]] = []\n self._cwd: Optional[str] = None\n self._savesyspath: Optional[List[str]] = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.context_MonkeyPatch.setattr_2._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.context_MonkeyPatch.setattr_2._", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 131, "end_line": 176, "span_ids": ["MonkeyPatch.context", "MonkeyPatch.setattr_2", "MonkeyPatch.setattr"], "tokens": 276}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n\n @classmethod\n @contextmanager\n def context(cls) -> Generator[\"MonkeyPatch\", None, None]:\n \"\"\"Context manager that returns a new :class:`MonkeyPatch` object\n which undoes any patching done inside the ``with`` block upon exit.\n\n Example:\n\n .. code-block:: python\n\n import functools\n\n\n def test_partial(monkeypatch):\n with monkeypatch.context() as m:\n m.setattr(functools, \"partial\", 3)\n\n Useful in situations where it is desired to undo some patches before the test ends,\n such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples\n of this see `#3290 `_.\n \"\"\"\n m = cls()\n try:\n yield m\n finally:\n m.undo()\n\n @overload\n def setattr(\n self,\n target: str,\n name: object,\n value: Notset = ...,\n raising: bool = ...,\n ) -> None:\n ...\n\n @overload\n def setattr(\n self,\n target: object,\n name: str,\n value: object,\n raising: bool = ...,\n ) -> None:\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setenv_MonkeyPatch.setenv.self_setitem_os_environ_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.setenv_MonkeyPatch.setenv.self_setitem_os_environ_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 282, "end_line": 302, "span_ids": ["MonkeyPatch.setenv"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n\n def setenv(self, name: 
str, value: str, prepend: Optional[str] = None) -> None:\n \"\"\"Set environment variable ``name`` to ``value``.\n\n If ``prepend`` is a character, read the current environment variable\n value and prepend the ``value`` adjoined with the ``prepend``\n character.\n \"\"\"\n if not isinstance(value, str):\n warnings.warn( # type: ignore[unreachable]\n PytestWarning(\n \"Value of environment variable {name} type should be str, but got \"\n \"{value!r} (type: {type}); converted to str implicitly\".format(\n name=name, value=value, type=type(value).__name__\n )\n ),\n stacklevel=2,\n )\n value = str(value)\n if prepend and name in os.environ:\n value = value + prepend + os.environ[name]\n self.setitem(os.environ, name, value)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.delenv_MonkeyPatch.chdir.os_chdir_path_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/monkeypatch.py_MonkeyPatch.delenv_MonkeyPatch.chdir.os_chdir_path_", "embedding": null, "metadata": {"file_path": "src/_pytest/monkeypatch.py", "file_name": "monkeypatch.py", "file_type": "text/x-python", "category": "implementation", "start_line": 304, "end_line": 342, "span_ids": ["MonkeyPatch.chdir", "MonkeyPatch.syspath_prepend", "MonkeyPatch.delenv"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass MonkeyPatch:\n\n def delenv(self, name: str, raising: bool = True) -> None:\n \"\"\"Delete ``name`` from the environment.\n\n Raises ``KeyError`` if it does not exist, unless ``raising`` is set to\n False.\n \"\"\"\n environ: MutableMapping[str, str] = os.environ\n self.delitem(environ, name, raising=raising)\n\n def syspath_prepend(self, path) -> None:\n \"\"\"Prepend ``path`` to ``sys.path`` list of import locations.\"\"\"\n from pkg_resources import fixup_namespace_packages\n\n if self._savesyspath is None:\n self._savesyspath = sys.path[:]\n sys.path.insert(0, str(path))\n\n # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171\n fixup_namespace_packages(str(path))\n\n # A call to syspathinsert() usually means that the caller wants to\n # import some dynamically created files, thus with python3 we\n # invalidate its import caches.\n # This is especially important when any namespace package is in use,\n # since then the mtime based FileFinder cache (that gets created in\n # this case already) gets not invalidated when writing the new files\n # quickly afterwards.\n from importlib import invalidate_caches\n\n invalidate_caches()\n\n def chdir(self, path: Union[str, \"os.PathLike[str]\"]) -> None:\n \"\"\"Change the current working directory to the specified path.\n\n Path can be a string or a path object.\n \"\"\"\n if self._cwd is None:\n self._cwd = os.getcwd()\n os.chdir(path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_os_tracebackcutdir.Path__pytest___file___pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_os_tracebackcutdir.Path__pytest___file___pa", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 44, "span_ids": ["imports:33", "impl", "imports", "impl:2"], "tokens": 278}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport warnings\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import List\nfrom typing import MutableMapping\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Set\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\n\nimport py\n\nimport _pytest._code\nfrom _pytest._code import getfslineno\nfrom _pytest._code.code import ExceptionInfo\nfrom _pytest._code.code import TerminalRepr\nfrom _pytest.compat import cached_property\nfrom _pytest.config import Config\nfrom _pytest.config import ConftestImportFailure\nfrom _pytest.deprecated import FSCOLLECTOR_GETHOOKPROXY_ISINITPATH\nfrom _pytest.mark.structures import Mark\nfrom _pytest.mark.structures import MarkDecorator\nfrom _pytest.mark.structures import NodeKeywords\nfrom _pytest.outcomes import fail\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.store import Store\n\nif TYPE_CHECKING:\n # Imported here due to circular import.\n from _pytest.main import Session\n from _pytest._code.code import _TracebackStyle\n\n\nSEP = \"/\"\n\ntracebackcutdir = Path(_pytest.__file__).parent", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_iterparentnodeids_iterparentnodeids.while_True_.if_at_1_and_sep_SE.else_.pos.at_len_sep_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_iterparentnodeids_iterparentnodeids.while_True_.if_at_1_and_sep_SE.else_.pos.at_len_sep_", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 47, "end_line": 79, "span_ids": ["iterparentnodeids"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def iterparentnodeids(nodeid: str) -> Iterator[str]:\n \"\"\"Return the parent node IDs of a given node ID, inclusive.\n\n For the node ID\n\n \"testing/code/test_excinfo.py::TestFormattedExcinfo::test_repr_source\"\n\n the result would be\n\n \"\"\n \"testing\"\n \"testing/code\"\n \"testing/code/test_excinfo.py\"\n 
\"testing/code/test_excinfo.py::TestFormattedExcinfo\"\n \"testing/code/test_excinfo.py::TestFormattedExcinfo::test_repr_source\"\n\n Note that :: parts are only considered at the last / component.\n \"\"\"\n pos = 0\n sep = SEP\n yield \"\"\n while True:\n at = nodeid.find(sep, pos)\n if at == -1 and sep == SEP:\n sep = \"::\"\n elif at == -1:\n if nodeid:\n yield nodeid\n break\n else:\n if at:\n yield nodeid[:at]\n pos = at + len(sep)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py__NodeType_NodeMeta._create.return.super___call___k_kw": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py__NodeType_NodeMeta._create.return.super___call___k_kw", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 82, "end_line": 96, "span_ids": ["NodeMeta.__call__", "impl:6", "NodeMeta", "NodeMeta._create"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "_NodeType = TypeVar(\"_NodeType\", bound=\"Node\")\n\n\nclass NodeMeta(type):\n def __call__(self, *k, **kw):\n msg = (\n \"Direct construction of {name} has been deprecated, please use {name}.from_parent.\\n\"\n \"See \"\n \"https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent\"\n \" for more details.\"\n ).format(name=self.__name__)\n fail(msg, pytrace=False)\n\n def _create(self, *k, **kw):\n return super().__call__(*k, **kw)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node._Methods_for_ordering_no_Node.listchain.return.chain": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py_Node._Methods_for_ordering_no_Node.listchain.return.chain", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 240, "end_line": 265, "span_ids": ["Node.listchain", "Node.warn", "Node.__hash__", "Node.nodeid", "Node.teardown", "Node.setup"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Node(metaclass=NodeMeta):\n\n # Methods for ordering nodes.\n\n @property\n def nodeid(self) -> str:\n \"\"\"A ::-separated string denoting its collection tree address.\"\"\"\n return self._nodeid\n\n def __hash__(self) -> int:\n return hash(self._nodeid)\n\n def setup(self) -> None:\n pass\n\n def teardown(self) -> None:\n pass\n\n def listchain(self) -> List[\"Node\"]:\n \"\"\"Return list of all parent collectors up to self, 
starting from\n the root of collection tree.\"\"\"\n chain = []\n item: Optional[Node] = self\n while item is not None:\n chain.append(item)\n item = item.parent\n chain.reverse()\n return chain", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py__check_initialpaths_for_relpath_FSCollector.isinitpath.return.self_session_isinitpath_p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nodes.py__check_initialpaths_for_relpath_FSCollector.isinitpath.return.self_session_isinitpath_p", "embedding": null, "metadata": {"file_path": "src/_pytest/nodes.py", "file_name": "nodes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 487, "end_line": 537, "span_ids": ["FSCollector.isinitpath", "_check_initialpaths_for_relpath", "FSCollector", "FSCollector.from_parent", "FSCollector.gethookproxy"], "tokens": 431}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _check_initialpaths_for_relpath(\n session: \"Session\", fspath: py.path.local\n) -> Optional[str]:\n for initial_path in session._initialpaths:\n initial_path_ = py.path.local(initial_path)\n if fspath.common(initial_path_) == initial_path_:\n return fspath.relto(initial_path_)\n return None\n\n\nclass FSCollector(Collector):\n def __init__(\n self,\n fspath: py.path.local,\n parent=None,\n config: Optional[Config] = None,\n session: Optional[\"Session\"] = None,\n nodeid: Optional[str] = None,\n ) -> None:\n name = fspath.basename\n if parent is not None:\n rel = fspath.relto(parent.fspath)\n if rel:\n name = rel\n name = name.replace(os.sep, SEP)\n self.fspath = fspath\n\n session = session or parent.session\n\n if nodeid is None:\n nodeid = self.fspath.relto(session.config.rootdir)\n\n if not nodeid:\n nodeid = _check_initialpaths_for_relpath(session, fspath)\n if nodeid and os.sep != SEP:\n nodeid = nodeid.replace(os.sep, SEP)\n\n super().__init__(name, parent, config, session, nodeid=nodeid, fspath=fspath)\n\n @classmethod\n def from_parent(cls, parent, *, fspath, **kw):\n \"\"\"The public constructor.\"\"\"\n return super().from_parent(parent=parent, fspath=fspath, **kw)\n\n def gethookproxy(self, fspath: \"os.PathLike[str]\"):\n warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)\n return self.session.gethookproxy(fspath)\n\n def isinitpath(self, path: Union[str, \"os.PathLike[str]\"]) -> bool:\n warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)\n return self.session.isinitpath(path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nose.py__Run_testsuites_written_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/nose.py__Run_testsuites_written_", "embedding": null, "metadata": {"file_path": "src/_pytest/nose.py", "file_name": "nose.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 48, 
"span_ids": ["call_optional", "docstring", "imports", "pytest_runtest_setup"], "tokens": 357}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Run testsuites written for nose.\"\"\"\nfrom _pytest.config import hookimpl\nfrom _pytest.fixtures import getfixturemarker\nfrom _pytest.nodes import Item\nfrom _pytest.python import Function\nfrom _pytest.unittest import TestCaseFunction\n\n\n@hookimpl(trylast=True)\ndef pytest_runtest_setup(item: Item) -> None:\n if not isinstance(item, Function):\n return\n # Don't do nose style setup/teardown on direct unittest style classes.\n if isinstance(item, TestCaseFunction):\n return\n\n # Capture the narrowed type of item for the teardown closure,\n # see https://github.com/python/mypy/issues/2608\n func = item\n\n if not call_optional(func.obj, \"setup\"):\n # Call module level setup if there is no object level one.\n assert func.parent is not None\n call_optional(func.parent.obj, \"setup\") # type: ignore[attr-defined]\n\n def teardown_nose() -> None:\n if not call_optional(func.obj, \"teardown\"):\n assert func.parent is not None\n call_optional(func.parent.obj, \"teardown\") # type: ignore[attr-defined]\n\n # XXX This implies we only call teardown when setup worked.\n func.addfinalizer(teardown_nose)\n\n\ndef call_optional(obj: object, name: str) -> bool:\n method = getattr(obj, name, None)\n if method is None:\n return False\n is_fixture = getfixturemarker(method) is not None\n if is_fixture:\n return False\n if not callable(method):\n return False\n # If there are any problems allow the exception to raise rather than\n # silently ignoring it.\n method()\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py__Exception_classes_and__if_TYPE_CHECKING_.else_.Protocol.Generic": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py__Exception_classes_and__if_TYPE_CHECKING_.else_.Protocol.Generic", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 22, "span_ids": ["impl", "imports:10", "impl:5", "docstring", "imports", "imports:8", "impl:4"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Exception classes and constants handling test outcomes as well as\nfunctions creating them.\"\"\"\nimport sys\nfrom typing import Any\nfrom typing import Callable\nfrom typing import cast\nfrom typing import Optional\nfrom typing import Type\nfrom typing import TypeVar\n\nTYPE_CHECKING = False # Avoid circular import through compat.\n\nif TYPE_CHECKING:\n from typing import NoReturn\n from typing_extensions import Protocol\nelse:\n # typing.Protocol is only available starting from Python 
3.8. It is also\n # available from typing_extensions, but we don't want a runtime dependency\n # on that. So use a dummy runtime implementation.\n from typing import Generic\n\n Protocol = Generic", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_TEST_OUTCOME_Skipped.__init__.self._use_item_location._use_item_location": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_TEST_OUTCOME_Skipped.__init__.self._use_item_location._use_item_location", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 48, "end_line": 68, "span_ids": ["impl:7", "Skipped"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_OUTCOME = (OutcomeException, Exception)\n\n\nclass Skipped(OutcomeException):\n # XXX hackish: on 3k we fake to live in the builtins\n # in order to have Skipped exception printing shorter/nicer\n __module__ = \"builtins\"\n\n def __init__(\n self,\n msg: Optional[str] = None,\n pytrace: bool = True,\n allow_module_level: bool = False,\n *,\n _use_item_location: bool = False,\n ) -> None:\n OutcomeException.__init__(self, msg=msg, pytrace=pytrace)\n self.allow_module_level = allow_module_level\n # If true, the skip location is reported as the item's location,\n # instead of the place that raises the exception/calls skip().\n self._use_item_location = _use_item_location", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_fail_XFailed._Raised_from_an_explici": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/outcomes.py_fail_XFailed._Raised_from_an_explici", "embedding": null, "metadata": {"file_path": "src/_pytest/outcomes.py", "file_name": "outcomes.py", "file_type": "text/x-python", "category": "implementation", "start_line": 147, "end_line": 162, "span_ids": ["XFailed", "fail"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@_with_exception(Failed)\ndef fail(msg: str = \"\", pytrace: bool = True) -> \"NoReturn\":\n \"\"\"Explicitly fail an executing test with the given message.\n\n :param str msg:\n The message to show the user as reason for the failure.\n :param bool pytrace:\n If False, msg represents the full failure information and no\n python traceback will be reported.\n \"\"\"\n __tracebackhide__ = True\n raise Failed(msg=msg, pytrace=pytrace)\n\n\nclass XFailed(Failed):\n \"\"\"Raised from an explicit call to pytest.xfail().\"\"\"", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py__Submit_failure_or_test_pytest_addoption.group__addoption_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pastebin.py__Submit_failure_or_test_pytest_addoption.group__addoption_", "embedding": null, "metadata": {"file_path": "src/_pytest/pastebin.py", "file_name": "pastebin.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 28, "span_ids": ["impl", "docstring", "imports", "pytest_addoption"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Submit failure or test session information to a pastebin service.\"\"\"\nimport tempfile\nfrom io import StringIO\nfrom typing import IO\nfrom typing import Union\n\nimport pytest\nfrom _pytest.config import Config\nfrom _pytest.config import create_terminal_writer\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.store import StoreKey\nfrom _pytest.terminal import TerminalReporter\n\n\npastebinfile_key = StoreKey[IO[bytes]]()\n\n\ndef pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"terminal reporting\")\n group._addoption(\n \"--pastebin\",\n metavar=\"mode\",\n action=\"store\",\n dest=\"pastebin\",\n default=None,\n choices=[\"failed\", \"all\"],\n help=\"send failed|all info to bpaste.net pastebin service.\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_ensure_deletable_ensure_deletable.None_1.else_.return.False": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_ensure_deletable_ensure_deletable.None_1.else_.return.False", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 298, "end_line": 323, "span_ids": ["ensure_deletable"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def ensure_deletable(path: Path, consider_lock_dead_if_created_before: float) -> bool:\n \"\"\"Check if `path` is deletable based on whether the lock file is expired.\"\"\"\n if path.is_symlink():\n return False\n lock = get_lock_path(path)\n try:\n if not lock.is_file():\n return True\n except OSError:\n # we might not have access to the lock file at all, in this case assume\n # we don't have access to the entire directory (#7491).\n return False\n try:\n lock_time = lock.stat().st_mtime\n except Exception:\n return False\n else:\n if lock_time < consider_lock_dead_if_created_before:\n # We want to ignore any errors while trying to remove the lock such as:\n # - PermissionDenied, 
like the file permissions have changed since the lock creation;\n # - FileNotFoundError, in case another pytest process got here first;\n # and any other cause of failure.\n with contextlib.suppress(OSError):\n lock.unlink()\n return True\n return False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_import_path_import_path.return.mod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_import_path_import_path.return.mod", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 456, "end_line": 551, "span_ids": ["import_path"], "tokens": 808}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def import_path(\n p: Union[str, \"os.PathLike[str]\"],\n *,\n mode: Union[str, ImportMode] = ImportMode.prepend,\n) -> ModuleType:\n \"\"\"Import and return a module from the given path, which can be a file (a module) or\n a directory (a package).\n\n The import mechanism used is controlled by the `mode` parameter:\n\n * `mode == ImportMode.prepend`: the directory containing the module (or package, taking\n `__init__.py` files into account) will be put at the *start* of `sys.path` before\n being imported with `__import__.\n\n * `mode == ImportMode.append`: same as `prepend`, but the directory will be appended\n to the end of `sys.path`, if not already in `sys.path`.\n\n * `mode == ImportMode.importlib`: uses more fine control mechanisms provided by `importlib`\n to import the module, which avoids having to use `__import__` and muck with `sys.path`\n at all. It effectively allows having same-named test modules in different places.\n\n :raises ImportPathMismatchError:\n If after importing the given `path` and the module `__file__`\n are different. 
Only raised in `prepend` and `append` modes.\n \"\"\"\n mode = ImportMode(mode)\n\n path = Path(p)\n\n if not path.exists():\n raise ImportError(path)\n\n if mode is ImportMode.importlib:\n module_name = path.stem\n\n for meta_importer in sys.meta_path:\n spec = meta_importer.find_spec(module_name, [str(path.parent)])\n if spec is not None:\n break\n else:\n spec = importlib.util.spec_from_file_location(module_name, str(path))\n\n if spec is None:\n raise ImportError(\n \"Can't find module {} at location {}\".format(module_name, str(path))\n )\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod) # type: ignore[union-attr]\n return mod\n\n pkg_path = resolve_package_path(path)\n if pkg_path is not None:\n pkg_root = pkg_path.parent\n names = list(path.with_suffix(\"\").relative_to(pkg_root).parts)\n if names[-1] == \"__init__\":\n names.pop()\n module_name = \".\".join(names)\n else:\n pkg_root = path.parent\n module_name = path.stem\n\n # Change sys.path permanently: restoring it at the end of this function would cause surprising\n # problems because of delayed imports: for example, a conftest.py file imported by this function\n # might have local imports, which would fail at runtime if we restored sys.path.\n if mode is ImportMode.append:\n if str(pkg_root) not in sys.path:\n sys.path.append(str(pkg_root))\n elif mode is ImportMode.prepend:\n if str(pkg_root) != sys.path[0]:\n sys.path.insert(0, str(pkg_root))\n else:\n assert_never(mode)\n\n importlib.import_module(module_name)\n\n mod = sys.modules[module_name]\n if path.name == \"__init__.py\":\n return mod\n\n ignore = os.environ.get(\"PY_IGNORE_IMPORTMISMATCH\", \"\")\n if ignore != \"1\":\n module_file = mod.__file__\n if module_file.endswith((\".pyc\", \".pyo\")):\n module_file = module_file[:-1]\n if module_file.endswith(os.path.sep + \"__init__.py\"):\n module_file = module_file[: -(len(os.path.sep + \"__init__.py\"))]\n\n try:\n is_same = _is_same(str(path), module_file)\n except FileNotFoundError:\n is_same = False\n\n if not is_same:\n raise ImportPathMismatchError(module_name, module_file, path)\n\n return mod", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py__Implement_a_special__is_resolve_package_path.return.result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py__Implement_a_special__is_resolve_package_path.return.result", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 554, "end_line": 582, "span_ids": ["impl:9", "import_path", "resolve_package_path"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# Implement a special _is_same function on Windows which returns True if the two filenames\n# compare equal, to circumvent os.path.samefile returning False for mounts in UNC (#7678).\nif sys.platform.startswith(\"win\"):\n\n def _is_same(f1: str, f2: str) -> bool:\n return Path(f1) == Path(f2) or os.path.samefile(f1, 
f2)\n\n\nelse:\n\n def _is_same(f1: str, f2: str) -> bool:\n return os.path.samefile(f1, f2)\n\n\ndef resolve_package_path(path: Path) -> Optional[Path]:\n \"\"\"Return the Python package path by looking for the last\n directory upwards which still contains an __init__.py.\n\n Returns None if it can not be determined.\n \"\"\"\n result = None\n for parent in itertools.chain((path,), path.parents):\n if parent.is_dir():\n if not parent.joinpath(\"__init__.py\").is_file():\n break\n if not parent.name.isidentifier():\n break\n result = parent\n return result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_visit_visit.for_entry_in_entries_.if_entry_is_dir_and_rec.yield_from_visit_entry_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_visit_visit.for_entry_in_entries_.if_entry_is_dir_and_rec.yield_from_visit_entry_pa", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 585, "end_line": 611, "span_ids": ["visit"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def visit(\n path: str, recurse: Callable[[\"os.DirEntry[str]\"], bool]\n) -> Iterator[\"os.DirEntry[str]\"]:\n \"\"\"Walk a directory recursively, in breadth-first order.\n\n Entries at each directory level are sorted.\n \"\"\"\n\n # Skip entries with symlink loops and other brokenness, so the caller doesn't\n # have to deal with it.\n entries = []\n for entry in os.scandir(path):\n try:\n entry.is_file()\n except OSError as err:\n if _ignore_error(err):\n continue\n raise\n entries.append(entry)\n\n entries.sort(key=lambda entry: entry.name)\n\n yield from entries\n\n for entry in entries:\n if entry.is_dir() and recurse(entry):\n yield from visit(entry.path, recurse)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_absolutepath_commonpath.try_.except_ValueError_.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_absolutepath_commonpath.try_.except_ValueError_.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 614, "end_line": 632, "span_ids": ["commonpath", "absolutepath"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def absolutepath(path: Union[Path, str]) -> Path:\n \"\"\"Convert a path to an absolute path using os.path.abspath.\n\n Prefer this over Path.resolve() 
(see #6523).\n Prefer this over Path.absolute() (not public, doesn't normalize).\n \"\"\"\n return Path(os.path.abspath(str(path)))\n\n\ndef commonpath(path1: Path, path2: Path) -> Optional[Path]:\n \"\"\"Return the common part shared with the other path, or None if there is\n no common part.\n\n If one path is relative and one is absolute, returns None.\n \"\"\"\n try:\n return Path(os.path.commonpath((str(path1), str(path2))))\n except ValueError:\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_bestrelpath_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pathlib.py_bestrelpath_", "embedding": null, "metadata": {"file_path": "src/_pytest/pathlib.py", "file_name": "pathlib.py", "file_type": "text/x-python", "category": "implementation", "start_line": 635, "end_line": 660, "span_ids": ["bestrelpath"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def bestrelpath(directory: Path, dest: Path) -> str:\n \"\"\"Return a string which is a relative path from directory to dest such\n that directory/bestrelpath == dest.\n\n The paths must be either both absolute or both relative.\n\n If no such path can be determined, returns dest.\n \"\"\"\n if dest == directory:\n return os.curdir\n # Find the longest common directory.\n base = commonpath(directory, dest)\n # Can be the case on Windows for two absolute paths on different drives.\n # Can be the case for two relative paths without common prefix.\n # Can be the case for a relative path and an absolute path.\n if not base:\n return str(dest)\n reldirectory = directory.relative_to(base)\n reldest = dest.relative_to(base)\n return os.path.join(\n # Back from directory to base.\n *([os.pardir] * len(reldirectory.parts)),\n # Forward from base to dest.\n *reldest.parts,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py__Disabled_by_default__IGNORE_PAM._filenames_added_when": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py__Disabled_by_default__IGNORE_PAM._filenames_added_when", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 83, "span_ids": ["impl", "impl:2", "docstring", "imports:63", "imports"], "tokens": 489}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"(Disabled by default) support for testing pytest and pytest plugins.\n\nPYTEST_DONT_REWRITE\n\"\"\"\nimport collections.abc\nimport contextlib\nimport 
gc\nimport importlib\nimport os\nimport platform\nimport re\nimport shutil\nimport subprocess\nimport sys\nimport traceback\nfrom fnmatch import fnmatch\nfrom io import StringIO\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import IO\nfrom typing import Iterable\nfrom typing import List\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Sequence\nfrom typing import TextIO\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import Union\nfrom weakref import WeakKeyDictionary\n\nimport attr\nimport py\nfrom iniconfig import IniConfig\nfrom iniconfig import SectionWrapper\n\nfrom _pytest import timing\nfrom _pytest._code import Source\nfrom _pytest.capture import _get_multicapture\nfrom _pytest.compat import final\nfrom _pytest.compat import NOTSET\nfrom _pytest.compat import NotSetType\nfrom _pytest.config import _PluggyPlugin\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config import hookimpl\nfrom _pytest.config import main\nfrom _pytest.config import PytestPluginManager\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.fixtures import fixture\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.main import Session\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.nodes import Collector\nfrom _pytest.nodes import Item\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import importorskip\nfrom _pytest.outcomes import skip\nfrom _pytest.pathlib import make_numbered_dir\nfrom _pytest.reports import CollectReport\nfrom _pytest.reports import TestReport\nfrom _pytest.tmpdir import TempPathFactory\nfrom _pytest.warning_types import PytestWarning\n\n\nif TYPE_CHECKING:\n from typing_extensions import Final\n from typing_extensions import Literal\n\n import pexpect\n\n\npytest_plugins = [\"pytester_assertions\"]\n\n\nIGNORE_PAM = [ # filenames added when obtaining details about the current user\n \"/var/lib/sss/mc/passwd\"\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LsofFdLeakChecker.pytest_runtest_protocol_LsofFdLeakChecker.pytest_runtest_protocol.if_leaked_files_.item_warn_PytestWarning_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_LsofFdLeakChecker.pytest_runtest_protocol_LsofFdLeakChecker.pytest_runtest_protocol.if_leaked_files_.item_warn_PytestWarning_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 164, "end_line": 186, "span_ids": ["LsofFdLeakChecker.pytest_runtest_protocol"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LsofFdLeakChecker:\n\n @hookimpl(hookwrapper=True, tryfirst=True)\n def pytest_runtest_protocol(self, item: Item) -> Generator[None, None, None]:\n lines1 = self.get_open_files()\n 
yield\n if hasattr(sys, \"pypy_version_info\"):\n gc.collect()\n lines2 = self.get_open_files()\n\n new_fds = {t[0] for t in lines2} - {t[0] for t in lines1}\n leaked_files = [t for t in lines2 if t[0] in new_fds]\n if leaked_files:\n error = [\n \"***** %s FD leakage detected\" % len(leaked_files),\n *(str(f) for f in leaked_files),\n \"*** Before:\",\n *(str(f) for f in lines1),\n \"*** After:\",\n *(str(f) for f in lines2),\n \"***** %s FD leakage detected\" % len(leaked_files),\n \"*** function %s:%s: %s \" % item.location,\n \"See issue #2366\",\n ]\n item.warn(PytestWarning(\"\\n\".join(error)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py__used_at_least_by_pytest_get_public_names.return._x_for_x_in_values_if_x_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py__used_at_least_by_pytest_get_public_names.return._x_for_x_in_values_if_x_0", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 189, "end_line": 212, "span_ids": ["PytestArg.gethookrecorder", "PytestArg", "get_public_names", "LsofFdLeakChecker.pytest_runtest_protocol", "_pytest"], "tokens": 186}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# used at least by pytest-xdist plugin\n\n\n@fixture\ndef _pytest(request: FixtureRequest) -> \"PytestArg\":\n \"\"\"Return a helper which offers a gethookrecorder(hook) method which\n returns a HookRecorder instance which helps to make assertions about called\n hooks.\"\"\"\n return PytestArg(request)\n\n\nclass PytestArg:\n def __init__(self, request: FixtureRequest) -> None:\n self._request = request\n\n def gethookrecorder(self, hook) -> \"HookRecorder\":\n hookrecorder = HookRecorder(hook._pm)\n self._request.addfinalizer(hookrecorder.finish_recording)\n return hookrecorder\n\n\ndef get_public_names(values: Iterable[str]) -> List[str]:\n \"\"\"Only return names from iterator values without a leading underscore.\"\"\"\n return [x for x in values if x[0] != \"_\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_ParsedCall_ParsedCall.if_TYPE_CHECKING_.__getattr__._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_ParsedCall_ParsedCall.if_TYPE_CHECKING_.__getattr__._", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 215, "end_line": 228, "span_ids": ["ParsedCall", "ParsedCall:2", "ParsedCall.__repr__"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ParsedCall:\n def __init__(self, name: str, kwargs) -> None:\n self.__dict__.update(kwargs)\n self._name = name\n\n def __repr__(self) -> str:\n d = self.__dict__.copy()\n del d[\"_name\"]\n return f\"\"\n\n if TYPE_CHECKING:\n # The class has undetermined attributes, this tells mypy about it.\n def __getattr__(self, key: str):\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.assert_contains_HookRecorder.assert_contains.while_entries_.for_ind_call_in_enumerat.else_.fail_f_could_not_find_na": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.assert_contains_HookRecorder.assert_contains.while_entries_.for_ind_call_in_enumerat.else_.fail_f_could_not_find_na", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 259, "end_line": 278, "span_ids": ["HookRecorder.assert_contains"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HookRecorder:\n\n def assert_contains(self, entries: Sequence[Tuple[str, str]]) -> None:\n __tracebackhide__ = True\n i = 0\n entries = list(entries)\n backlocals = sys._getframe(1).f_locals\n while entries:\n name, check = entries.pop(0)\n for ind, call in enumerate(self.calls[i:]):\n if call._name == name:\n print(\"NAMEMATCH\", name, call)\n if eval(check, backlocals, call.__dict__):\n print(\"CHECKERMATCH\", repr(check), \"->\", call)\n else:\n print(\"NOCHECKERMATCH\", repr(check), \"-\", call)\n continue\n i += ind + 1\n break\n print(\"NONAMEMATCH\", name, \"with\", call)\n else:\n fail(f\"could not find {name!r} check {check!r}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.popcall_HookRecorder.getreports_8.return._x_report_for_x_in_self_g": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.popcall_HookRecorder.getreports_8.return._x_report_for_x_in_self_g", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 280, "end_line": 328, "span_ids": ["HookRecorder.getreports_6", "HookRecorder.getcall", "HookRecorder.getreports_8", "HookRecorder.getreports", "HookRecorder.getreports_7", "HookRecorder.popcall"], "tokens": 333}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"class HookRecorder:\n\n def popcall(self, name: str) -> ParsedCall:\n __tracebackhide__ = True\n for i, call in enumerate(self.calls):\n if call._name == name:\n del self.calls[i]\n return call\n lines = [f\"could not find call {name!r}, in:\"]\n lines.extend([\" %s\" % x for x in self.calls])\n fail(\"\\n\".join(lines))\n\n def getcall(self, name: str) -> ParsedCall:\n values = self.getcalls(name)\n assert len(values) == 1, (name, values)\n return values[0]\n\n # functionality for test reports\n\n @overload\n def getreports(\n self,\n names: \"Literal['pytest_collectreport']\",\n ) -> Sequence[CollectReport]:\n ...\n\n @overload\n def getreports(\n self,\n names: \"Literal['pytest_runtest_logreport']\",\n ) -> Sequence[TestReport]:\n ...\n\n @overload\n def getreports(\n self,\n names: Union[str, Iterable[str]] = (\n \"pytest_collectreport\",\n \"pytest_runtest_logreport\",\n ),\n ) -> Sequence[Union[CollectReport, TestReport]]:\n ...\n\n def getreports(\n self,\n names: Union[str, Iterable[str]] = (\n \"pytest_collectreport\",\n \"pytest_runtest_logreport\",\n ),\n ) -> Sequence[Union[CollectReport, TestReport]]:\n return [x.report for x in self.getcalls(names)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.getfailures_HookRecorder.getfailedcollections.return.self_getfailures_pytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.getfailures_HookRecorder.getfailedcollections.return.self_getfailures_pytest_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 362, "end_line": 396, "span_ids": ["HookRecorder.getfailures_11", "HookRecorder.getfailedcollections", "HookRecorder.getfailures_12", "HookRecorder.getfailures_13", "HookRecorder.getfailures"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HookRecorder:\n\n @overload\n def getfailures(\n self,\n names: \"Literal['pytest_collectreport']\",\n ) -> Sequence[CollectReport]:\n ...\n\n @overload\n def getfailures(\n self,\n names: \"Literal['pytest_runtest_logreport']\",\n ) -> Sequence[TestReport]:\n ...\n\n @overload\n def getfailures(\n self,\n names: Union[str, Iterable[str]] = (\n \"pytest_collectreport\",\n \"pytest_runtest_logreport\",\n ),\n ) -> Sequence[Union[CollectReport, TestReport]]:\n ...\n\n def getfailures(\n self,\n names: Union[str, Iterable[str]] = (\n \"pytest_collectreport\",\n \"pytest_runtest_logreport\",\n ),\n ) -> Sequence[Union[CollectReport, TestReport]]:\n return [rep for rep in self.getreports(names) if rep.failed]\n\n def getfailedcollections(self) -> Sequence[CollectReport]:\n return self.getfailures(\"pytest_collectreport\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.listoutcomes_HookRecorder.listoutcomes.return.passed_skipped_failed": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_HookRecorder.listoutcomes_HookRecorder.listoutcomes.return.passed_skipped_failed", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 398, "end_line": 420, "span_ids": ["HookRecorder.listoutcomes"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HookRecorder:\n\n def listoutcomes(\n self,\n ) -> Tuple[\n Sequence[TestReport],\n Sequence[Union[CollectReport, TestReport]],\n Sequence[Union[CollectReport, TestReport]],\n ]:\n passed = []\n skipped = []\n failed = []\n for rep in self.getreports(\n (\"pytest_collectreport\", \"pytest_runtest_logreport\")\n ):\n if rep.passed:\n if rep.when == \"call\":\n assert isinstance(rep, TestReport)\n passed.append(rep)\n elif rep.skipped:\n skipped.append(rep)\n else:\n assert rep.failed, f\"Unexpected outcome: {rep!r}\"\n failed.append(rep)\n return passed, skipped, failed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_linecomp_LineMatcher_fixture.return.LineMatcher": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_linecomp_LineMatcher_fixture.return.LineMatcher", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 441, "end_line": 455, "span_ids": ["LineMatcher_fixture", "linecomp"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef linecomp() -> \"LineComp\":\n \"\"\"A :class: `LineComp` instance for checking that an input linearly\n contains a sequence of strings.\"\"\"\n return LineComp()\n\n\n@fixture(name=\"LineMatcher\")\ndef LineMatcher_fixture(request: FixtureRequest) -> Type[\"LineMatcher\"]:\n \"\"\"A reference to the :class: `LineMatcher`.\n\n This is instantiable with a list of lines (without their trailing newlines).\n This is useful for testing large texts, such as the output of commands.\n \"\"\"\n return LineMatcher", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_pytester_pytester.return.Pytester_request_tmp_pat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_pytester_pytester.return.Pytester_request_tmp_pat", "embedding": null, "metadata": 
{"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 458, "end_line": 470, "span_ids": ["pytester"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef pytester(request: FixtureRequest, tmp_path_factory: TempPathFactory) -> \"Pytester\":\n \"\"\"\n Facilities to write tests/configuration files, execute pytest in isolation, and match\n against expected output, perfect for black-box testing of pytest plugins.\n\n It attempts to isolate the test run from external factors as much as possible, modifying\n the current working directory to ``path`` and environment variables during initialization.\n\n It is particularly useful for testing plugins. It is similar to the :fixture:`tmp_path`\n fixture but provides methods which aid in testing pytest itself.\n \"\"\"\n return Pytester(request, tmp_path_factory, _ispytest=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_testdir_rex_outcome.re_compile_r_d_w_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_testdir_rex_outcome.re_compile_r_d_w_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 473, "end_line": 505, "span_ids": ["testdir", "_sys_snapshot", "_config_for_test", "impl:6"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef testdir(pytester: \"Pytester\") -> \"Testdir\":\n \"\"\"\n Identical to :fixture:`pytester`, and provides an instance whose methods return\n legacy ``py.path.local`` objects instead when applicable.\n\n New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`.\n \"\"\"\n return Testdir(pytester, _ispytest=True)\n\n\n@fixture\ndef _sys_snapshot() -> Generator[None, None, None]:\n snappaths = SysPathsSnapshot()\n snapmods = SysModulesSnapshot()\n yield\n snapmods.restore()\n snappaths.restore()\n\n\n@fixture\ndef _config_for_test() -> Generator[Config, None, None]:\n from _pytest.config import get_config\n\n config = get_config()\n yield config\n config._ensure_unconfigure() # cleanup, e.g. 
capman closing tmpfiles.\n\n\n# Regex to match the session duration string in the summary: \"74.34s\".\nrex_session_duration = re.compile(r\"\\d+\\.\\d\\ds\")\n# Regex to match all the counts and phrases in the summary line: \"34 passed, 111 skipped\".\nrex_outcome = re.compile(r\"(\\d+) (\\w+)\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.assert_outcomes_RunResult.assert_outcomes.assert_outcomes_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_RunResult.assert_outcomes_RunResult.assert_outcomes.assert_outcomes_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 580, "end_line": 603, "span_ids": ["RunResult.assert_outcomes"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RunResult:\n\n def assert_outcomes(\n self,\n passed: int = 0,\n skipped: int = 0,\n failed: int = 0,\n errors: int = 0,\n xpassed: int = 0,\n xfailed: int = 0,\n ) -> None:\n \"\"\"Assert that the specified outcomes appear with the respective\n numbers (0 means it didn't occur) in the text output from a test run.\"\"\"\n __tracebackhide__ = True\n from _pytest.pytester_assertions import assert_outcomes\n\n outcomes = self.parseoutcomes()\n assert_outcomes(\n outcomes,\n passed=passed,\n skipped=skipped,\n failed=failed,\n errors=errors,\n xpassed=xpassed,\n xfailed=xfailed,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester_Pytester._finalize.self__monkeypatch_undo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester_Pytester._finalize.self__monkeypatch_undo_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 636, "end_line": 722, "span_ids": ["Pytester.__init__", "Pytester", "Pytester.__repr__", "Pytester.__init__:2", "Pytester.TimeoutExpired", "Pytester.path", "Pytester._finalize"], "tokens": 717}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n \"\"\"\n Facilities to write tests/configuration files, execute pytest in isolation, and match\n against expected output, perfect for black-box testing of pytest plugins.\n\n It attempts to isolate the test run from external factors as much as possible, modifying\n the current working directory to ``path`` and environment variables during initialization.\n\n Attributes:\n\n 
:ivar Path path: temporary directory path used to create files/run tests from, etc.\n\n :ivar plugins:\n A list of plugins to use with :py:meth:`parseconfig` and\n :py:meth:`runpytest`. Initially this is an empty list but plugins can\n be added to the list. The type of items to add to the list depends on\n the method using them so refer to them for details.\n \"\"\"\n\n __test__ = False\n\n CLOSE_STDIN: \"Final\" = NOTSET\n\n class TimeoutExpired(Exception):\n pass\n\n def __init__(\n self,\n request: FixtureRequest,\n tmp_path_factory: TempPathFactory,\n *,\n _ispytest: bool = False,\n ) -> None:\n check_ispytest(_ispytest)\n self._request = request\n self._mod_collections: WeakKeyDictionary[\n Collector, List[Union[Item, Collector]]\n ] = WeakKeyDictionary()\n if request.function:\n name: str = request.function.__name__\n else:\n name = request.node.name\n self._name = name\n self._path: Path = tmp_path_factory.mktemp(name, numbered=True)\n self.plugins: List[Union[str, _PluggyPlugin]] = []\n self._cwd_snapshot = CwdSnapshot()\n self._sys_path_snapshot = SysPathsSnapshot()\n self._sys_modules_snapshot = self.__take_sys_modules_snapshot()\n self.chdir()\n self._request.addfinalizer(self._finalize)\n self._method = self._request.config.getoption(\"--runpytest\")\n self._test_tmproot = tmp_path_factory.mktemp(f\"tmp-{name}\", numbered=True)\n\n self._monkeypatch = mp = MonkeyPatch()\n mp.setenv(\"PYTEST_DEBUG_TEMPROOT\", str(self._test_tmproot))\n # Ensure no unexpected caching via tox.\n mp.delenv(\"TOX_ENV_DIR\", raising=False)\n # Discard outer pytest options.\n mp.delenv(\"PYTEST_ADDOPTS\", raising=False)\n # Ensure no user config is used.\n tmphome = str(self.path)\n mp.setenv(\"HOME\", tmphome)\n mp.setenv(\"USERPROFILE\", tmphome)\n # Do not use colors for inner runs by default.\n mp.setenv(\"PY_COLORS\", \"0\")\n\n @property\n def path(self) -> Path:\n \"\"\"Temporary directory where files are created and pytest is executed.\"\"\"\n return self._path\n\n def __repr__(self) -> str:\n return f\"\"\n\n def _finalize(self) -> None:\n \"\"\"\n Clean up global state artifacts.\n\n Some methods modify the global interpreter state and this tries to\n clean this up. 
It does not remove the temporary directory however so\n it can be looked at after the test run has finished.\n \"\"\"\n self._sys_modules_snapshot.restore()\n self._sys_path_snapshot.restore()\n self._cwd_snapshot.restore()\n self._monkeypatch.undo()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.__take_sys_modules_snapshot_Pytester.chdir.os_chdir_self_path_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.__take_sys_modules_snapshot_Pytester.chdir.os_chdir_self_path_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 724, "end_line": 747, "span_ids": ["Pytester.__take_sys_modules_snapshot", "Pytester.make_hook_recorder", "Pytester.chdir"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def __take_sys_modules_snapshot(self) -> SysModulesSnapshot:\n # Some zope modules used by twisted-related tests keep internal state\n # and can't be deleted; we had some trouble in the past with\n # `zope.interface` for example.\n #\n # Preserve readline due to https://bugs.python.org/issue41033.\n # pexpect issues a SIGWINCH.\n def preserve_module(name):\n return name.startswith((\"zope\", \"readline\"))\n\n return SysModulesSnapshot(preserve=preserve_module)\n\n def make_hook_recorder(self, pluginmanager: PytestPluginManager) -> HookRecorder:\n \"\"\"Create a new :py:class:`HookRecorder` for a PluginManager.\"\"\"\n pluginmanager.reprec = reprec = HookRecorder(pluginmanager)\n self._request.addfinalizer(reprec.finish_recording)\n return reprec\n\n def chdir(self) -> None:\n \"\"\"Cd into the temporary directory.\n\n This is done automatically upon instantiation.\n \"\"\"\n os.chdir(self.path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester._makefile_Pytester._makefile.return.ret": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester._makefile_Pytester._makefile.return.ret", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 749, "end_line": 781, "span_ids": ["Pytester._makefile"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def _makefile(\n self,\n ext: str,\n lines: Sequence[Union[Any, bytes]],\n files: Dict[str, str],\n encoding: str = \"utf-8\",\n ) -> Path:\n 
items = list(files.items())\n\n if ext and not ext.startswith(\".\"):\n raise ValueError(\n f\"pytester.makefile expects a file extension, try .{ext} instead of {ext}\"\n )\n\n def to_text(s: Union[Any, bytes]) -> str:\n return s.decode(encoding) if isinstance(s, bytes) else str(s)\n\n if lines:\n source = \"\\n\".join(to_text(x) for x in lines)\n basename = self._name\n items.insert(0, (basename, source))\n\n ret = None\n for basename, value in items:\n p = self.path.joinpath(basename).with_suffix(ext)\n p.parent.mkdir(parents=True, exist_ok=True)\n source_ = Source(value)\n source = \"\\n\".join(to_text(line) for line in source_.lines)\n p.write_text(source.strip(), encoding=encoding)\n if ret is None:\n ret = p\n assert ret is not None\n return ret", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makefile_Pytester.makefile.return.self__makefile_ext_args_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makefile_Pytester.makefile.return.self__makefile_ext_args_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 783, "end_line": 811, "span_ids": ["Pytester.makefile"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def makefile(self, ext: str, *args: str, **kwargs: str) -> Path:\n r\"\"\"Create new text file(s) in the test directory.\n\n :param str ext:\n The extension the file(s) should use, including the dot, e.g. `.py`.\n :param args:\n All args are treated as strings and joined using newlines.\n The result is written as contents to the file. The name of the\n file is based on the test function requesting this fixture.\n :param kwargs:\n Each keyword is the name of a file, while the value of it will\n be written as contents of the file.\n\n Examples:\n\n .. code-block:: python\n\n pytester.makefile(\".txt\", \"line1\", \"line2\")\n\n pytester.makefile(\".ini\", pytest=\"[pytest]\\naddopts=-rs\\n\")\n\n To create binary files, use :meth:`pathlib.Path.write_bytes` directly:\n\n .. 
code-block:: python\n\n filename = pytester.path.joinpath(\"foo.bin\")\n filename.write_bytes(b\"...\")\n \"\"\"\n return self._makefile(ext, args, kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makeconftest_Pytester.makepyprojecttoml.return.self_makefile_toml_py": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makeconftest_Pytester.makepyprojecttoml.return.self_makefile_toml_py", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 813, "end_line": 831, "span_ids": ["Pytester.makepyprojecttoml", "Pytester.makeini", "Pytester.makeconftest", "Pytester.getinicfg"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def makeconftest(self, source: str) -> Path:\n \"\"\"Write a contest.py file with 'source' as contents.\"\"\"\n return self.makepyfile(conftest=source)\n\n def makeini(self, source: str) -> Path:\n \"\"\"Write a tox.ini file with 'source' as contents.\"\"\"\n return self.makefile(\".ini\", tox=source)\n\n def getinicfg(self, source: str) -> SectionWrapper:\n \"\"\"Return the pytest section from the tox.ini config file.\"\"\"\n p = self.makeini(source)\n return IniConfig(str(p))[\"pytest\"]\n\n def makepyprojecttoml(self, source: str) -> Path:\n \"\"\"Write a pyproject.toml file with 'source' as contents.\n\n .. versionadded:: 6.0\n \"\"\"\n return self.makefile(\".toml\", pyproject=source)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makepyfile_Pytester.makepyfile.return.self__makefile_py_arg": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.makepyfile_Pytester.makepyfile.return.self__makefile_py_arg", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 833, "end_line": 851, "span_ids": ["Pytester.makepyfile"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def makepyfile(self, *args, **kwargs) -> Path:\n r\"\"\"Shortcut for .makefile() with a .py extension.\n\n Defaults to the test name with a '.py' extension, e.g test_foobar.py, overwriting\n existing files.\n\n Examples:\n\n .. 
code-block:: python\n\n def test_something(pytester):\n # Initial file is created test_something.py.\n pytester.makepyfile(\"foobar\")\n # To create multiple files, pass kwargs accordingly.\n pytester.makepyfile(custom=\"foobar\")\n # At this point, both 'test_something.py' & 'custom.py' exist in the test directory.\n\n \"\"\"\n return self._makefile(\".py\", args, kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.maketxtfile_Pytester.maketxtfile.return.self__makefile_txt_ar": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.maketxtfile_Pytester.maketxtfile.return.self__makefile_txt_ar", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 853, "end_line": 871, "span_ids": ["Pytester.maketxtfile"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def maketxtfile(self, *args, **kwargs) -> Path:\n r\"\"\"Shortcut for .makefile() with a .txt extension.\n\n Defaults to the test name with a '.txt' extension, e.g test_foobar.txt, overwriting\n existing files.\n\n Examples:\n\n .. code-block:: python\n\n def test_something(pytester):\n # Initial file is created test_something.txt.\n pytester.maketxtfile(\"foobar\")\n # To create multiple files, pass kwargs accordingly.\n pytester.maketxtfile(custom=\"foobar\")\n # At this point, both 'test_something.txt' & 'custom.txt' exist in the test directory.\n\n \"\"\"\n return self._makefile(\".txt\", args, kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.syspathinsert_Pytester.mkpydir.return.p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.syspathinsert_Pytester.mkpydir.return.p", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 873, "end_line": 901, "span_ids": ["Pytester.mkdir", "Pytester.mkpydir", "Pytester.syspathinsert"], "tokens": 219}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def syspathinsert(\n self, path: Optional[Union[str, \"os.PathLike[str]\"]] = None\n ) -> None:\n \"\"\"Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`.\n\n This is undone automatically when this object dies at the end of each\n test.\n \"\"\"\n if path is None:\n path = self.path\n\n 
self._monkeypatch.syspath_prepend(str(path))\n\n def mkdir(self, name: str) -> Path:\n \"\"\"Create a new (sub)directory.\"\"\"\n p = self.path / name\n p.mkdir()\n return p\n\n def mkpydir(self, name: str) -> Path:\n \"\"\"Create a new python package.\n\n This creates a (sub)directory with an empty ``__init__.py`` file so it\n gets recognised as a Python package.\n \"\"\"\n p = self.path / name\n p.mkdir()\n p.joinpath(\"__init__.py\").touch()\n return p", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.copy_example_Pytester.copy_example.if_example_path_is_dir_.else_.raise_LookupError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.copy_example_Pytester.copy_example.if_example_path_is_dir_.else_.raise_LookupError_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 903, "end_line": 948, "span_ids": ["Pytester.copy_example"], "tokens": 412}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def copy_example(self, name: Optional[str] = None) -> Path:\n \"\"\"Copy file from project's directory into the testdir.\n\n :param str name: The name of the file to copy.\n :return: path to the copied directory (inside ``self.path``).\n\n \"\"\"\n example_dir = self._request.config.getini(\"pytester_example_dir\")\n if example_dir is None:\n raise ValueError(\"pytester_example_dir is unset, can't copy examples\")\n example_dir = Path(str(self._request.config.rootdir)) / example_dir\n\n for extra_element in self._request.node.iter_markers(\"pytester_example_path\"):\n assert extra_element.args\n example_dir = example_dir.joinpath(*extra_element.args)\n\n if name is None:\n func_name = self._name\n maybe_dir = example_dir / func_name\n maybe_file = example_dir / (func_name + \".py\")\n\n if maybe_dir.is_dir():\n example_path = maybe_dir\n elif maybe_file.is_file():\n example_path = maybe_file\n else:\n raise LookupError(\n f\"{func_name} can't be found as module or package in {example_dir}\"\n )\n else:\n example_path = example_dir.joinpath(name)\n\n if example_path.is_dir() and not example_path.joinpath(\"__init__.py\").is_file():\n # TODO: py.path.local.copy can copy files to existing directories,\n # while with shutil.copytree the destination directory cannot exist,\n # we will need to roll our own in order to drop py.path.local completely\n py.path.local(example_path).copy(py.path.local(self.path))\n return self.path\n elif example_path.is_file():\n result = self.path.joinpath(example_path.name)\n shutil.copy(example_path, result)\n return result\n else:\n raise LookupError(\n f'example \"{example_path}\" is not found as a file or directory'\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.Session_Pytester.getnode.return.res": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.Session_Pytester.getnode.return.res", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 950, "end_line": 969, "span_ids": ["Pytester.getnode", "Pytester:7"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n Session = Session\n\n def getnode(\n self, config: Config, arg: Union[str, \"os.PathLike[str]\"]\n ) -> Optional[Union[Collector, Item]]:\n \"\"\"Return the collection node of a file.\n\n :param _pytest.config.Config config:\n A pytest config.\n See :py:meth:`parseconfig` and :py:meth:`parseconfigure` for creating it.\n :param py.path.local arg:\n Path to the file.\n \"\"\"\n session = Session.from_config(config)\n assert \"::\" not in str(arg)\n p = py.path.local(arg)\n config.hook.pytest_sessionstart(session=session)\n res = session.perform_collect([str(p)], genitems=False)[0]\n config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK)\n return res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.getpathnode_Pytester.getpathnode.return.res": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.getpathnode_Pytester.getpathnode.return.res", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 971, "end_line": 986, "span_ids": ["Pytester.getpathnode"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def getpathnode(self, path: Union[str, \"os.PathLike[str]\"]):\n \"\"\"Return the collection node of a file.\n\n This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to\n create the (configured) pytest Config instance.\n\n :param py.path.local path: Path to the file.\n \"\"\"\n path = py.path.local(path)\n config = self.parseconfigure(path)\n session = Session.from_config(config)\n x = session.fspath.bestrelpath(path)\n config.hook.pytest_sessionstart(session=session)\n res = session.perform_collect([x], genitems=False)[0]\n config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK)\n return res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.genitems_Pytester.runitem.return.runner_item_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.genitems_Pytester.runitem.return.runner_item_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 988, "end_line": 1013, "span_ids": ["Pytester.genitems", "Pytester.runitem"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def genitems(self, colitems: Sequence[Union[Item, Collector]]) -> List[Item]:\n \"\"\"Generate all test items from a collection node.\n\n This recurses into the collection node and returns a list of all the\n test items contained within.\n \"\"\"\n session = colitems[0].session\n result: List[Item] = []\n for colitem in colitems:\n result.extend(session.genitems(colitem))\n return result\n\n def runitem(self, source: str) -> Any:\n \"\"\"Run the \"test_func\" Item.\n\n The calling test instance (class containing the test method) must\n provide a ``.getrunner()`` method which should return a runner which\n can run the test protocol for a single item, e.g.\n :py:func:`_pytest.runner.runtestprotocol`.\n \"\"\"\n # used from runner functional tests\n item = self.getitem(source)\n # the test class where we are called from wants to provide the runner\n testclassinstance = self._request.instance\n runner = testclassinstance.getrunner()\n return runner(item)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_runsource_Pytester.inline_runsource.return.self_inline_run_values_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_runsource_Pytester.inline_runsource.return.self_inline_run_values_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1015, "end_line": 1030, "span_ids": ["Pytester.inline_runsource"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def inline_runsource(self, source: str, *cmdlineargs) -> HookRecorder:\n \"\"\"Run a test module in process using ``pytest.main()``.\n\n This run writes \"source\" into a temporary file and runs\n ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance\n for the result.\n\n :param source: The source code of the test module.\n\n :param cmdlineargs: Any extra command line arguments to use.\n\n :returns: :py:class:`HookRecorder` instance of the result.\n \"\"\"\n p = self.makepyfile(source)\n values = list(cmdlineargs) + [p]\n 
return self.inline_run(*values)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_genitems_Pytester.inline_genitems.return.items_rec": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_genitems_Pytester.inline_genitems.return.items_rec", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1032, "end_line": 1041, "span_ids": ["Pytester.inline_genitems"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def inline_genitems(self, *args) -> Tuple[List[Item], HookRecorder]:\n \"\"\"Run ``pytest.main(['--collectonly'])`` in-process.\n\n Runs the :py:func:`pytest.main` function to run all of pytest inside\n the test process itself like :py:meth:`inline_run`, but returns a\n tuple of the collected items and a :py:class:`HookRecorder` instance.\n \"\"\"\n rec = self.inline_run(\"--collect-only\", *args)\n items = [x.item for x in rec.getcalls(\"pytest_itemcollected\")]\n return items, rec", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_run_Pytester.inline_run.try_.finally_.for_finalizer_in_finalize.finalizer_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.inline_run_Pytester.inline_run.try_.finally_.for_finalizer_in_finalize.finalizer_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1043, "end_line": 1114, "span_ids": ["Pytester.inline_run.try_.Collect", "Pytester.inline_run", "Pytester.inline_run.try_.Collect.pytest_configure", "Pytester.inline_run.try_.if_len_rec_1_.else_.reprec", "Pytester.inline_run.try_.if_len_rec_1_.else_.reprec:2"], "tokens": 616}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def inline_run(\n self,\n *args: Union[str, \"os.PathLike[str]\"],\n plugins=(),\n no_reraise_ctrlc: bool = False,\n ) -> HookRecorder:\n \"\"\"Run ``pytest.main()`` in-process, returning a HookRecorder.\n\n Runs the :py:func:`pytest.main` function to run all of pytest inside\n the test process itself. 
This means it can return a\n :py:class:`HookRecorder` instance which gives more detailed results\n from that run than can be done by matching stdout/stderr from\n :py:meth:`runpytest`.\n\n :param args:\n Command line arguments to pass to :py:func:`pytest.main`.\n :param plugins:\n Extra plugin instances the ``pytest.main()`` instance should use.\n :param no_reraise_ctrlc:\n Typically we reraise keyboard interrupts from the child run. If\n True, the KeyboardInterrupt exception is captured.\n\n :returns: A :py:class:`HookRecorder` instance.\n \"\"\"\n # (maybe a cpython bug?) the importlib cache sometimes isn't updated\n # properly between file creation and inline_run (especially if imports\n # are interspersed with file creation)\n importlib.invalidate_caches()\n\n plugins = list(plugins)\n finalizers = []\n try:\n # Any sys.module or sys.path changes done while running pytest\n # inline should be reverted after the test run completes to avoid\n # clashing with later inline tests run within the same pytest test,\n # e.g. just because they use matching test module names.\n finalizers.append(self.__take_sys_modules_snapshot().restore)\n finalizers.append(SysPathsSnapshot().restore)\n\n # Important note:\n # - our tests should not leave any other references/registrations\n # laying around other than possibly loaded test modules\n # referenced from sys.modules, as nothing will clean those up\n # automatically\n\n rec = []\n\n class Collect:\n def pytest_configure(x, config: Config) -> None:\n rec.append(self.make_hook_recorder(config.pluginmanager))\n\n plugins.append(Collect())\n ret = main([str(x) for x in args], plugins=plugins)\n if len(rec) == 1:\n reprec = rec.pop()\n else:\n\n class reprec: # type: ignore\n pass\n\n reprec.ret = ret\n\n # Typically we reraise keyboard interrupts from the child run\n # because it's our user requesting interruption of the testing.\n if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc:\n calls = reprec.getcalls(\"pytest_keyboard_interrupt\")\n if calls and calls[-1].excinfo.type == KeyboardInterrupt:\n raise KeyboardInterrupt()\n return reprec\n finally:\n for finalizer in finalizers:\n finalizer()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_inprocess_Pytester.runpytest_inprocess.return.res": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_inprocess_Pytester.runpytest_inprocess.return.res", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1116, "end_line": 1158, "span_ids": ["Pytester.runpytest_inprocess", "Pytester.runpytest_inprocess.try_.try_.except_Exception_.reprec", "Pytester.runpytest_inprocess.try_.try_.except_SystemExit_as_e_.reprec", "Pytester.runpytest_inprocess.try_.try_.except_Exception_.reprec:2", "Pytester.runpytest_inprocess.try_.try_.except_SystemExit_as_e_.reprec:2"], "tokens": 297}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"@final\nclass Pytester:\n\n def runpytest_inprocess(\n self, *args: Union[str, \"os.PathLike[str]\"], **kwargs: Any\n ) -> RunResult:\n \"\"\"Return result of running pytest in-process, providing a similar\n interface to what self.runpytest() provides.\"\"\"\n syspathinsert = kwargs.pop(\"syspathinsert\", False)\n\n if syspathinsert:\n self.syspathinsert()\n now = timing.time()\n capture = _get_multicapture(\"sys\")\n capture.start_capturing()\n try:\n try:\n reprec = self.inline_run(*args, **kwargs)\n except SystemExit as e:\n ret = e.args[0]\n try:\n ret = ExitCode(e.args[0])\n except ValueError:\n pass\n\n class reprec: # type: ignore\n ret = ret\n\n except Exception:\n traceback.print_exc()\n\n class reprec: # type: ignore\n ret = ExitCode(3)\n\n finally:\n out, err = capture.readouterr()\n capture.stop_capturing()\n sys.stdout.write(out)\n sys.stderr.write(err)\n\n assert reprec.ret is not None\n res = RunResult(\n reprec.ret, out.splitlines(), err.splitlines(), timing.time() - now\n )\n res.reprec = reprec # type: ignore\n return res", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_Pytester._ensure_basetemp.return.new_args": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_Pytester._ensure_basetemp.return.new_args", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1160, "end_line": 1181, "span_ids": ["Pytester.runpytest", "Pytester._ensure_basetemp"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def runpytest(\n self, *args: Union[str, \"os.PathLike[str]\"], **kwargs: Any\n ) -> RunResult:\n \"\"\"Run pytest inline or in a subprocess, depending on the command line\n option \"--runpytest\" and return a :py:class:`RunResult`.\"\"\"\n new_args = self._ensure_basetemp(args)\n if self._method == \"inprocess\":\n return self.runpytest_inprocess(*new_args, **kwargs)\n elif self._method == \"subprocess\":\n return self.runpytest_subprocess(*new_args, **kwargs)\n raise RuntimeError(f\"Unrecognized runpytest option: {self._method}\")\n\n def _ensure_basetemp(\n self, args: Sequence[Union[str, \"os.PathLike[str]\"]]\n ) -> List[Union[str, \"os.PathLike[str]\"]]:\n new_args = list(args)\n for x in new_args:\n if str(x).startswith(\"--basetemp\"):\n break\n else:\n new_args.append(\"--basetemp=%s\" % self.path.parent.joinpath(\"basetemp\"))\n return new_args", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.parseconfig_Pytester.parseconfig.return.config": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.parseconfig_Pytester.parseconfig.return.config", "embedding": null, "metadata": 
{"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1183, "end_line": 1204, "span_ids": ["Pytester.parseconfig"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def parseconfig(self, *args: Union[str, \"os.PathLike[str]\"]) -> Config:\n \"\"\"Return a new pytest Config instance from given commandline args.\n\n This invokes the pytest bootstrapping code in _pytest.config to create\n a new :py:class:`_pytest.core.PluginManager` and call the\n pytest_cmdline_parse hook to create a new\n :py:class:`_pytest.config.Config` instance.\n\n If :py:attr:`plugins` has been populated they should be plugin modules\n to be registered with the PluginManager.\n \"\"\"\n import _pytest.config\n\n new_args = self._ensure_basetemp(args)\n new_args = [str(x) for x in new_args]\n\n config = _pytest.config._prepareconfig(new_args, self.plugins) # type: ignore[arg-type]\n # we don't know what the test will do with this half-setup config\n # object and thus we make sure it gets unconfigured properly in any\n # case (otherwise capturing could still be active, for example)\n self._request.addfinalizer(config._ensure_unconfigure)\n return config", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.parseconfigure_Pytester.getitems.return.self_genitems_modcol_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.parseconfigure_Pytester.getitems.return.self_genitems_modcol_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1206, "end_line": 1245, "span_ids": ["Pytester.getitems", "Pytester.getitem", "Pytester.parseconfigure"], "tokens": 344}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def parseconfigure(self, *args: Union[str, \"os.PathLike[str]\"]) -> Config:\n \"\"\"Return a new pytest configured Config instance.\n\n Returns a new :py:class:`_pytest.config.Config` instance like\n :py:meth:`parseconfig`, but also calls the pytest_configure hook.\n \"\"\"\n config = self.parseconfig(*args)\n config._do_configure()\n return config\n\n def getitem(\n self, source: Union[str, \"os.PathLike[str]\"], funcname: str = \"test_func\"\n ) -> Item:\n \"\"\"Return the test item for a test function.\n\n Writes the source to a python file and runs pytest's collection on\n the resulting module, returning the test item for the requested\n function name.\n\n :param source:\n The module source.\n :param funcname:\n The name of the test function for which to return a test item.\n \"\"\"\n items = self.getitems(source)\n for 
item in items:\n if item.name == funcname:\n return item\n assert 0, \"{!r} item not found in module:\\n{}\\nitems: {}\".format(\n funcname, source, items\n )\n\n def getitems(self, source: Union[str, \"os.PathLike[str]\"]) -> List[Item]:\n \"\"\"Return all test items collected from the module.\n\n Writes the source to a Python file and runs pytest's collection on\n the resulting module, returning all test items contained within.\n \"\"\"\n modcol = self.getmodulecol(source)\n return self.genitems([modcol])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.getmodulecol_Pytester.getmodulecol.return.self_getnode_config_path": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.getmodulecol_Pytester.getmodulecol.return.self_getnode_config_path", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1247, "end_line": 1279, "span_ids": ["Pytester.getmodulecol"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def getmodulecol(\n self,\n source: Union[str, \"os.PathLike[str]\"],\n configargs=(),\n *,\n withinit: bool = False,\n ):\n \"\"\"Return the module collection node for ``source``.\n\n Writes ``source`` to a file using :py:meth:`makepyfile` and then\n runs the pytest collection on it, returning the collection node for the\n test module.\n\n :param source:\n The source code of the module to collect.\n\n :param configargs:\n Any extra arguments to pass to :py:meth:`parseconfigure`.\n\n :param withinit:\n Whether to also write an ``__init__.py`` file to the same\n directory to ensure it is a package.\n \"\"\"\n if isinstance(source, os.PathLike):\n path = self.path.joinpath(source)\n assert not withinit, \"not supported for paths\"\n else:\n kw = {self._name: str(source)}\n path = self.makepyfile(**kw)\n if withinit:\n self.makepyfile(__init__=\"#\")\n self.config = config = self.parseconfigure(path, *configargs)\n return self.getnode(config, path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.collect_by_name_Pytester.collect_by_name.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.collect_by_name_Pytester.collect_by_name.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1281, "end_line": 1297, "span_ids": ["Pytester.collect_by_name"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def collect_by_name(\n self, modcol: Collector, name: str\n ) -> Optional[Union[Item, Collector]]:\n \"\"\"Return the collection node for name from the module collection.\n\n Searchs a module collection node for a collection node matching the\n given name.\n\n :param modcol: A module collection node; see :py:meth:`getmodulecol`.\n :param name: The name of the node to return.\n \"\"\"\n if modcol not in self._mod_collections:\n self._mod_collections[modcol] = list(modcol.collect())\n for colitem in self._mod_collections[modcol]:\n if colitem.name == name:\n return colitem\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.popen_Pytester.popen.return.popen": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.popen_Pytester.popen.return.popen", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1299, "end_line": 1335, "span_ids": ["Pytester.popen"], "tokens": 300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def popen(\n self,\n cmdargs: Sequence[Union[str, \"os.PathLike[str]\"]],\n stdout: Union[int, TextIO] = subprocess.PIPE,\n stderr: Union[int, TextIO] = subprocess.PIPE,\n stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN,\n **kw,\n ):\n \"\"\"Invoke :py:class:`subprocess.Popen`.\n\n Calls :py:class:`subprocess.Popen` making sure the current working\n directory is in ``PYTHONPATH``.\n\n You probably want to use :py:meth:`run` instead.\n \"\"\"\n env = os.environ.copy()\n env[\"PYTHONPATH\"] = os.pathsep.join(\n filter(None, [os.getcwd(), env.get(\"PYTHONPATH\", \"\")])\n )\n kw[\"env\"] = env\n\n if stdin is self.CLOSE_STDIN:\n kw[\"stdin\"] = subprocess.PIPE\n elif isinstance(stdin, bytes):\n kw[\"stdin\"] = subprocess.PIPE\n else:\n kw[\"stdin\"] = stdin\n\n popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)\n if stdin is self.CLOSE_STDIN:\n assert popen.stdin is not None\n popen.stdin.close()\n elif isinstance(stdin, bytes):\n assert popen.stdin is not None\n popen.stdin.write(stdin)\n\n return popen", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.run_Pytester.run.return.RunResult_ret_out_err_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.run_Pytester.run.return.RunResult_ret_out_err_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1337, "end_line": 1421, "span_ids": 
["Pytester.run"], "tokens": 671}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def run(\n self,\n *cmdargs: Union[str, \"os.PathLike[str]\"],\n timeout: Optional[float] = None,\n stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN,\n ) -> RunResult:\n \"\"\"Run a command with arguments.\n\n Run a process using :py:class:`subprocess.Popen` saving the stdout and\n stderr.\n\n :param cmdargs:\n The sequence of arguments to pass to :py:class:`subprocess.Popen`,\n with path-like objects being converted to :py:class:`str`\n automatically.\n :param timeout:\n The period in seconds after which to timeout and raise\n :py:class:`Pytester.TimeoutExpired`.\n :param stdin:\n Optional standard input.\n\n - If it is :py:attr:`CLOSE_STDIN` (Default), then this method calls\n :py:class:`subprocess.Popen` with ``stdin=subprocess.PIPE``, and\n the standard input is closed immediately after the new command is\n started.\n\n - If it is of type :py:class:`bytes`, these bytes are sent to the\n standard input of the command.\n\n - Otherwise, it is passed through to :py:class:`subprocess.Popen`.\n For further information in this case, consult the document of the\n ``stdin`` parameter in :py:class:`subprocess.Popen`.\n \"\"\"\n __tracebackhide__ = True\n\n cmdargs = tuple(\n os.fspath(arg) if isinstance(arg, os.PathLike) else arg for arg in cmdargs\n )\n p1 = self.path.joinpath(\"stdout\")\n p2 = self.path.joinpath(\"stderr\")\n print(\"running:\", *cmdargs)\n print(\" in:\", Path.cwd())\n\n with p1.open(\"w\", encoding=\"utf8\") as f1, p2.open(\"w\", encoding=\"utf8\") as f2:\n now = timing.time()\n popen = self.popen(\n cmdargs,\n stdin=stdin,\n stdout=f1,\n stderr=f2,\n close_fds=(sys.platform != \"win32\"),\n )\n if popen.stdin is not None:\n popen.stdin.close()\n\n def handle_timeout() -> None:\n __tracebackhide__ = True\n\n timeout_message = (\n \"{seconds} second timeout expired running:\"\n \" {command}\".format(seconds=timeout, command=cmdargs)\n )\n\n popen.kill()\n popen.wait()\n raise self.TimeoutExpired(timeout_message)\n\n if timeout is None:\n ret = popen.wait()\n else:\n try:\n ret = popen.wait(timeout)\n except subprocess.TimeoutExpired:\n handle_timeout()\n\n with p1.open(encoding=\"utf8\") as f1, p2.open(encoding=\"utf8\") as f2:\n out = f1.read().splitlines()\n err = f2.read().splitlines()\n\n self._dump_lines(out, sys.stdout)\n self._dump_lines(err, sys.stderr)\n\n with contextlib.suppress(ValueError):\n ret = ExitCode(ret)\n return RunResult(ret, out, err, timing.time() - now)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester._dump_lines_Pytester.runpython_c.return.self_run_sys_executable_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester._dump_lines_Pytester.runpython_c.return.self_run_sys_executable_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1423, "end_line": 1439, 
"span_ids": ["Pytester._dump_lines", "Pytester.runpython", "Pytester._getpytestargs", "Pytester.runpython_c"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def _dump_lines(self, lines, fp):\n try:\n for line in lines:\n print(line, file=fp)\n except UnicodeEncodeError:\n print(f\"couldn't print to {fp} because of encoding\")\n\n def _getpytestargs(self) -> Tuple[str, ...]:\n return sys.executable, \"-mpytest\"\n\n def runpython(self, script: \"os.PathLike[str]\") -> RunResult:\n \"\"\"Run a python script using sys.executable as interpreter.\"\"\"\n return self.run(sys.executable, script)\n\n def runpython_c(self, command: str) -> RunResult:\n \"\"\"Run ``python -c \"command\"``.\"\"\"\n return self.run(sys.executable, \"-c\", command)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_subprocess_Pytester.runpytest_subprocess.return.self_run_args_timeout_t": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.runpytest_subprocess_Pytester.runpytest_subprocess.return.self_run_args_timeout_t", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1441, "end_line": 1465, "span_ids": ["Pytester.runpytest_subprocess"], "tokens": 277}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def runpytest_subprocess(\n self, *args: Union[str, \"os.PathLike[str]\"], timeout: Optional[float] = None\n ) -> RunResult:\n \"\"\"Run pytest as a subprocess with given arguments.\n\n Any plugins added to the :py:attr:`plugins` list will be added using the\n ``-p`` command line option. 
Additionally ``--basetemp`` is used to put\n any temporary files and directories in a numbered directory prefixed\n with \"runpytest-\" to not conflict with the normal numbered pytest\n location for temporary files and directories.\n\n :param args:\n The sequence of arguments to pass to the pytest subprocess.\n :param timeout:\n The period in seconds after which to timeout and raise\n :py:class:`Pytester.TimeoutExpired`.\n \"\"\"\n __tracebackhide__ = True\n p = make_numbered_dir(root=self.path, prefix=\"runpytest-\")\n args = (\"--basetemp=%s\" % p,) + args\n plugins = [x for x in self.plugins if isinstance(x, str)]\n if plugins:\n args = (\"-p\", plugins[0]) + args\n args = self._getpytestargs() + args\n return self.run(*args, timeout=timeout)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.spawn_pytest_Pytester.spawn_pytest.return.self_spawn_cmd_expect_ti": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.spawn_pytest_Pytester.spawn_pytest.return.self_spawn_cmd_expect_ti", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1467, "end_line": 1481, "span_ids": ["Pytester.spawn_pytest"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def spawn_pytest(\n self, string: str, expect_timeout: float = 10.0\n ) -> \"pexpect.spawn\":\n \"\"\"Run pytest using pexpect.\n\n This makes sure to use the right pytest and sets up the temporary\n directory locations.\n\n The pexpect child is returned.\n \"\"\"\n basetemp = self.path / \"temp-pexpect\"\n basetemp.mkdir()\n invoke = \" \".join(map(str, self._getpytestargs()))\n cmd = f\"{invoke} --basetemp={basetemp} {string}\"\n return self.spawn(cmd, expect_timeout=expect_timeout)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.spawn_Pytester.spawn.return.child": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Pytester.spawn_Pytester.spawn.return.child", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1483, "end_line": 1497, "span_ids": ["Pytester.spawn"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Pytester:\n\n def spawn(self, cmd: str, expect_timeout: float = 10.0) -> \"pexpect.spawn\":\n \"\"\"Run a command using pexpect.\n\n The 
pexpect child is returned.\n \"\"\"\n pexpect = importorskip(\"pexpect\", \"3.0\")\n if hasattr(sys, \"pypy_version_info\") and \"64\" in platform.machine():\n skip(\"pypy-64 bit not supported\")\n if not hasattr(pexpect, \"spawn\"):\n skip(\"pexpect.spawn not available\")\n logfile = self.path.joinpath(\"spawn.out\").open(\"wb\")\n\n child = pexpect.spawn(cmd, logfile=logfile, timeout=expect_timeout)\n self._request.addfinalizer(logfile.close)\n return child", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir_Testdir.finalize.return.self__pytester__finalize_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir_Testdir.finalize.return.self__pytester__finalize_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1518, "end_line": 1573, "span_ids": ["Testdir.make_hook_recorder", "Testdir.plugins_4", "Testdir.finalize", "Testdir", "Testdir.chdir", "Testdir.test_tmproot", "Testdir.monkeypatch", "Testdir.tmpdir", "Testdir.request", "Testdir.plugins"], "tokens": 410}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False, str=False, init=False)\nclass Testdir:\n \"\"\"\n Similar to :class:`Pytester`, but this class works with legacy py.path.local objects instead.\n\n All methods just forward to an internal :class:`Pytester` instance, converting results\n to `py.path.local` objects as necessary.\n \"\"\"\n\n __test__ = False\n\n CLOSE_STDIN: \"Final\" = Pytester.CLOSE_STDIN\n TimeoutExpired: \"Final\" = Pytester.TimeoutExpired\n Session: \"Final\" = Pytester.Session\n\n def __init__(self, pytester: Pytester, *, _ispytest: bool = False) -> None:\n check_ispytest(_ispytest)\n self._pytester = pytester\n\n @property\n def tmpdir(self) -> py.path.local:\n \"\"\"Temporary directory where tests are executed.\"\"\"\n return py.path.local(self._pytester.path)\n\n @property\n def test_tmproot(self) -> py.path.local:\n return py.path.local(self._pytester._test_tmproot)\n\n @property\n def request(self):\n return self._pytester._request\n\n @property\n def plugins(self):\n return self._pytester.plugins\n\n @plugins.setter\n def plugins(self, plugins):\n self._pytester.plugins = plugins\n\n @property\n def monkeypatch(self) -> MonkeyPatch:\n return self._pytester._monkeypatch\n\n def make_hook_recorder(self, pluginmanager) -> HookRecorder:\n \"\"\"See :meth:`Pytester.make_hook_recorder`.\"\"\"\n return self._pytester.make_hook_recorder(pluginmanager)\n\n def chdir(self) -> None:\n \"\"\"See :meth:`Pytester.chdir`.\"\"\"\n return self._pytester.chdir()\n\n def finalize(self) -> None:\n \"\"\"See :meth:`Pytester._finalize`.\"\"\"\n return self._pytester._finalize()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.makefile_Testdir.makefile.return.py_path_local_str_self__p": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.makefile_Testdir.makefile.return.py_path_local_str_self__p", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1575, "end_line": 1585, "span_ids": ["Testdir.makefile"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False, str=False, init=False)\nclass Testdir:\n\n def makefile(self, ext, *args, **kwargs) -> py.path.local:\n \"\"\"See :meth:`Pytester.makefile`.\"\"\"\n if ext and not ext.startswith(\".\"):\n # pytester.makefile is going to throw a ValueError in a way that\n # testdir.makefile did not, because\n # pathlib.Path is stricter suffixes than py.path\n # This ext arguments is likely user error, but since testdir has\n # allowed this, we will prepend \".\" as a workaround to avoid breaking\n # testdir usage that worked before\n ext = \".\" + ext\n return py.path.local(str(self._pytester.makefile(ext, *args, **kwargs)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.makeconftest_Testdir.runpytest_inprocess.return.self__pytester_runpytest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.makeconftest_Testdir.runpytest_inprocess.return.self__pytester_runpytest_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1587, "end_line": 1659, "span_ids": ["Testdir.runpytest_inprocess", "Testdir.genitems", "Testdir.copy_example", "Testdir.makeconftest", "Testdir.getpathnode", "Testdir.inline_runsource", "Testdir.inline_run", "Testdir.inline_genitems", "Testdir.maketxtfile", "Testdir.getnode", "Testdir.makepyfile", "Testdir.runitem", "Testdir.getinicfg", "Testdir.mkpydir", "Testdir.syspathinsert", "Testdir.mkdir", "Testdir.makeini", "Testdir.makepyprojecttoml"], "tokens": 794}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False, str=False, init=False)\nclass Testdir:\n\n def makeconftest(self, source) -> py.path.local:\n \"\"\"See :meth:`Pytester.makeconftest`.\"\"\"\n return py.path.local(str(self._pytester.makeconftest(source)))\n\n def makeini(self, source) -> py.path.local:\n \"\"\"See :meth:`Pytester.makeini`.\"\"\"\n return py.path.local(str(self._pytester.makeini(source)))\n\n def getinicfg(self, source: str) -> SectionWrapper:\n \"\"\"See :meth:`Pytester.getinicfg`.\"\"\"\n return self._pytester.getinicfg(source)\n\n def 
makepyprojecttoml(self, source) -> py.path.local:\n \"\"\"See :meth:`Pytester.makepyprojecttoml`.\"\"\"\n return py.path.local(str(self._pytester.makepyprojecttoml(source)))\n\n def makepyfile(self, *args, **kwargs) -> py.path.local:\n \"\"\"See :meth:`Pytester.makepyfile`.\"\"\"\n return py.path.local(str(self._pytester.makepyfile(*args, **kwargs)))\n\n def maketxtfile(self, *args, **kwargs) -> py.path.local:\n \"\"\"See :meth:`Pytester.maketxtfile`.\"\"\"\n return py.path.local(str(self._pytester.maketxtfile(*args, **kwargs)))\n\n def syspathinsert(self, path=None) -> None:\n \"\"\"See :meth:`Pytester.syspathinsert`.\"\"\"\n return self._pytester.syspathinsert(path)\n\n def mkdir(self, name) -> py.path.local:\n \"\"\"See :meth:`Pytester.mkdir`.\"\"\"\n return py.path.local(str(self._pytester.mkdir(name)))\n\n def mkpydir(self, name) -> py.path.local:\n \"\"\"See :meth:`Pytester.mkpydir`.\"\"\"\n return py.path.local(str(self._pytester.mkpydir(name)))\n\n def copy_example(self, name=None) -> py.path.local:\n \"\"\"See :meth:`Pytester.copy_example`.\"\"\"\n return py.path.local(str(self._pytester.copy_example(name)))\n\n def getnode(self, config: Config, arg) -> Optional[Union[Item, Collector]]:\n \"\"\"See :meth:`Pytester.getnode`.\"\"\"\n return self._pytester.getnode(config, arg)\n\n def getpathnode(self, path):\n \"\"\"See :meth:`Pytester.getpathnode`.\"\"\"\n return self._pytester.getpathnode(path)\n\n def genitems(self, colitems: List[Union[Item, Collector]]) -> List[Item]:\n \"\"\"See :meth:`Pytester.genitems`.\"\"\"\n return self._pytester.genitems(colitems)\n\n def runitem(self, source):\n \"\"\"See :meth:`Pytester.runitem`.\"\"\"\n return self._pytester.runitem(source)\n\n def inline_runsource(self, source, *cmdlineargs):\n \"\"\"See :meth:`Pytester.inline_runsource`.\"\"\"\n return self._pytester.inline_runsource(source, *cmdlineargs)\n\n def inline_genitems(self, *args):\n \"\"\"See :meth:`Pytester.inline_genitems`.\"\"\"\n return self._pytester.inline_genitems(*args)\n\n def inline_run(self, *args, plugins=(), no_reraise_ctrlc: bool = False):\n \"\"\"See :meth:`Pytester.inline_run`.\"\"\"\n return self._pytester.inline_run(\n *args, plugins=plugins, no_reraise_ctrlc=no_reraise_ctrlc\n )\n\n def runpytest_inprocess(self, *args, **kwargs) -> RunResult:\n \"\"\"See :meth:`Pytester.runpytest_inprocess`.\"\"\"\n return self._pytester.runpytest_inprocess(*args, **kwargs)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.runpytest_Testdir.__str__.return.str_self_tmpdir_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester.py_Testdir.runpytest_Testdir.__str__.return.str_self_tmpdir_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester.py", "file_name": "pytester.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1661, "end_line": 1734, "span_ids": ["Testdir.runpython_c", "Testdir.__repr__", "Testdir.run", "Testdir.spawn_pytest", "Testdir.parseconfigure", "Testdir.spawn", "Testdir.runpython", "Testdir.getitems", "Testdir.runpytest_subprocess", "Testdir.parseconfig", "Testdir.__str__", "Testdir.getmodulecol", "Testdir.popen", "Testdir.runpytest", "Testdir.getitem", "Testdir.collect_by_name"], "tokens": 701}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False, str=False, init=False)\nclass Testdir:\n\n def runpytest(self, *args, **kwargs) -> RunResult:\n \"\"\"See :meth:`Pytester.runpytest`.\"\"\"\n return self._pytester.runpytest(*args, **kwargs)\n\n def parseconfig(self, *args) -> Config:\n \"\"\"See :meth:`Pytester.parseconfig`.\"\"\"\n return self._pytester.parseconfig(*args)\n\n def parseconfigure(self, *args) -> Config:\n \"\"\"See :meth:`Pytester.parseconfigure`.\"\"\"\n return self._pytester.parseconfigure(*args)\n\n def getitem(self, source, funcname=\"test_func\"):\n \"\"\"See :meth:`Pytester.getitem`.\"\"\"\n return self._pytester.getitem(source, funcname)\n\n def getitems(self, source):\n \"\"\"See :meth:`Pytester.getitems`.\"\"\"\n return self._pytester.getitems(source)\n\n def getmodulecol(self, source, configargs=(), withinit=False):\n \"\"\"See :meth:`Pytester.getmodulecol`.\"\"\"\n return self._pytester.getmodulecol(\n source, configargs=configargs, withinit=withinit\n )\n\n def collect_by_name(\n self, modcol: Collector, name: str\n ) -> Optional[Union[Item, Collector]]:\n \"\"\"See :meth:`Pytester.collect_by_name`.\"\"\"\n return self._pytester.collect_by_name(modcol, name)\n\n def popen(\n self,\n cmdargs,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdin=CLOSE_STDIN,\n **kw,\n ):\n \"\"\"See :meth:`Pytester.popen`.\"\"\"\n return self._pytester.popen(cmdargs, stdout, stderr, stdin, **kw)\n\n def run(self, *cmdargs, timeout=None, stdin=CLOSE_STDIN) -> RunResult:\n \"\"\"See :meth:`Pytester.run`.\"\"\"\n return self._pytester.run(*cmdargs, timeout=timeout, stdin=stdin)\n\n def runpython(self, script) -> RunResult:\n \"\"\"See :meth:`Pytester.runpython`.\"\"\"\n return self._pytester.runpython(script)\n\n def runpython_c(self, command):\n \"\"\"See :meth:`Pytester.runpython_c`.\"\"\"\n return self._pytester.runpython_c(command)\n\n def runpytest_subprocess(self, *args, timeout=None) -> RunResult:\n \"\"\"See :meth:`Pytester.runpytest_subprocess`.\"\"\"\n return self._pytester.runpytest_subprocess(*args, timeout=timeout)\n\n def spawn_pytest(\n self, string: str, expect_timeout: float = 10.0\n ) -> \"pexpect.spawn\":\n \"\"\"See :meth:`Pytester.spawn_pytest`.\"\"\"\n return self._pytester.spawn_pytest(string, expect_timeout=expect_timeout)\n\n def spawn(self, cmd: str, expect_timeout: float = 10.0) -> \"pexpect.spawn\":\n \"\"\"See :meth:`Pytester.spawn`.\"\"\"\n return self._pytester.spawn(cmd, expect_timeout=expect_timeout)\n\n def __repr__(self) -> str:\n return f\"\"\n\n def __str__(self) -> str:\n return str(self.tmpdir)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester_assertions.py__Helper_plugin_for_pyte_assertoutcome.assert_obtained_expect": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester_assertions.py__Helper_plugin_for_pyte_assertoutcome.assert_obtained_expect", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester_assertions.py", "file_name": "pytester_assertions.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 34, 
"span_ids": ["assertoutcome", "docstring", "imports"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Helper plugin for pytester; should not be loaded on its own.\"\"\"\n# This plugin contains assertions used by pytester. pytester cannot\n# contain them itself, since it is imported by the `pytest` module,\n# hence cannot be subject to assertion rewriting, which requires a\n# module to not be already imported.\nfrom typing import Dict\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import Union\n\nfrom _pytest.reports import CollectReport\nfrom _pytest.reports import TestReport\n\n\ndef assertoutcome(\n outcomes: Tuple[\n Sequence[TestReport],\n Sequence[Union[CollectReport, TestReport]],\n Sequence[Union[CollectReport, TestReport]],\n ],\n passed: int = 0,\n skipped: int = 0,\n failed: int = 0,\n) -> None:\n __tracebackhide__ = True\n\n realpassed, realskipped, realfailed = outcomes\n obtained = {\n \"passed\": len(realpassed),\n \"skipped\": len(realskipped),\n \"failed\": len(realfailed),\n }\n expected = {\"passed\": passed, \"skipped\": skipped, \"failed\": failed}\n assert obtained == expected, outcomes", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester_assertions.py_assert_outcomes_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/pytester_assertions.py_assert_outcomes_", "embedding": null, "metadata": {"file_path": "src/_pytest/pytester_assertions.py", "file_name": "pytester_assertions.py", "file_type": "text/x-python", "category": "implementation", "start_line": 37, "end_line": 67, "span_ids": ["assert_outcomes"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def assert_outcomes(\n outcomes: Dict[str, int],\n passed: int = 0,\n skipped: int = 0,\n failed: int = 0,\n errors: int = 0,\n xpassed: int = 0,\n xfailed: int = 0,\n) -> None:\n \"\"\"Assert that the specified outcomes appear with the respective\n numbers (0 means it didn't occur) in the text output from a test run.\"\"\"\n __tracebackhide__ = True\n\n obtained = {\n \"passed\": outcomes.get(\"passed\", 0),\n \"skipped\": outcomes.get(\"skipped\", 0),\n \"failed\": outcomes.get(\"failed\", 0),\n \"errors\": outcomes.get(\"errors\", 0),\n \"xpassed\": outcomes.get(\"xpassed\", 0),\n \"xfailed\": outcomes.get(\"xfailed\", 0),\n }\n expected = {\n \"passed\": passed,\n \"skipped\": skipped,\n \"failed\": failed,\n \"errors\": errors,\n \"xpassed\": xpassed,\n \"xfailed\": xfailed,\n }\n assert obtained == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__Python_test_discovery__if_TYPE_CHECKING_.from__pytest_fixtures_imp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__Python_test_discovery__if_TYPE_CHECKING_.from__pytest_fixtures_imp", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 80, "span_ids": ["imports:74", "impl", "docstring", "imports"], "tokens": 580}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Python test discovery, setup and run of test functions.\"\"\"\nimport enum\nimport fnmatch\nimport inspect\nimport itertools\nimport os\nimport sys\nimport types\nimport warnings\nfrom collections import Counter\nfrom collections import defaultdict\nfrom functools import partial\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import Iterable\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport py\n\nimport _pytest\nfrom _pytest import fixtures\nfrom _pytest import nodes\nfrom _pytest._code import filter_traceback\nfrom _pytest._code import getfslineno\nfrom _pytest._code.code import ExceptionInfo\nfrom _pytest._code.code import TerminalRepr\nfrom _pytest._io import TerminalWriter\nfrom _pytest._io.saferepr import saferepr\nfrom _pytest.compat import ascii_escaped\nfrom _pytest.compat import final\nfrom _pytest.compat import get_default_arg_names\nfrom _pytest.compat import get_real_func\nfrom _pytest.compat import getimfunc\nfrom _pytest.compat import getlocation\nfrom _pytest.compat import is_async_function\nfrom _pytest.compat import is_generator\nfrom _pytest.compat import NOTSET\nfrom _pytest.compat import REGEX_TYPE\nfrom _pytest.compat import safe_getattr\nfrom _pytest.compat import safe_isclass\nfrom _pytest.compat import STRING_TYPES\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config import hookimpl\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.deprecated import FSCOLLECTOR_GETHOOKPROXY_ISINITPATH\nfrom _pytest.fixtures import FuncFixtureInfo\nfrom _pytest.main import Session\nfrom _pytest.mark import MARK_GEN\nfrom _pytest.mark import ParameterSet\nfrom _pytest.mark.structures import get_unpacked_marks\nfrom _pytest.mark.structures import Mark\nfrom _pytest.mark.structures import MarkDecorator\nfrom _pytest.mark.structures import normalize_mark_list\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import skip\nfrom _pytest.pathlib import bestrelpath\nfrom _pytest.pathlib import fnmatch_ex\nfrom _pytest.pathlib import import_path\nfrom _pytest.pathlib import ImportPathMismatchError\nfrom _pytest.pathlib import parts\nfrom _pytest.pathlib import visit\nfrom _pytest.warning_types import PytestCollectionWarning\nfrom _pytest.warning_types import PytestUnhandledCoroutineWarning\n\nif TYPE_CHECKING:\n 
from typing_extensions import Literal\n from _pytest.fixtures import _Scope", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_cmdline_main_pytest_configure.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_pytest_cmdline_main_pytest_configure.None_1", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 129, "end_line": 161, "span_ids": ["pytest_cmdline_main", "pytest_configure", "pytest_generate_tests"], "tokens": 327}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:\n if config.option.showfixtures:\n showfixtures(config)\n return 0\n if config.option.show_fixtures_per_test:\n show_fixtures_per_test(config)\n return 0\n return None\n\n\ndef pytest_generate_tests(metafunc: \"Metafunc\") -> None:\n for marker in metafunc.definition.iter_markers(name=\"parametrize\"):\n metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker)\n\n\ndef pytest_configure(config: Config) -> None:\n config.addinivalue_line(\n \"markers\",\n \"parametrize(argnames, argvalues): call a test function multiple \"\n \"times passing in different arguments in turn. argvalues generally \"\n \"needs to be a list of values if argnames specifies only one name \"\n \"or a list of tuples of values if argnames specifies multiple names. \"\n \"Example: @parametrize('arg1', [1,2]) would lead to two calls of the \"\n \"decorated test function, one with arg1=1 and another with arg1=2.\"\n \"see https://docs.pytest.org/en/stable/parametrize.html for more info \"\n \"and examples.\",\n )\n config.addinivalue_line(\n \"markers\",\n \"usefixtures(fixturename1, fixturename2, ...): mark tests as needing \"\n \"all of the specified fixtures. 
see \"\n \"https://docs.pytest.org/en/stable/fixture.html#usefixtures \",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__As_an_optimization_the_del__EmptyClass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__As_an_optimization_the_del__EmptyClass", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 346, "end_line": 362, "span_ids": ["impl:2", "PyobjMixin.reportinfo", "_EmptyClass", "impl:3"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# As an optimization, these builtin attribute names are pre-ignored when\n# iterating over an object during collection -- the pytest_pycollect_makeitem\n# hook is not called for them.\n# fmt: off\nclass _EmptyClass: pass # noqa: E701\nIGNORED_ATTRIBUTES = frozenset.union( # noqa: E305\n frozenset(),\n # Module.\n dir(types.ModuleType(\"empty_module\")),\n # Some extra module attributes the above doesn't catch.\n {\"__builtins__\", \"__file__\", \"__cached__\"},\n # Class.\n dir(_EmptyClass),\n # Instance.\n dir(_EmptyClass()),\n)\ndel _EmptyClass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__fmt_on_PyCollector.istestclass.return.self_classnamefilter_name": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py__fmt_on_PyCollector.istestclass.return.self_classnamefilter_name", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 363, "end_line": 395, "span_ids": ["PyCollector.istestfunction", "PyCollector.isnosetest", "PyCollector.istestclass", "PyCollector", "impl:3", "PyCollector.classnamefilter", "PyCollector.funcnamefilter"], "tokens": 306}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# fmt: on\n\n\nclass PyCollector(PyobjMixin, nodes.Collector):\n def funcnamefilter(self, name: str) -> bool:\n return self._matches_prefix_or_glob_option(\"python_functions\", name)\n\n def isnosetest(self, obj: object) -> bool:\n \"\"\"Look for the __test__ attribute, which is applied by the\n @nose.tools.istest decorator.\n \"\"\"\n # We explicitly check for \"is True\" here to not mistakenly treat\n # classes with a custom __getattr__ returning something truthy (like a\n # function) as test classes.\n return safe_getattr(obj, \"__test__\", False) is True\n\n def classnamefilter(self, name: str) -> bool:\n return 
self._matches_prefix_or_glob_option(\"python_classes\", name)\n\n def istestfunction(self, obj: object, name: str) -> bool:\n if self.funcnamefilter(name) or self.isnosetest(obj):\n if isinstance(obj, staticmethod):\n # staticmethods need to be unwrapped.\n obj = safe_getattr(obj, \"__func__\", False)\n return (\n safe_getattr(obj, \"__call__\", False)\n and fixtures.getfixturemarker(obj) is None\n )\n else:\n return False\n\n def istestclass(self, obj: object, name: str) -> bool:\n return self.classnamefilter(name) or self.isnosetest(obj)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._inject_setup_function_fixture_Module._inject_setup_function_fixture.if_setup_function_is_None.return": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._inject_setup_function_fixture_Module._inject_setup_function_fixture.if_setup_function_is_None.return", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 545, "end_line": 557, "span_ids": ["Module._inject_setup_function_fixture"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Module(nodes.File, PyCollector):\n\n def _inject_setup_function_fixture(self) -> None:\n \"\"\"Inject a hidden autouse, function scoped fixture into the collected module object\n that invokes setup_function/teardown_function if either or both are available.\n\n Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with\n other fixtures (#517).\n \"\"\"\n setup_function = _get_first_non_fixture_func(self.obj, (\"setup_function\",))\n teardown_function = _get_first_non_fixture_func(\n self.obj, (\"teardown_function\",)\n )\n if setup_function is None and teardown_function is None:\n return\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._inject_setup_function_fixture.xunit_setup_function_fixture_Module._inject_setup_function_fixture.self.obj.__pytest_setup_function.xunit_setup_function_fixt": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Module._inject_setup_function_fixture.xunit_setup_function_fixture_Module._inject_setup_function_fixture.self.obj.__pytest_setup_function.xunit_setup_function_fixt", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 559, "end_line": 577, "span_ids": ["Module._inject_setup_function_fixture"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Module(nodes.File, PyCollector):\n\n def _inject_setup_function_fixture(self) -> None:\n # ... other code\n\n @fixtures.fixture(\n autouse=True,\n scope=\"function\",\n # Use a unique name to speed up lookup.\n name=f\"xunit_setup_function_fixture_{self.obj.__name__}\",\n )\n def xunit_setup_function_fixture(request) -> Generator[None, None, None]:\n if request.instance is not None:\n # in this case we are bound to an instance, so we need to let\n # setup_method handle this\n yield\n return\n if setup_function is not None:\n _call_with_optional_argument(setup_function, request.function)\n yield\n if teardown_function is not None:\n _call_with_optional_argument(teardown_function, request.function)\n\n self.obj.__pytest_setup_function = xunit_setup_function_fixture", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.setup_Package.setup.if_teardown_module_is_not.self_addfinalizer_func_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.setup_Package.setup.if_teardown_module_is_not.self_addfinalizer_func_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 645, "end_line": 659, "span_ids": ["Package.setup"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Package(Module):\n\n def setup(self) -> None:\n # Not using fixtures to call setup_module here because autouse fixtures\n # from packages are not called automatically (#4085).\n setup_module = _get_first_non_fixture_func(\n self.obj, (\"setUpModule\", \"setup_module\")\n )\n if setup_module is not None:\n _call_with_optional_argument(setup_module, self.obj)\n\n teardown_module = 
_get_first_non_fixture_func(\n self.obj, (\"tearDownModule\", \"teardown_module\")\n )\n if teardown_module is not None:\n func = partial(_call_with_optional_argument, teardown_module, self.obj)\n self.addfinalizer(func)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.gethookproxy_Package._recurse.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.gethookproxy_Package._recurse.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 661, "end_line": 680, "span_ids": ["Package.gethookproxy", "Package.isinitpath", "Package._recurse"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Package(Module):\n\n def gethookproxy(self, fspath: \"os.PathLike[str]\"):\n warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)\n return self.session.gethookproxy(fspath)\n\n def isinitpath(self, path: Union[str, \"os.PathLike[str]\"]) -> bool:\n warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)\n return self.session.isinitpath(path)\n\n def _recurse(self, direntry: \"os.DirEntry[str]\") -> bool:\n if direntry.name == \"__pycache__\":\n return False\n fspath = Path(direntry.path)\n path = py.path.local(fspath)\n ihook = self.session.gethookproxy(fspath.parent)\n if ihook.pytest_ignore_collect(fspath=fspath, path=path, config=self.config):\n return False\n norecursepatterns = self.config.getini(\"norecursedirs\")\n if any(fnmatch_ex(pat, fspath) for pat in norecursepatterns):\n return False\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package._collectfile_Package._collectfile._type_ignore_no_any_ret": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package._collectfile_Package._collectfile._type_ignore_no_any_ret", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 682, "end_line": 707, "span_ids": ["Package._collectfile"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Package(Module):\n\n def _collectfile(\n self, fspath: Path, handle_dupes: bool = True\n ) -> Sequence[nodes.Collector]:\n path = py.path.local(fspath)\n assert (\n fspath.is_file()\n ), \"{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})\".format(\n path, fspath.is_dir(), fspath.exists(), fspath.is_symlink()\n )\n 
ihook = self.session.gethookproxy(fspath)\n if not self.session.isinitpath(fspath):\n if ihook.pytest_ignore_collect(\n fspath=fspath, path=path, config=self.config\n ):\n return ()\n\n if handle_dupes:\n keepduplicates = self.config.getoption(\"keepduplicates\")\n if not keepduplicates:\n duplicate_paths = self.config.pluginmanager._duplicatepaths\n if fspath in duplicate_paths:\n return ()\n else:\n duplicate_paths.add(fspath)\n\n return ihook.pytest_collect_file(fspath=fspath, path=path, parent=self) # type: ignore[no-any-return]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.collect_Package.collect.for_direntry_in_visit_str.None_2.elif_path_joinpath___ini.pkg_prefixes_add_path_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Package.collect_Package.collect.for_direntry_in_visit_str.None_2.elif_path_joinpath___ini.pkg_prefixes_add_path_", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 709, "end_line": 738, "span_ids": ["Package.collect"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Package(Module):\n\n def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:\n this_path = Path(self.fspath).parent\n init_module = this_path / \"__init__.py\"\n if init_module.is_file() and path_matches_patterns(\n init_module, self.config.getini(\"python_files\")\n ):\n yield Module.from_parent(self, fspath=py.path.local(init_module))\n pkg_prefixes: Set[Path] = set()\n for direntry in visit(str(this_path), recurse=self._recurse):\n path = Path(direntry.path)\n\n # We will visit our own __init__.py file, in which case we skip it.\n if direntry.is_file():\n if direntry.name == \"__init__.py\" and path.parent == this_path:\n continue\n\n parts_ = parts(direntry.path)\n if any(\n str(pkg_prefix) in parts_ and pkg_prefix / \"__init__.py\" != path\n for pkg_prefix in pkg_prefixes\n ):\n continue\n\n if direntry.is_file():\n yield from self._collectfile(path)\n elif not direntry.is_dir():\n # Broken symlink or invalid/missing file.\n continue\n elif path.joinpath(\"__init__.py\").is_file():\n pkg_prefixes.add(path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_class_fixture_Class._inject_setup_class_fixture.if_setup_class_is_None_an.return": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_class_fixture_Class._inject_setup_class_fixture.if_setup_class_is_None_an.return", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 799, "end_line": 809, "span_ids": ["Class._inject_setup_class_fixture"], "tokens": 
128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Class(PyCollector):\n\n def _inject_setup_class_fixture(self) -> None:\n \"\"\"Inject a hidden autouse, class scoped fixture into the collected class object\n that invokes setup_class/teardown_class if either or both are available.\n\n Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with\n other fixtures (#517).\n \"\"\"\n setup_class = _get_first_non_fixture_func(self.obj, (\"setup_class\",))\n teardown_class = getattr(self.obj, \"teardown_class\", None)\n if setup_class is None and teardown_class is None:\n return\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_class_fixture.xunit_setup_class_fixture_Class._inject_setup_class_fixture.self.obj.__pytest_setup_class.xunit_setup_class_fixture": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_class_fixture.xunit_setup_class_fixture_Class._inject_setup_class_fixture.self.obj.__pytest_setup_class.xunit_setup_class_fixture", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 811, "end_line": 826, "span_ids": ["Class._inject_setup_class_fixture"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Class(PyCollector):\n\n def _inject_setup_class_fixture(self) -> None:\n # ... 
other code\n\n @fixtures.fixture(\n autouse=True,\n scope=\"class\",\n # Use a unique name to speed up lookup.\n name=f\"xunit_setup_class_fixture_{self.obj.__qualname__}\",\n )\n def xunit_setup_class_fixture(cls) -> Generator[None, None, None]:\n if setup_class is not None:\n func = getimfunc(setup_class)\n _call_with_optional_argument(func, self.obj)\n yield\n if teardown_class is not None:\n func = getimfunc(teardown_class)\n _call_with_optional_argument(func, self.obj)\n\n self.obj.__pytest_setup_class = xunit_setup_class_fixture", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_method_fixture_Class._inject_setup_method_fixture.if_setup_method_is_None_a.return": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_method_fixture_Class._inject_setup_method_fixture.if_setup_method_is_None_a.return", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 828, "end_line": 838, "span_ids": ["Class._inject_setup_method_fixture"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Class(PyCollector):\n\n def _inject_setup_method_fixture(self) -> None:\n \"\"\"Inject a hidden autouse, function scoped fixture into the collected class object\n that invokes setup_method/teardown_method if either or both are available.\n\n Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with\n other fixtures (#517).\n \"\"\"\n setup_method = _get_first_non_fixture_func(self.obj, (\"setup_method\",))\n teardown_method = getattr(self.obj, \"teardown_method\", None)\n if setup_method is None and teardown_method is None:\n return\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_method_fixture.xunit_setup_method_fixture_Class._inject_setup_method_fixture.self.obj.__pytest_setup_method.xunit_setup_method_fixtur": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Class._inject_setup_method_fixture.xunit_setup_method_fixture_Class._inject_setup_method_fixture.self.obj.__pytest_setup_method.xunit_setup_method_fixtur", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 840, "end_line": 856, "span_ids": ["Class._inject_setup_method_fixture"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Class(PyCollector):\n\n def _inject_setup_method_fixture(self) -> None:\n # ... other code\n\n @fixtures.fixture(\n autouse=True,\n scope=\"function\",\n # Use a unique name to speed up lookup.\n name=f\"xunit_setup_method_fixture_{self.obj.__qualname__}\",\n )\n def xunit_setup_method_fixture(self, request) -> Generator[None, None, None]:\n method = request.function\n if setup_method is not None:\n func = getattr(self, \"setup_method\")\n _call_with_optional_argument(func, method)\n yield\n if teardown_method is not None:\n func = getattr(self, \"teardown_method\")\n _call_with_optional_argument(func, method)\n\n self.obj.__pytest_setup_method = xunit_setup_method_fixture", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc_Metafunc.__init__.self._arg2fixturedefs.fixtureinfo_name2fixtured": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Metafunc_Metafunc.__init__.self._arg2fixturedefs.fixtureinfo_name2fixtured", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 952, "end_line": 992, "span_ids": ["Metafunc"], "tokens": 281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass Metafunc:\n \"\"\"Objects passed to the :func:`pytest_generate_tests <_pytest.hookspec.pytest_generate_tests>` hook.\n\n They help to inspect a test function and to generate tests according to\n test configuration or values specified in the class or module where a\n test function is defined.\n \"\"\"\n\n def __init__(\n self,\n definition: \"FunctionDefinition\",\n fixtureinfo: fixtures.FuncFixtureInfo,\n config: Config,\n cls=None,\n module=None,\n *,\n _ispytest: bool = False,\n ) -> None:\n 
check_ispytest(_ispytest)\n\n #: Access to the underlying :class:`_pytest.python.FunctionDefinition`.\n self.definition = definition\n\n #: Access to the :class:`_pytest.config.Config` object for the test session.\n self.config = config\n\n #: The module object where the test function is defined in.\n self.module = module\n\n #: Underlying Python test function.\n self.function = definition.obj\n\n #: Set of fixture names required by the test function.\n self.fixturenames = fixtureinfo.names_closure\n\n #: Class object where the test function is defined in or ``None``.\n self.cls = cls\n\n self._calls: List[CallSpec2] = []\n self._arg2fixturedefs = fixtureinfo.name2fixturedefs", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function_Function.from_parent.return.super_from_parent_paren": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function_Function.from_parent.return.super_from_parent_paren", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1536, "end_line": 1633, "span_ids": ["Function", "Function.from_parent"], "tokens": 808}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Function(PyobjMixin, nodes.Item):\n \"\"\"An Item responsible for setting up and executing a Python test function.\n\n param name:\n The full function name, including any decorations like those\n added by parametrization (``my_func[my_param]``).\n param parent:\n The parent Node.\n param config:\n The pytest Config object.\n param callspec:\n If given, this is function has been parametrized and the callspec contains\n meta information about the parametrization.\n param callobj:\n If given, the object which will be called when the Function is invoked,\n otherwise the callobj will be obtained from ``parent`` using ``originalname``.\n param keywords:\n Keywords bound to the function object for \"-k\" matching.\n param session:\n The pytest Session object.\n param fixtureinfo:\n Fixture information already resolved at this fixture node..\n param originalname:\n The attribute name to use for accessing the underlying function object.\n Defaults to ``name``. 
Set this if name is different from the original name,\n for example when it contains decorations like those added by parametrization\n (``my_func[my_param]``).\n \"\"\"\n\n # Disable since functions handle it themselves.\n _ALLOW_MARKERS = False\n\n def __init__(\n self,\n name: str,\n parent,\n config: Optional[Config] = None,\n callspec: Optional[CallSpec2] = None,\n callobj=NOTSET,\n keywords=None,\n session: Optional[Session] = None,\n fixtureinfo: Optional[FuncFixtureInfo] = None,\n originalname: Optional[str] = None,\n ) -> None:\n super().__init__(name, parent, config=config, session=session)\n\n if callobj is not NOTSET:\n self.obj = callobj\n\n #: Original function name, without any decorations (for example\n #: parametrization adds a ``\"[...]\"`` suffix to function names), used to access\n #: the underlying function object from ``parent`` (in case ``callobj`` is not given\n #: explicitly).\n #:\n #: .. versionadded:: 3.0\n self.originalname = originalname or name\n\n # Note: when FunctionDefinition is introduced, we should change ``originalname``\n # to a readonly property that returns FunctionDefinition.name.\n\n self.keywords.update(self.obj.__dict__)\n self.own_markers.extend(get_unpacked_marks(self.obj))\n if callspec:\n self.callspec = callspec\n # this is total hostile and a mess\n # keywords are broken by design by now\n # this will be redeemed later\n for mark in callspec.marks:\n # feel free to cry, this was broken for years before\n # and keywords cant fix it per design\n self.keywords[mark.name] = mark\n self.own_markers.extend(normalize_mark_list(callspec.marks))\n if keywords:\n self.keywords.update(keywords)\n\n # todo: this is a hell of a hack\n # https://github.com/pytest-dev/pytest/issues/4569\n\n self.keywords.update(\n {\n mark.name: True\n for mark in self.iter_markers()\n if mark.name not in self.keywords\n }\n )\n\n if fixtureinfo is None:\n fixtureinfo = self.session._fixturemanager.getfixtureinfo(\n self, self.obj, self.cls, funcargs=True\n )\n self._fixtureinfo: FuncFixtureInfo = fixtureinfo\n self.fixturenames = fixtureinfo.names_closure\n self._initrequest()\n\n @classmethod\n def from_parent(cls, parent, **kw): # todo: determine sound type limitations\n \"\"\"The public constructor.\"\"\"\n return super().from_parent(parent=parent, **kw)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function._initrequest_Function.setup.self__request__fillfixtur": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python.py_Function._initrequest_Function.setup.self__request__fillfixtur", "embedding": null, "metadata": {"file_path": "src/_pytest/python.py", "file_name": "python.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1635, "end_line": 1661, "span_ids": ["Function._getobj", "Function._initrequest", "Function.setup", "Function.runtest", "Function.function", "Function._pyfuncitem"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Function(PyobjMixin, nodes.Item):\n\n def _initrequest(self) 
-> None:\n self.funcargs: Dict[str, object] = {}\n self._request = fixtures.FixtureRequest(self, _ispytest=True)\n\n @property\n def function(self):\n \"\"\"Underlying python 'function' object.\"\"\"\n return getimfunc(self.obj)\n\n def _getobj(self):\n assert self.parent is not None\n return getattr(self.parent.obj, self.originalname) # type: ignore[attr-defined]\n\n @property\n def _pyfuncitem(self):\n \"\"\"(compatonly) for code expecting pytest-2.2 style request objects.\"\"\"\n return self\n\n def runtest(self) -> None:\n \"\"\"Execute the underlying test function.\"\"\"\n self.ihook.pytest_pyfunc_call(pyfuncitem=self)\n\n def setup(self) -> None:\n if isinstance(self.parent, Instance):\n self.parent.newinstance()\n self.obj = self._getobj()\n self._request._fillfixtures()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_math__non_numeric_type_error.return.TypeError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_math__non_numeric_type_error.return.TypeError_", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 38, "span_ids": ["impl", "imports:21", "imports", "_non_numeric_type_error"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import math\nimport pprint\nfrom collections.abc import Iterable\nfrom collections.abc import Mapping\nfrom collections.abc import Sized\nfrom decimal import Decimal\nfrom numbers import Complex\nfrom types import TracebackType\nfrom typing import Any\nfrom typing import Callable\nfrom typing import cast\nfrom typing import Generic\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Pattern\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\n\nif TYPE_CHECKING:\n from numpy import ndarray\n\n\nimport _pytest._code\nfrom _pytest.compat import final\nfrom _pytest.compat import STRING_TYPES\nfrom _pytest.outcomes import fail\n\n\ndef _non_numeric_type_error(value, at: Optional[str]) -> TypeError:\n at_str = f\" at {at}\" if at else \"\"\n return TypeError(\n \"cannot make approximate comparisons to non-numeric values: {!r} {}\".format(\n value, at_str\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__builtin_pytest_approx_h_ApproxBase._check_type.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__builtin_pytest_approx_h_ApproxBase._check_type.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 41, "end_line": 91, "span_ids": 
["ApproxBase.__eq__", "ApproxBase._approx_scalar", "_non_numeric_type_error", "ApproxBase", "ApproxBase._yield_comparisons", "ApproxBase.__repr__", "ApproxBase:7", "ApproxBase._check_type", "ApproxBase.__ne__"], "tokens": 425}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# builtin pytest.approx helper\n\n\nclass ApproxBase:\n \"\"\"Provide shared utilities for making approximate comparisons between\n numbers or sequences of numbers.\"\"\"\n\n # Tell numpy to use our `__eq__` operator instead of its.\n __array_ufunc__ = None\n __array_priority__ = 100\n\n def __init__(self, expected, rel=None, abs=None, nan_ok: bool = False) -> None:\n __tracebackhide__ = True\n self.expected = expected\n self.abs = abs\n self.rel = rel\n self.nan_ok = nan_ok\n self._check_type()\n\n def __repr__(self) -> str:\n raise NotImplementedError\n\n def __eq__(self, actual) -> bool:\n return all(\n a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual)\n )\n\n # Ignore type because of https://github.com/python/mypy/issues/4266.\n __hash__ = None # type: ignore\n\n def __ne__(self, actual) -> bool:\n return not (actual == self)\n\n def _approx_scalar(self, x) -> \"ApproxScalar\":\n return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)\n\n def _yield_comparisons(self, actual):\n \"\"\"Yield all the pairs of numbers to be compared.\n\n This is used to implement the `__eq__` method.\n \"\"\"\n raise NotImplementedError\n\n def _check_type(self) -> None:\n \"\"\"Raise a TypeError if the expected value is not a valid type.\"\"\"\n # This is only a concern if the expected value is a sequence. In every\n # other case, the approx() function ensures that the expected value has\n # a numeric type. For this reason, the default is to do nothing. 
The\n # classes that deal with sequences should reimplement this method to\n # raise if there are any non-numeric elements in the sequence.\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__recursive_list_map_ApproxNumpy.__eq__.return.ApproxBase___eq___self_a": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__recursive_list_map_ApproxNumpy.__eq__.return.ApproxBase___eq___self_a", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 94, "end_line": 122, "span_ids": ["ApproxNumpy.__eq__", "ApproxNumpy.__repr__", "ApproxNumpy", "_recursive_list_map"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _recursive_list_map(f, x):\n if isinstance(x, list):\n return list(_recursive_list_map(f, xi) for xi in x)\n else:\n return f(x)\n\n\nclass ApproxNumpy(ApproxBase):\n \"\"\"Perform approximate comparisons where the expected value is numpy array.\"\"\"\n\n def __repr__(self) -> str:\n list_scalars = _recursive_list_map(self._approx_scalar, self.expected.tolist())\n return f\"approx({list_scalars!r})\"\n\n def __eq__(self, actual) -> bool:\n import numpy as np\n\n # self.expected is supposed to always be an array here.\n\n if not np.isscalar(actual):\n try:\n actual = np.asarray(actual)\n except Exception as e:\n raise TypeError(f\"cannot compare '{actual}' to numpy.ndarray\") from e\n\n if not np.isscalar(actual) and actual.shape != self.expected.shape:\n return False\n\n return ApproxBase.__eq__(self, actual)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxMapping_ApproxMapping._check_type.for_key_value_in_self_ex.if_isinstance_value_type.raise_TypeError_msg_forma": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxMapping_ApproxMapping._check_type.for_key_value_in_self_ex.if_isinstance_value_type.raise_TypeError_msg_forma", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 139, "end_line": 166, "span_ids": ["ApproxMapping.__eq__", "ApproxMapping._check_type", "ApproxMapping.__repr__", "ApproxMapping._yield_comparisons", "ApproxMapping"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ApproxMapping(ApproxBase):\n \"\"\"Perform approximate comparisons where the expected value is a mapping\n with numeric values 
(the keys can be anything).\"\"\"\n\n def __repr__(self) -> str:\n return \"approx({!r})\".format(\n {k: self._approx_scalar(v) for k, v in self.expected.items()}\n )\n\n def __eq__(self, actual) -> bool:\n try:\n if set(actual.keys()) != set(self.expected.keys()):\n return False\n except AttributeError:\n return False\n\n return ApproxBase.__eq__(self, actual)\n\n def _yield_comparisons(self, actual):\n for k in self.expected.keys():\n yield actual[k], self.expected[k]\n\n def _check_type(self) -> None:\n __tracebackhide__ = True\n for key, value in self.expected.items():\n if isinstance(value, type(self.expected)):\n msg = \"pytest.approx() does not support nested dictionaries: key={!r} value={!r}\\n full mapping={}\"\n raise TypeError(msg.format(key, value, pprint.pformat(self.expected)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxSequencelike_ApproxSequencelike._check_type.for_index_x_in_enumerate.if_isinstance_x_type_sel.raise_TypeError_msg_forma": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxSequencelike_ApproxSequencelike._check_type.for_index_x_in_enumerate.if_isinstance_x_type_sel.raise_TypeError_msg_forma", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 169, "end_line": 196, "span_ids": ["ApproxSequencelike._yield_comparisons", "ApproxSequencelike.__repr__", "ApproxSequencelike", "ApproxSequencelike.__eq__", "ApproxSequencelike._check_type"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ApproxSequencelike(ApproxBase):\n \"\"\"Perform approximate comparisons where the expected value is a sequence of numbers.\"\"\"\n\n def __repr__(self) -> str:\n seq_type = type(self.expected)\n if seq_type not in (tuple, list, set):\n seq_type = list\n return \"approx({!r})\".format(\n seq_type(self._approx_scalar(x) for x in self.expected)\n )\n\n def __eq__(self, actual) -> bool:\n try:\n if len(actual) != len(self.expected):\n return False\n except TypeError:\n return False\n return ApproxBase.__eq__(self, actual)\n\n def _yield_comparisons(self, actual):\n return zip(actual, self.expected)\n\n def _check_type(self) -> None:\n __tracebackhide__ = True\n for index, x in enumerate(self.expected):\n if isinstance(x, type(self.expected)):\n msg = \"pytest.approx() does not support nested data structures: {!r} at index {}\\n full sequence: {}\"\n raise TypeError(msg.format(x, index, pprint.pformat(self.expected)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar_ApproxScalar.__repr__.return.f_self_expected_vett": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar_ApproxScalar.__repr__.return.f_self_expected_vett", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 199, "end_line": 235, "span_ids": ["ApproxScalar", "ApproxScalar.__repr__"], "tokens": 366}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ApproxScalar(ApproxBase):\n \"\"\"Perform approximate comparisons where the expected value is a single number.\"\"\"\n\n # Using Real should be better than this Union, but not possible yet:\n # https://github.com/python/typeshed/pull/3108\n DEFAULT_ABSOLUTE_TOLERANCE: Union[float, Decimal] = 1e-12\n DEFAULT_RELATIVE_TOLERANCE: Union[float, Decimal] = 1e-6\n\n def __repr__(self) -> str:\n \"\"\"Return a string communicating both the expected value and the\n tolerance for the comparison being made.\n\n For example, ``1.0 \u00b1 1e-6``, ``(3+4j) \u00b1 5e-6 \u2220 \u00b1180\u00b0``.\n \"\"\"\n\n # Don't show a tolerance for values that aren't compared using\n # tolerances, i.e. non-numerics and infinities. Need to call abs to\n # handle complex numbers, e.g. (inf + 1j).\n if (not isinstance(self.expected, (Complex, Decimal))) or math.isinf(\n abs(self.expected) # type: ignore[arg-type]\n ):\n return str(self.expected)\n\n # If a sensible tolerance can't be calculated, self.tolerance will\n # raise a ValueError. In this case, display '???'.\n try:\n vetted_tolerance = f\"{self.tolerance:.1e}\"\n if (\n isinstance(self.expected, Complex)\n and self.expected.imag\n and not math.isinf(self.tolerance)\n ):\n vetted_tolerance += \" \u2220 \u00b1180\u00b0\"\n except ValueError:\n vetted_tolerance = \"???\"\n\n return f\"{self.expected} \u00b1 {vetted_tolerance}\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar.__eq___ApproxScalar.__eq__.return.result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar.__eq___ApproxScalar.__eq__.return.result", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 237, "end_line": 276, "span_ids": ["ApproxScalar.__eq__"], "tokens": 417}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ApproxScalar(ApproxBase):\n\n def __eq__(self, actual) -> bool:\n \"\"\"Return whether the given value is equal to the expected value\n within the pre-specified tolerance.\"\"\"\n asarray = _as_numpy_array(actual)\n if asarray is not None:\n # Call ``__eq__()`` manually to prevent infinite-recursion with\n # numpy<1.13. 
See #3748.\n return all(self.__eq__(a) for a in asarray.flat)\n\n # Short-circuit exact equality.\n if actual == self.expected:\n return True\n\n # If either type is non-numeric, fall back to strict equality.\n # NB: we need Complex, rather than just Number, to ensure that __abs__,\n # __sub__, and __float__ are defined.\n if not (\n isinstance(self.expected, (Complex, Decimal))\n and isinstance(actual, (Complex, Decimal))\n ):\n return False\n\n # Allow the user to control whether NaNs are considered equal to each\n # other or not. The abs() calls are for compatibility with complex\n # numbers.\n if math.isnan(abs(self.expected)): # type: ignore[arg-type]\n return self.nan_ok and math.isnan(abs(actual)) # type: ignore[arg-type]\n\n # Infinity shouldn't be approximately equal to anything but itself, but\n # if there's a relative tolerance, it will be infinite and infinity\n # will seem approximately equal to everything. The equal-to-itself\n # case would have been short circuited above, so here we can just\n # return false if the expected value is infinite. The abs() call is\n # for compatibility with complex numbers.\n if math.isinf(abs(self.expected)): # type: ignore[arg-type]\n return False\n\n # Return true if the two numbers are within the tolerance.\n result: bool = abs(self.expected - actual) <= self.tolerance\n return result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar._Ignore_type_because_of__approx": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_ApproxScalar._Ignore_type_because_of__approx", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 278, "end_line": 542, "span_ids": ["ApproxScalar:7", "ApproxDecimal", "approx", "ApproxScalar.__eq__", "ApproxScalar.tolerance"], "tokens": 489}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ApproxScalar(ApproxBase):\n\n # Ignore type because of https://github.com/python/mypy/issues/4266.\n __hash__ = None # type: ignore\n\n @property\n def tolerance(self):\n \"\"\"Return the tolerance for the comparison.\n\n This could be either an absolute tolerance or a relative tolerance,\n depending on what the user specified or which would be larger.\n \"\"\"\n\n def set_default(x, default):\n return x if x is not None else default\n\n # Figure out what the absolute tolerance should be. 
``self.abs`` is\n # either None or a value specified by the user.\n absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE)\n\n if absolute_tolerance < 0:\n raise ValueError(\n f\"absolute tolerance can't be negative: {absolute_tolerance}\"\n )\n if math.isnan(absolute_tolerance):\n raise ValueError(\"absolute tolerance can't be NaN.\")\n\n # If the user specified an absolute tolerance but not a relative one,\n # just return the absolute tolerance.\n if self.rel is None:\n if self.abs is not None:\n return absolute_tolerance\n\n # Figure out what the relative tolerance should be. ``self.rel`` is\n # either None or a value specified by the user. This is done after\n # we've made sure the user didn't ask for an absolute tolerance only,\n # because we don't want to raise errors about the relative tolerance if\n # we aren't even going to use it.\n relative_tolerance = set_default(\n self.rel, self.DEFAULT_RELATIVE_TOLERANCE\n ) * abs(self.expected)\n\n if relative_tolerance < 0:\n raise ValueError(\n f\"relative tolerance can't be negative: {absolute_tolerance}\"\n )\n if math.isnan(relative_tolerance):\n raise ValueError(\"relative tolerance can't be NaN.\")\n\n # Return the larger of the relative and absolute tolerances.\n return max(relative_tolerance, absolute_tolerance)\n\n\nclass ApproxDecimal(ApproxScalar):\n \"\"\"Perform approximate comparisons where the expected value is a Decimal.\"\"\"\n\n DEFAULT_ABSOLUTE_TOLERANCE = Decimal(\"1e-12\")\n DEFAULT_RELATIVE_TOLERANCE = Decimal(\"1e-6\")\n\n\ndef approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase:\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_approx._Assert_that_two_number_approx._Assert_that_two_number": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_approx._Assert_that_two_number_approx._Assert_that_two_number", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 337, "end_line": 506, "span_ids": ["approx"], "tokens": 2092}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase:\n \"\"\"Assert that two numbers (or two sets of numbers) are equal to each other\n within some tolerance.\n\n Due to the `intricacies of floating-point arithmetic`__, numbers that we\n would intuitively expect to be equal are not always so::\n\n >>> 0.1 + 0.2 == 0.3\n False\n\n __ https://docs.python.org/3/tutorial/floatingpoint.html\n\n This problem is commonly encountered when writing tests, e.g. when making\n sure that floating-point values are what you expect them to be. One way to\n deal with this problem is to assert that two floating-point numbers are\n equal to within some appropriate tolerance::\n\n >>> abs((0.1 + 0.2) - 0.3) < 1e-6\n True\n\n However, comparisons like this are tedious to write and difficult to\n understand. 
Furthermore, absolute comparisons like the one above are\n usually discouraged because there's no tolerance that works well for all\n situations. ``1e-6`` is good for numbers around ``1``, but too small for\n very big numbers and too big for very small ones. It's better to express\n the tolerance as a fraction of the expected value, but relative comparisons\n like that are even more difficult to write correctly and concisely.\n\n The ``approx`` class performs floating-point comparisons using a syntax\n that's as intuitive as possible::\n\n >>> from pytest import approx\n >>> 0.1 + 0.2 == approx(0.3)\n True\n\n The same syntax also works for sequences of numbers::\n\n >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6))\n True\n\n Dictionary *values*::\n\n >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6})\n True\n\n ``numpy`` arrays::\n\n >>> import numpy as np # doctest: +SKIP\n >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP\n True\n\n And for a ``numpy`` array against a scalar::\n\n >>> import numpy as np # doctest: +SKIP\n >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP\n True\n\n By default, ``approx`` considers numbers within a relative tolerance of\n ``1e-6`` (i.e. one part in a million) of its expected value to be equal.\n This treatment would lead to surprising results if the expected value was\n ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``.\n To handle this case less surprisingly, ``approx`` also considers numbers\n within an absolute tolerance of ``1e-12`` of its expected value to be\n equal. Infinity and NaN are special cases. Infinity is only considered\n equal to itself, regardless of the relative tolerance. NaN is not\n considered equal to anything by default, but you can make it be equal to\n itself by setting the ``nan_ok`` argument to True. (This is meant to\n facilitate comparing arrays that use NaN to mean \"no data\".)\n\n Both the relative and absolute tolerances can be changed by passing\n arguments to the ``approx`` constructor::\n\n >>> 1.0001 == approx(1)\n False\n >>> 1.0001 == approx(1, rel=1e-3)\n True\n >>> 1.0001 == approx(1, abs=1e-3)\n True\n\n If you specify ``abs`` but not ``rel``, the comparison will not consider\n the relative tolerance at all. In other words, two numbers that are within\n the default relative tolerance of ``1e-6`` will still be considered unequal\n if they exceed the specified absolute tolerance. If you specify both\n ``abs`` and ``rel``, the numbers will be considered equal if either\n tolerance is met::\n\n >>> 1 + 1e-8 == approx(1)\n True\n >>> 1 + 1e-8 == approx(1, abs=1e-12)\n False\n >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12)\n True\n\n You can also use ``approx`` to compare nonnumeric types, or dicts and\n sequences containing nonnumeric types, in which case it falls back to\n strict equality. This can be useful for comparing dicts and sequences that\n can contain optional values::\n\n >>> {\"required\": 1.0000005, \"optional\": None} == approx({\"required\": 1, \"optional\": None})\n True\n >>> [None, 1.0000005] == approx([None,1])\n True\n >>> [\"foo\", 1.0000005] == approx([None,1])\n False\n\n If you're thinking about using ``approx``, then you might want to know how\n it compares to other good ways of comparing floating-point numbers. 
All of\n these algorithms are based on relative and absolute tolerances and should\n agree for the most part, but they do have meaningful differences:\n\n - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative\n tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute\n tolerance is met. Because the relative tolerance is calculated w.r.t.\n both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor\n ``b`` is a \"reference value\"). You have to specify an absolute tolerance\n if you want to compare to ``0.0`` because there is no tolerance by\n default. `More information...`__\n\n __ https://docs.python.org/3/library/math.html#math.isclose\n\n - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference\n between ``a`` and ``b`` is less that the sum of the relative tolerance\n w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance\n is only calculated w.r.t. ``b``, this test is asymmetric and you can\n think of ``b`` as the reference value. Support for comparing sequences\n is provided by ``numpy.allclose``. `More information...`__\n\n __ https://numpy.org/doc/stable/reference/generated/numpy.isclose.html\n\n - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``\n are within an absolute tolerance of ``1e-7``. No relative tolerance is\n considered and the absolute tolerance cannot be changed, so this function\n is not appropriate for very large or very small numbers. Also, it's only\n available in subclasses of ``unittest.TestCase`` and it's ugly because it\n doesn't follow PEP8. `More information...`__\n\n __ https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertAlmostEqual\n\n - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative\n tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.\n Because the relative tolerance is only calculated w.r.t. ``b``, this test\n is asymmetric and you can think of ``b`` as the reference value. In the\n special case that you explicitly specify an absolute tolerance but not a\n relative tolerance, only the absolute tolerance is considered.\n\n .. warning::\n\n .. versionchanged:: 3.2\n\n In order to avoid inconsistent behavior, ``TypeError`` is\n raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons.\n The example below illustrates the problem::\n\n assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10)\n assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10)\n\n In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)``\n to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used to\n comparison. This is because the call hierarchy of rich comparisons\n follows a fixed behavior. `More information...`__\n\n __ https://docs.python.org/3/reference/datamodel.html#object.__ge__\n\n .. versionchanged:: 3.7.1\n ``approx`` raises ``TypeError`` when it encounters a dict value or\n sequence element of nonnumeric type.\n\n .. versionchanged:: 6.1.0\n ``approx`` falls back to strict equality for nonnumeric types instead\n of raising ``TypeError``.\n \"\"\"\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__is_numpy_array__as_numpy_array.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__is_numpy_array__as_numpy_array.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 545, "end_line": 569, "span_ids": ["_as_numpy_array", "_is_numpy_array"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _is_numpy_array(obj: object) -> bool:\n \"\"\"\n Return true if the given object is implicitly convertible to ndarray,\n and numpy is already imported.\n \"\"\"\n return _as_numpy_array(obj) is not None\n\n\ndef _as_numpy_array(obj: object) -> Optional[\"ndarray\"]:\n \"\"\"\n Return an ndarray if the given object is implicitly convertible to ndarray,\n and numpy is already imported, otherwise None.\n \"\"\"\n import sys\n\n np: Any = sys.modules.get(\"numpy\")\n if np is not None:\n # avoid infinite recursion on numpy scalars, which have __array__\n if np.isscalar(obj):\n return None\n elif isinstance(obj, np.ndarray):\n return obj\n elif hasattr(obj, \"__array__\") or hasattr(\"obj\", \"__array_interface__\"):\n return np.asarray(obj)\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__builtin_pytest_raises_h_raises_6._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py__builtin_pytest_raises_h_raises_6._", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 572, "end_line": 593, "span_ids": ["impl:2", "_as_numpy_array", "raises_6", "raises"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# builtin pytest.raises helper\n\n_E = TypeVar(\"_E\", bound=BaseException)\n\n\n@overload\ndef raises(\n expected_exception: Union[Type[_E], Tuple[Type[_E], ...]],\n *,\n match: Optional[Union[str, Pattern[str]]] = ...,\n) -> \"RaisesContext[_E]\":\n ...\n\n\n@overload\ndef raises(\n expected_exception: Union[Type[_E], Tuple[Type[_E], ...]],\n func: Callable[..., Any],\n *args: Any,\n **kwargs: Any,\n) -> _pytest._code.ExceptionInfo[_E]:\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_raises_7_raises_7.r_Assert_that_a_code_bl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_raises_7_raises_7.r_Assert_that_a_code_bl", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 596, "end_line": 708, "span_ids": ["raises_7"], "tokens": 1012}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def raises(\n expected_exception: Union[Type[_E], Tuple[Type[_E], ...]], *args: Any, **kwargs: Any\n) -> Union[\"RaisesContext[_E]\", _pytest._code.ExceptionInfo[_E]]:\n r\"\"\"Assert that a code block/function call raises ``expected_exception``\n or raise a failure exception otherwise.\n\n :kwparam match:\n If specified, a string containing a regular expression,\n or a regular expression object, that is tested against the string\n representation of the exception using ``re.search``. To match a literal\n string that may contain `special characters`__, the pattern can\n first be escaped with ``re.escape``.\n\n (This is only used when ``pytest.raises`` is used as a context manager,\n and passed through to the function otherwise.\n When using ``pytest.raises`` as a function, you can use:\n ``pytest.raises(Exc, func, match=\"passed on\").match(\"my pattern\")``.)\n\n __ https://docs.python.org/3/library/re.html#regular-expression-syntax\n\n .. currentmodule:: _pytest._code\n\n Use ``pytest.raises`` as a context manager, which will capture the exception of the given\n type::\n\n >>> import pytest\n >>> with pytest.raises(ZeroDivisionError):\n ... 1/0\n\n If the code block does not raise the expected exception (``ZeroDivisionError`` in the example\n above), or no exception at all, the check will fail instead.\n\n You can also use the keyword argument ``match`` to assert that the\n exception matches a text or regex::\n\n >>> with pytest.raises(ValueError, match='must be 0 or None'):\n ... raise ValueError(\"value must be 0 or None\")\n\n >>> with pytest.raises(ValueError, match=r'must be \\d+$'):\n ... raise ValueError(\"value must be 42\")\n\n The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the\n details of the captured exception::\n\n >>> with pytest.raises(ValueError) as exc_info:\n ... raise ValueError(\"value must be 42\")\n >>> assert exc_info.type is ValueError\n >>> assert exc_info.value.args[0] == \"value must be 42\"\n\n .. note::\n\n When using ``pytest.raises`` as a context manager, it's worthwhile to\n note that normal context manager rules apply and that the exception\n raised *must* be the final line in the scope of the context manager.\n Lines of code after that, within the scope of the context manager will\n not be executed. For example::\n\n >>> value = 15\n >>> with pytest.raises(ValueError) as exc_info:\n ... if value > 10:\n ... raise ValueError(\"value must be <= 10\")\n ... assert exc_info.type is ValueError # this will not execute\n\n Instead, the following approach must be taken (note the difference in\n scope)::\n\n >>> with pytest.raises(ValueError) as exc_info:\n ... if value > 10:\n ... 
raise ValueError(\"value must be <= 10\")\n ...\n >>> assert exc_info.type is ValueError\n\n **Using with** ``pytest.mark.parametrize``\n\n When using :ref:`pytest.mark.parametrize ref`\n it is possible to parametrize tests such that\n some runs raise an exception and others do not.\n\n See :ref:`parametrizing_conditional_raising` for an example.\n\n **Legacy form**\n\n It is possible to specify a callable by passing a to-be-called lambda::\n\n >>> raises(ZeroDivisionError, lambda: 1/0)\n \n\n or you can specify an arbitrary callable with arguments::\n\n >>> def f(x): return 1/x\n ...\n >>> raises(ZeroDivisionError, f, 0)\n \n >>> raises(ZeroDivisionError, f, x=0)\n \n\n The form above is fully supported but discouraged for new code because the\n context manager form is regarded as more readable and less error-prone.\n\n .. note::\n Similar to caught exception objects in Python, explicitly clearing\n local references to returned ``ExceptionInfo`` objects can\n help the Python interpreter speed up its garbage collection.\n\n Clearing those references breaks a reference cycle\n (``ExceptionInfo`` --> caught exception --> frame stack raising\n the exception --> current frame stack --> local variables -->\n ``ExceptionInfo``) which makes Python keep all objects referenced\n from that cycle (including all local variables in the current\n frame) alive until the next cyclic garbage collection run.\n More detailed information can be found in the official Python\n documentation for :ref:`the try statement `.\n \"\"\"\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_raises_7.__tracebackhide___raises_7.fail_message_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/python_api.py_raises_7.__tracebackhide___raises_7.fail_message_", "embedding": null, "metadata": {"file_path": "src/_pytest/python_api.py", "file_name": "python_api.py", "file_type": "text/x-python", "category": "implementation", "start_line": 709, "end_line": 745, "span_ids": ["raises_7"], "tokens": 380}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def raises(\n expected_exception: Union[Type[_E], Tuple[Type[_E], ...]], *args: Any, **kwargs: Any\n) -> Union[\"RaisesContext[_E]\", _pytest._code.ExceptionInfo[_E]]:\n __tracebackhide__ = True\n\n if isinstance(expected_exception, type):\n excepted_exceptions: Tuple[Type[_E], ...] 
= (expected_exception,)\n else:\n excepted_exceptions = expected_exception\n for exc in excepted_exceptions:\n if not isinstance(exc, type) or not issubclass(exc, BaseException):\n msg = \"expected exception must be a BaseException type, not {}\" # type: ignore[unreachable]\n not_a = exc.__name__ if isinstance(exc, type) else type(exc).__name__\n raise TypeError(msg.format(not_a))\n\n message = f\"DID NOT RAISE {expected_exception}\"\n\n if not args:\n match: Optional[Union[str, Pattern[str]]] = kwargs.pop(\"match\", None)\n if kwargs:\n msg = \"Unexpected keyword arguments passed to pytest.raises: \"\n msg += \", \".join(sorted(kwargs))\n msg += \"\\nUse context-manager form instead?\"\n raise TypeError(msg)\n return RaisesContext(expected_exception, message, match)\n else:\n func = args[0]\n if not callable(func):\n raise TypeError(\n \"{!r} object (type: {}) must be callable\".format(func, type(func))\n )\n try:\n func(*args[1:], **kwargs)\n except expected_exception as e:\n # We just caught the exception - there is a traceback.\n assert e.__traceback__ is not None\n return _pytest._code.ExceptionInfo.from_exc_info(\n (type(e), e, e.__traceback__)\n )\n fail(message)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py__Record_warnings_during_deprecated_call_2._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py__Record_warnings_during_deprecated_call_2._", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 49, "span_ids": ["impl", "docstring", "recwarn", "deprecated_call", "imports", "deprecated_call_2"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Record warnings during test function execution.\"\"\"\nimport re\nimport warnings\nfrom types import TracebackType\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Generator\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Pattern\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TypeVar\nfrom typing import Union\n\nfrom _pytest.compat import final\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.fixtures import fixture\nfrom _pytest.outcomes import fail\n\n\nT = TypeVar(\"T\")\n\n\n@fixture\ndef recwarn() -> Generator[\"WarningsRecorder\", None, None]:\n \"\"\"Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions.\n\n See http://docs.python.org/library/warnings.html for information\n on warning categories.\n \"\"\"\n wrec = WarningsRecorder(_ispytest=True)\n with wrec:\n warnings.simplefilter(\"default\")\n yield wrec\n\n\n@overload\ndef deprecated_call(\n *, match: Optional[Union[str, Pattern[str]]] = ...\n) -> \"WarningsRecorder\":\n ...\n\n\n@overload\ndef deprecated_call(func: Callable[..., T], *args: Any, **kwargs: Any) -> T:\n ...", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_warns_warns_5._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/recwarn.py_warns_warns_5._", "embedding": null, "metadata": {"file_path": "src/_pytest/recwarn.py", "file_name": "recwarn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 84, "end_line": 100, "span_ids": ["warns", "warns_5"], "tokens": 107}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@overload\ndef warns(\n expected_warning: Optional[Union[Type[Warning], Tuple[Type[Warning], ...]]],\n *,\n match: Optional[Union[str, Pattern[str]]] = ...,\n) -> \"WarningsChecker\":\n ...\n\n\n@overload\ndef warns(\n expected_warning: Optional[Union[Type[Warning], Tuple[Type[Warning], ...]]],\n func: Callable[..., T],\n *args: Any,\n **kwargs: Any,\n) -> T:\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport_BaseReport.if_TYPE_CHECKING_.__getattr__._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport_BaseReport.if_TYPE_CHECKING_.__getattr__._", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 60, "end_line": 76, "span_ids": ["BaseReport.if_TYPE_CHECKING_.__getattr__", "BaseReport"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseReport:\n when: Optional[str]\n location: Optional[Tuple[str, Optional[int], str]]\n longrepr: Union[\n None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr\n ]\n sections: List[Tuple[str, str]]\n nodeid: str\n outcome: \"Literal['passed', 'failed', 'skipped']\"\n\n def __init__(self, **kw: Any) -> None:\n self.__dict__.update(kw)\n\n if TYPE_CHECKING:\n # Can have arbitrary fields given to __init__().\n def __getattr__(self, key: str) -> Any:\n ...", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.toterminal_BaseReport.toterminal.if_hasattr_longrepr_tot.else_.out_line_s_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.toterminal_BaseReport.toterminal.if_hasattr_longrepr_tot.else_.out_line_s_", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": 
"implementation", "start_line": 78, "end_line": 94, "span_ids": ["BaseReport.toterminal"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseReport:\n\n def toterminal(self, out: TerminalWriter) -> None:\n if hasattr(self, \"node\"):\n out.line(getworkerinfoline(self.node))\n\n longrepr = self.longrepr\n if longrepr is None:\n return\n\n if hasattr(longrepr, \"toterminal\"):\n longrepr_terminal = cast(TerminalRepr, longrepr)\n longrepr_terminal.toterminal(out)\n else:\n try:\n s = str(longrepr)\n except UnicodeEncodeError:\n s = \"\"\n out.line(s)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.get_sections_BaseReport.count_towards_summary.return.True": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py_BaseReport.get_sections_BaseReport.count_towards_summary.return.True", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 96, "end_line": 171, "span_ids": ["BaseReport.skipped", "BaseReport.capstderr", "BaseReport.caplog", "BaseReport.longreprtext", "BaseReport.capstdout", "BaseReport.fspath", "BaseReport.count_towards_summary", "BaseReport.passed", "BaseReport.failed", "BaseReport.get_sections"], "tokens": 485}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseReport:\n\n def get_sections(self, prefix: str) -> Iterator[Tuple[str, str]]:\n for name, content in self.sections:\n if name.startswith(prefix):\n yield prefix, content\n\n @property\n def longreprtext(self) -> str:\n \"\"\"Read-only property that returns the full string representation of\n ``longrepr``.\n\n .. versionadded:: 3.0\n \"\"\"\n file = StringIO()\n tw = TerminalWriter(file)\n tw.hasmarkup = False\n self.toterminal(tw)\n exc = file.getvalue()\n return exc.strip()\n\n @property\n def caplog(self) -> str:\n \"\"\"Return captured log lines, if log capturing is enabled.\n\n .. versionadded:: 3.5\n \"\"\"\n return \"\\n\".join(\n content for (prefix, content) in self.get_sections(\"Captured log\")\n )\n\n @property\n def capstdout(self) -> str:\n \"\"\"Return captured text from stdout, if capturing is enabled.\n\n .. versionadded:: 3.0\n \"\"\"\n return \"\".join(\n content for (prefix, content) in self.get_sections(\"Captured stdout\")\n )\n\n @property\n def capstderr(self) -> str:\n \"\"\"Return captured text from stderr, if capturing is enabled.\n\n .. 
versionadded:: 3.0\n \"\"\"\n return \"\".join(\n content for (prefix, content) in self.get_sections(\"Captured stderr\")\n )\n\n @property\n def passed(self) -> bool:\n return self.outcome == \"passed\"\n\n @property\n def failed(self) -> bool:\n return self.outcome == \"failed\"\n\n @property\n def skipped(self) -> bool:\n return self.outcome == \"skipped\"\n\n @property\n def fspath(self) -> str:\n return self.nodeid.split(\"::\")[0]\n\n @property\n def count_towards_summary(self) -> bool:\n \"\"\"**Experimental** Whether this report should be counted towards the\n totals shown at the end of the test session: \"1 passed, 1 failure, etc\".\n\n .. note::\n\n This function is considered **experimental**, so beware that it is subject to changes\n even in patch releases.\n \"\"\"\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json.serialize_exception_longrepr__report_to_json.serialize_exception_longrepr.return.result": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/reports.py__report_to_json.serialize_exception_longrepr__report_to_json.serialize_exception_longrepr.return.result", "embedding": null, "metadata": {"file_path": "src/_pytest/reports.py", "file_name": "reports.py", "file_type": "text/x-python", "category": "implementation", "start_line": 469, "end_line": 490, "span_ids": ["_report_to_json"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _report_to_json(report: BaseReport) -> Dict[str, Any]:\n # ... other code\n\n def serialize_exception_longrepr(rep: BaseReport) -> Dict[str, Any]:\n assert rep.longrepr is not None\n # TODO: Investigate whether the duck typing is really necessary here.\n longrepr = cast(ExceptionRepr, rep.longrepr)\n result: Dict[str, Any] = {\n \"reprcrash\": serialize_repr_crash(longrepr.reprcrash),\n \"reprtraceback\": serialize_repr_traceback(longrepr.reprtraceback),\n \"sections\": longrepr.sections,\n }\n if isinstance(longrepr, ExceptionChainRepr):\n result[\"chain\"] = []\n for repr_traceback, repr_crash, description in longrepr.chain:\n result[\"chain\"].append(\n (\n serialize_repr_traceback(repr_traceback),\n serialize_repr_crash(repr_crash),\n description,\n )\n )\n else:\n result[\"chain\"] = None\n return result\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py__Basic_collect_and_runt_if_TYPE_CHECKING_.from__pytest_terminal_imp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py__Basic_collect_and_runt_if_TYPE_CHECKING_.from__pytest_terminal_imp", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 44, "span_ids": ["imports:36", "impl", "docstring", "imports"], "tokens": 278}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Basic collect and runtest protocol implementations.\"\"\"\nimport bdb\nimport os\nimport sys\nimport warnings\nfrom typing import Callable\nfrom typing import cast\nfrom typing import Dict\nfrom typing import Generic\nfrom typing import List\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import TypeVar\nfrom typing import Union\n\nimport attr\n\nfrom .reports import BaseReport\nfrom .reports import CollectErrorRepr\nfrom .reports import CollectReport\nfrom .reports import TestReport\nfrom _pytest import timing\nfrom _pytest._code.code import ExceptionChainRepr\nfrom _pytest._code.code import ExceptionInfo\nfrom _pytest._code.code import TerminalRepr\nfrom _pytest.compat import final\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.deprecated import UNITTEST_SKIP_DURING_COLLECTION\nfrom _pytest.nodes import Collector\nfrom _pytest.nodes import Item\nfrom _pytest.nodes import Node\nfrom _pytest.outcomes import Exit\nfrom _pytest.outcomes import OutcomeException\nfrom _pytest.outcomes import Skipped\nfrom _pytest.outcomes import TEST_OUTCOME\n\nif TYPE_CHECKING:\n from typing_extensions import Literal\n\n from _pytest.main import Session\n from _pytest.terminal import TerminalReporter", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py___pytest_addoption.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py___pytest_addoption.None_1", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 46, "end_line": 67, "span_ids": ["imports:36", "pytest_addoption"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "#\n# pytest plugin hooks.\n\n\ndef pytest_addoption(parser: Parser) -> None:\n group = 
parser.getgroup(\"terminal reporting\", \"reporting\", after=\"general\")\n group.addoption(\n \"--durations\",\n action=\"store\",\n type=int,\n default=None,\n metavar=\"N\",\n help=\"show N slowest setup/test durations (N=0 for all).\",\n )\n group.addoption(\n \"--durations-min\",\n action=\"store\",\n type=float,\n default=0.005,\n metavar=\"N\",\n help=\"Minimal duration in seconds for inclusion in slowest list. Default 0.005\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_terminal_summary_pytest_terminal_summary.for_i_rep_in_enumerate_d.tr_write_line_f_rep_dura": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_terminal_summary_pytest_terminal_summary.for_i_rep_in_enumerate_d.tr_write_line_f_rep_dura", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 70, "end_line": 99, "span_ids": ["pytest_terminal_summary"], "tokens": 276}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_terminal_summary(terminalreporter: \"TerminalReporter\") -> None:\n durations = terminalreporter.config.option.durations\n durations_min = terminalreporter.config.option.durations_min\n verbose = terminalreporter.config.getvalue(\"verbose\")\n if durations is None:\n return\n tr = terminalreporter\n dlist = []\n for replist in tr.stats.values():\n for rep in replist:\n if hasattr(rep, \"duration\"):\n dlist.append(rep)\n if not dlist:\n return\n dlist.sort(key=lambda x: x.duration, reverse=True) # type: ignore[no-any-return]\n if not durations:\n tr.write_sep(\"=\", \"slowest durations\")\n else:\n tr.write_sep(\"=\", \"slowest %s durations\" % durations)\n dlist = dlist[:durations]\n\n for i, rep in enumerate(dlist):\n if verbose < 2 and rep.duration < durations_min:\n tr.write_line(\"\")\n tr.write_line(\n \"(%s durations < %gs hidden. 
Use -vv to show these durations.)\"\n % (len(dlist) - i, durations_min)\n )\n break\n tr.write_line(f\"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_check_interactive_exception_call_runtest_hook.return.CallInfo_from_call_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_check_interactive_exception_call_runtest_hook.return.CallInfo_from_call_", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 231, "end_line": 262, "span_ids": ["check_interactive_exception", "call_runtest_hook"], "tokens": 308}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def check_interactive_exception(call: \"CallInfo[object]\", report: BaseReport) -> bool:\n \"\"\"Check whether the call raised an exception that should be reported as\n interactive.\"\"\"\n if call.excinfo is None:\n # Didn't raise.\n return False\n if hasattr(report, \"wasxfail\"):\n # Exception was expected.\n return False\n if isinstance(call.excinfo.value, (Skipped, bdb.BdbQuit)):\n # Special control flow exception.\n return False\n return True\n\n\ndef call_runtest_hook(\n item: Item, when: \"Literal['setup', 'call', 'teardown']\", **kwds\n) -> \"CallInfo[None]\":\n if when == \"setup\":\n ihook: Callable[..., None] = item.ihook.pytest_runtest_setup\n elif when == \"call\":\n ihook = item.ihook.pytest_runtest_call\n elif when == \"teardown\":\n ihook = item.ihook.pytest_runtest_teardown\n else:\n assert False, f\"Unhandled runtest hook case: {when}\"\n reraise: Tuple[Type[BaseException], ...] 
= (Exit,)\n if not item.config.getoption(\"usepdb\", False):\n reraise += (KeyboardInterrupt,)\n return CallInfo.from_call(\n lambda: ihook(item=item, **kwds), when=when, reraise=reraise\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_TResult_CallInfo.__init__.self.when.when": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_TResult_CallInfo.__init__.self.when.when", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 265, "end_line": 302, "span_ids": ["impl:2", "CallInfo"], "tokens": 302}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TResult = TypeVar(\"TResult\", covariant=True)\n\n\n@final\n@attr.s(repr=False, init=False, auto_attribs=True)\nclass CallInfo(Generic[TResult]):\n \"\"\"Result/Exception info of a function invocation.\"\"\"\n\n _result: Optional[TResult]\n #: The captured exception of the call, if it raised.\n excinfo: Optional[ExceptionInfo[BaseException]]\n #: The system time when the call started, in seconds since the epoch.\n start: float\n #: The system time when the call ended, in seconds since the epoch.\n stop: float\n #: The call duration, in seconds.\n duration: float\n #: The context of invocation: \"collect\", \"setup\", \"call\" or \"teardown\".\n when: \"Literal['collect', 'setup', 'call', 'teardown']\"\n\n def __init__(\n self,\n result: Optional[TResult],\n excinfo: Optional[ExceptionInfo[BaseException]],\n start: float,\n stop: float,\n duration: float,\n when: \"Literal['collect', 'setup', 'call', 'teardown']\",\n *,\n _ispytest: bool = False,\n ) -> None:\n check_ispytest(_ispytest)\n self._result = result\n self.excinfo = excinfo\n self.start = start\n self.stop = stop\n self.duration = duration\n self.when = when", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_CallInfo.result_CallInfo.result.return.cast_TResult_self__resul": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_CallInfo.result_CallInfo.result.return.cast_TResult_self__resul", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 304, "end_line": 315, "span_ids": ["CallInfo.result"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False, init=False, auto_attribs=True)\nclass CallInfo(Generic[TResult]):\n\n @property\n def result(self) -> TResult:\n \"\"\"The 
return value of the call, if it didn't raise.\n\n Can only be accessed if excinfo is None.\n \"\"\"\n if self.excinfo is not None:\n raise AttributeError(f\"{self!r} has no valid result\")\n # The cast is safe because an exception wasn't raised, hence\n # _result has the expected function return type (which may be\n # None, that's why a cast and not an assert).\n return cast(TResult, self._result)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_CallInfo.from_call_CallInfo.__repr__.return.f_CallInfo_when_self_wh": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_CallInfo.from_call_CallInfo.__repr__.return.f_CallInfo_when_self_wh", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 317, "end_line": 363, "span_ids": ["CallInfo.from_call", "CallInfo.__repr__"], "tokens": 393}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(repr=False, init=False, auto_attribs=True)\nclass CallInfo(Generic[TResult]):\n\n @classmethod\n def from_call(\n cls,\n func: \"Callable[[], TResult]\",\n when: \"Literal['collect', 'setup', 'call', 'teardown']\",\n reraise: Optional[\n Union[Type[BaseException], Tuple[Type[BaseException], ...]]\n ] = None,\n ) -> \"CallInfo[TResult]\":\n \"\"\"Call func, wrapping the result in a CallInfo.\n\n :param func:\n The function to call. 
Called without arguments.\n :param when:\n The phase in which the function is called.\n :param reraise:\n Exception or exceptions that shall propagate if raised by the\n function, instead of being wrapped in the CallInfo.\n \"\"\"\n excinfo = None\n start = timing.time()\n precise_start = timing.perf_counter()\n try:\n result: Optional[TResult] = func()\n except BaseException:\n excinfo = ExceptionInfo.from_current()\n if reraise is not None and isinstance(excinfo.value, reraise):\n raise\n result = None\n # use the perf counter\n precise_stop = timing.perf_counter()\n duration = precise_stop - precise_start\n stop = timing.time()\n return cls(\n start=start,\n stop=stop,\n duration=duration,\n when=when,\n result=result,\n excinfo=excinfo,\n _ispytest=True,\n )\n\n def __repr__(self) -> str:\n if self.excinfo is None:\n return f\"\"\n return f\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_runtest_makereport_pytest_make_collect_report.return.rep": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_pytest_runtest_makereport_pytest_make_collect_report.return.rep", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 366, "end_line": 403, "span_ids": ["pytest_runtest_makereport", "pytest_make_collect_report"], "tokens": 395}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport:\n return TestReport.from_item_and_call(item, call)\n\n\ndef pytest_make_collect_report(collector: Collector) -> CollectReport:\n call = CallInfo.from_call(lambda: list(collector.collect()), \"collect\")\n longrepr: Union[None, Tuple[str, int, str], str, TerminalRepr] = None\n if not call.excinfo:\n outcome: Literal[\"passed\", \"skipped\", \"failed\"] = \"passed\"\n else:\n skip_exceptions = [Skipped]\n unittest = sys.modules.get(\"unittest\")\n if unittest is not None:\n # Type ignored because unittest is loaded dynamically.\n skip_exceptions.append(unittest.SkipTest) # type: ignore\n if isinstance(call.excinfo.value, tuple(skip_exceptions)):\n if unittest is not None and isinstance(\n call.excinfo.value, unittest.SkipTest # type: ignore[attr-defined]\n ):\n warnings.warn(UNITTEST_SKIP_DURING_COLLECTION, stacklevel=2)\n\n outcome = \"skipped\"\n r_ = collector._repr_failure_py(call.excinfo, \"line\")\n assert isinstance(r_, ExceptionChainRepr), repr(r_)\n r = r_.reprcrash\n assert r\n longrepr = (str(r.path), r.lineno, r.message)\n else:\n outcome = \"failed\"\n errorinfo = collector.repr_failure(call.excinfo)\n if not hasattr(errorinfo, \"toterminal\"):\n assert isinstance(errorinfo, str)\n errorinfo = CollectErrorRepr(errorinfo)\n longrepr = errorinfo\n result = call.result if not call.excinfo else None\n rep = CollectReport(collector.nodeid, outcome, longrepr, result)\n rep.call = call # type: ignore # see collect_one_node\n return rep", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState_SetupState.__init__.self.stack._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState_SetupState.__init__.self.stack._", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 406, "end_line": 480, "span_ids": ["SetupState"], "tokens": 477}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SetupState:\n \"\"\"Shared state for setting up/tearing down test items or collectors\n in a session.\n\n Suppose we have a collection tree as follows:\n\n \n \n \n \n \n\n The SetupState maintains a stack. The stack starts out empty:\n\n []\n\n During the setup phase of item1, setup(item1) is called. What it does\n is:\n\n push session to stack, run session.setup()\n push mod1 to stack, run mod1.setup()\n push item1 to stack, run item1.setup()\n\n The stack is:\n\n [session, mod1, item1]\n\n While the stack is in this shape, it is allowed to add finalizers to\n each of session, mod1, item1 using addfinalizer().\n\n During the teardown phase of item1, teardown_exact(item2) is called,\n where item2 is the next item to item1. What it does is:\n\n pop item1 from stack, run its teardowns\n pop mod1 from stack, run its teardowns\n\n mod1 was popped because it ended its purpose with item1. The stack is:\n\n [session]\n\n During the setup phase of item2, setup(item2) is called. What it does\n is:\n\n push mod2 to stack, run mod2.setup()\n push item2 to stack, run item2.setup()\n\n Stack:\n\n [session, mod2, item2]\n\n During the teardown phase of item2, teardown_exact(None) is called,\n because item2 is the last item. 
What it does is:\n\n pop item2 from stack, run its teardowns\n pop mod2 from stack, run its teardowns\n pop session from stack, run its teardowns\n\n Stack:\n\n []\n\n The end!\n \"\"\"\n\n def __init__(self) -> None:\n # The stack is in the dict insertion order.\n self.stack: Dict[\n Node,\n Tuple[\n # Node's finalizers.\n List[Callable[[], object]],\n # Node's exception, if its setup raised.\n Optional[Union[OutcomeException, Exception]],\n ],\n ] = {}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState.setup_SetupState.addfinalizer.self_stack_node_0_appen": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState.setup_SetupState.addfinalizer.self_stack_node_0_appen", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 482, "end_line": 511, "span_ids": ["SetupState.setup", "SetupState.addfinalizer"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SetupState:\n\n def setup(self, item: Item) -> None:\n \"\"\"Setup objects along the collector chain to the item.\"\"\"\n needed_collectors = item.listchain()\n\n # If a collector fails its setup, fail its entire subtree of items.\n # The setup is not retried for each item - the same exception is used.\n for col, (finalizers, exc) in self.stack.items():\n assert col in needed_collectors, \"previous item was not torn down properly\"\n if exc:\n raise exc\n\n for col in needed_collectors[len(self.stack) :]:\n assert col not in self.stack\n # Push onto the stack.\n self.stack[col] = ([col.teardown], None)\n try:\n col.setup()\n except TEST_OUTCOME as exc:\n self.stack[col] = (self.stack[col][0], exc)\n raise exc\n\n def addfinalizer(self, finalizer: Callable[[], object], node: Node) -> None:\n \"\"\"Attach a finalizer to the given node.\n\n The node must be currently active in the stack.\n \"\"\"\n assert node and not isinstance(node, tuple)\n assert callable(finalizer)\n assert node in self.stack, (node, self.stack)\n self.stack[node][0].append(finalizer)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState.teardown_exact_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/runner.py_SetupState.teardown_exact_", "embedding": null, "metadata": {"file_path": "src/_pytest/runner.py", "file_name": "runner.py", "file_type": "text/x-python", "category": "implementation", "start_line": 513, "end_line": 549, "span_ids": ["SetupState.teardown_exact", "collect_one_node"], "tokens": 305}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SetupState:\n\n def teardown_exact(self, nextitem: Optional[Item]) -> None:\n \"\"\"Teardown the current stack up until reaching nodes that nextitem\n also descends from.\n\n When nextitem is None (meaning we're at the last item), the entire\n stack is torn down.\n \"\"\"\n needed_collectors = nextitem and nextitem.listchain() or []\n exc = None\n while self.stack:\n if list(self.stack.keys()) == needed_collectors[: len(self.stack)]:\n break\n node, (finalizers, _) = self.stack.popitem()\n while finalizers:\n fin = finalizers.pop()\n try:\n fin()\n except TEST_OUTCOME as e:\n # XXX Only first exception will be seen by user,\n # ideally all should be reported.\n if exc is None:\n exc = e\n if exc:\n raise exc\n if nextitem is None:\n assert not self.stack\n\n\ndef collect_one_node(collector: Collector) -> CollectReport:\n ihook = collector.ihook\n ihook.pytest_collectstart(collector=collector)\n rep: CollectReport = ihook.pytest_make_collect_report(collector=collector)\n call = rep.__dict__.pop(\"call\", None)\n if call and check_interactive_exception(call, rep):\n ihook.pytest_exception_interact(node=collector, call=call, report=rep)\n return rep", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py__Support_for_skip_xfail_from__pytest_store_import": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py__Support_for_skip_xfail_from__pytest_store_import", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 24, "span_ids": ["docstring", "imports"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Support for skip/xfail functions and markers.\"\"\"\nimport os\nimport platform\nimport sys\nimport traceback\nfrom collections.abc import Mapping\nfrom typing import Generator\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import Type\n\nimport attr\n\nfrom _pytest.config import Config\nfrom _pytest.config import hookimpl\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.mark.structures import Mark\nfrom _pytest.nodes import Item\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import skip\nfrom _pytest.outcomes import xfail\nfrom _pytest.reports import BaseReport\nfrom _pytest.runner import CallInfo\nfrom _pytest.store import StoreKey", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py__Saves_the_xfail_mark_ev_pytest_runtest_setup.if_xfailed_and_not_item_c.xfail_NOTRUN_xfail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py__Saves_the_xfail_mark_ev_pytest_runtest_setup.if_xfailed_and_not_item_c.xfail_NOTRUN_xfail", "embedding": 
null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 233, "end_line": 245, "span_ids": ["impl", "evaluate_xfail_marks", "pytest_runtest_setup"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# Saves the xfail mark evaluation. Can be refreshed during call if None.\nxfailed_key = StoreKey[Optional[Xfail]]()\n\n\n@hookimpl(tryfirst=True)\ndef pytest_runtest_setup(item: Item) -> None:\n skipped = evaluate_skip_marks(item)\n if skipped:\n raise skip.Exception(skipped.reason, _use_item_location=True)\n\n item._store[xfailed_key] = xfailed = evaluate_xfail_marks(item)\n if xfailed and not item.config.option.runxfail and not xfailed.run:\n xfail(\"[NOTRUN] \" + xfailed.reason)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_runtest_call_pytest_runtest_call.None_2.item__store_xfailed_key_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/skipping.py_pytest_runtest_call_pytest_runtest_call.None_2.item__store_xfailed_key_", "embedding": null, "metadata": {"file_path": "src/_pytest/skipping.py", "file_name": "skipping.py", "file_type": "text/x-python", "category": "implementation", "start_line": 248, "end_line": 262, "span_ids": ["pytest_runtest_call"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(hookwrapper=True)\ndef pytest_runtest_call(item: Item) -> Generator[None, None, None]:\n xfailed = item._store.get(xfailed_key, None)\n if xfailed is None:\n item._store[xfailed_key] = xfailed = evaluate_xfail_marks(item)\n\n if xfailed and not item.config.option.runxfail and not xfailed.run:\n xfail(\"[NOTRUN] \" + xfailed.reason)\n\n yield\n\n # The test run may have added an xfail mark dynamically.\n xfailed = item._store.get(xfailed_key, None)\n if xfailed is None:\n item._store[xfailed_key] = xfailed = evaluate_xfail_marks(item)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_from_typing_import_List_pytest_addoption.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_from_typing_import_List_pytest_addoption.None_1", "embedding": null, "metadata": {"file_path": "src/_pytest/stepwise.py", "file_name": "stepwise.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 35, "span_ids": ["impl", "pytest_addoption", "imports:10", "impl:2", "imports"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import List\nfrom typing import Optional\nfrom typing import TYPE_CHECKING\n\nimport pytest\nfrom _pytest import nodes\nfrom _pytest.config import Config\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.main import Session\nfrom _pytest.reports import TestReport\n\nif TYPE_CHECKING:\n from _pytest.cacheprovider import Cache\n\nSTEPWISE_CACHE_DIR = \"cache/stepwise\"\n\n\ndef pytest_addoption(parser: Parser) -> None:\n group = parser.getgroup(\"general\")\n group.addoption(\n \"--sw\",\n \"--stepwise\",\n action=\"store_true\",\n default=False,\n dest=\"stepwise\",\n help=\"exit on test failure and continue from last failing test next time\",\n )\n group.addoption(\n \"--sw-skip\",\n \"--stepwise-skip\",\n action=\"store_true\",\n default=False,\n dest=\"stepwise_skip\",\n help=\"ignore the first failing test but stop on the next failing test\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_pytest_configure_pytest_sessionfinish.if_not_session_config_get.session_config_cache_set_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_pytest_configure_pytest_sessionfinish.if_not_session_config_get.session_config_cache_set_", "embedding": null, "metadata": {"file_path": "src/_pytest/stepwise.py", "file_name": "stepwise.py", "file_type": "text/x-python", "category": "implementation", "start_line": 38, "end_line": 49, "span_ids": ["pytest_sessionfinish", "pytest_configure"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.hookimpl\ndef pytest_configure(config: Config) -> None:\n # We should always have a cache as cache provider plugin uses tryfirst=True\n if config.getoption(\"stepwise\"):\n config.pluginmanager.register(StepwisePlugin(config), \"stepwiseplugin\")\n\n\ndef pytest_sessionfinish(session: Session) -> None:\n if not session.config.getoption(\"stepwise\"):\n assert session.config.cache is not None\n # Clear the list of failing tests if the plugin is not active.\n session.config.cache.set(STEPWISE_CACHE_DIR, [])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_StepwisePlugin_StepwisePlugin.pytest_collection_modifyitems.if_failed_index_is_None_.else_.config_hook_pytest_desele": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_StepwisePlugin_StepwisePlugin.pytest_collection_modifyitems.if_failed_index_is_None_.else_.config_hook_pytest_desele", "embedding": null, "metadata": {"file_path": "src/_pytest/stepwise.py", "file_name": "stepwise.py", "file_type": "text/x-python", "category": 
"implementation", "start_line": 52, "end_line": 87, "span_ids": ["StepwisePlugin", "StepwisePlugin.pytest_collection_modifyitems", "StepwisePlugin.pytest_sessionstart"], "tokens": 308}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StepwisePlugin:\n def __init__(self, config: Config) -> None:\n self.config = config\n self.session: Optional[Session] = None\n self.report_status = \"\"\n assert config.cache is not None\n self.cache: Cache = config.cache\n self.lastfailed: Optional[str] = self.cache.get(STEPWISE_CACHE_DIR, None)\n self.skip: bool = config.getoption(\"stepwise_skip\")\n\n def pytest_sessionstart(self, session: Session) -> None:\n self.session = session\n\n def pytest_collection_modifyitems(\n self, config: Config, items: List[nodes.Item]\n ) -> None:\n if not self.lastfailed:\n self.report_status = \"no previously failed tests, not skipping.\"\n return\n\n # check all item nodes until we find a match on last failed\n failed_index = None\n for index, item in enumerate(items):\n if item.nodeid == self.lastfailed:\n failed_index = index\n break\n\n # If the previously failed test was not found among the test items,\n # do not skip any tests.\n if failed_index is None:\n self.report_status = \"previously failed test not found, not skipping.\"\n else:\n self.report_status = f\"skipping {failed_index} already passed items.\"\n deselected = items[:failed_index]\n del items[:failed_index]\n config.hook.pytest_deselected(items=deselected)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_StepwisePlugin.pytest_runtest_logreport_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/stepwise.py_StepwisePlugin.pytest_runtest_logreport_", "embedding": null, "metadata": {"file_path": "src/_pytest/stepwise.py", "file_name": "stepwise.py", "file_type": "text/x-python", "category": "implementation", "start_line": 89, "end_line": 120, "span_ids": ["StepwisePlugin.pytest_sessionfinish", "StepwisePlugin.pytest_report_collectionfinish", "StepwisePlugin.pytest_runtest_logreport"], "tokens": 263}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StepwisePlugin:\n\n def pytest_runtest_logreport(self, report: TestReport) -> None:\n if report.failed:\n if self.skip:\n # Remove test from the failed ones (if it exists) and unset the skip option\n # to make sure the following tests will not be skipped.\n if report.nodeid == self.lastfailed:\n self.lastfailed = None\n\n self.skip = False\n else:\n # Mark test as the last failing and interrupt the test session.\n self.lastfailed = report.nodeid\n assert self.session is not None\n self.session.shouldstop = (\n \"Test failed, continuing from this test next run.\"\n )\n\n else:\n # If the test was actually run and did pass.\n if report.when == 
\"call\":\n # Remove test from the failed ones, if exists.\n if report.nodeid == self.lastfailed:\n self.lastfailed = None\n\n def pytest_report_collectionfinish(self) -> Optional[str]:\n if self.config.getoption(\"verbose\") >= 0 and self.report_status:\n return f\"stepwise: {self.report_status}\"\n return None\n\n def pytest_sessionfinish(self) -> None:\n self.cache.set(STEPWISE_CACHE_DIR, self.lastfailed)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__Terminal_reporting_of___REPORTCHARS_DEFAULT._fE_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__Terminal_reporting_of___REPORTCHARS_DEFAULT._fE_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 72, "span_ids": ["impl", "impl:2", "imports:46", "docstring", "imports"], "tokens": 383}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Terminal reporting of the full testing process.\n\nThis is a good source for looking at the various reporting hooks.\n\"\"\"\nimport argparse\nimport datetime\nimport inspect\nimport platform\nimport sys\nimport warnings\nfrom collections import Counter\nfrom functools import partial\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Callable\nfrom typing import cast\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import List\nfrom typing import Mapping\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Set\nfrom typing import TextIO\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport attr\nimport pluggy\nimport py\n\nimport _pytest._version\nfrom _pytest import nodes\nfrom _pytest import timing\nfrom _pytest._code import ExceptionInfo\nfrom _pytest._code.code import ExceptionRepr\nfrom _pytest._io.wcwidth import wcswidth\nfrom _pytest.compat import final\nfrom _pytest.config import _PluggyPlugin\nfrom _pytest.config import Config\nfrom _pytest.config import ExitCode\nfrom _pytest.config import hookimpl\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.nodes import Item\nfrom _pytest.nodes import Node\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pathlib import bestrelpath\nfrom _pytest.reports import BaseReport\nfrom _pytest.reports import CollectReport\nfrom _pytest.reports import TestReport\n\nif TYPE_CHECKING:\n from typing_extensions import Literal\n\n from _pytest.main import Session\n\n\nREPORT_COLLECTING_RESOLUTION = 0.5\n\nKNOWN_TYPES = (\n \"failed\",\n \"passed\",\n \"skipped\",\n \"deselected\",\n \"xfailed\",\n \"xpassed\",\n \"warnings\",\n \"error\",\n)\n\n_REPORTCHARS_DEFAULT = \"fE\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_report_teststatus_WarningReport.get_location.return.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_pytest_report_teststatus_WarningReport.get_location.return.None", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 264, "end_line": 305, "span_ids": ["pytest_report_teststatus", "WarningReport", "WarningReport.get_location"], "tokens": 340}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@hookimpl(trylast=True) # after _pytest.runner\ndef pytest_report_teststatus(report: BaseReport) -> Tuple[str, str, str]:\n letter = \"F\"\n if report.passed:\n letter = \".\"\n elif report.skipped:\n letter = \"s\"\n\n outcome: str = report.outcome\n if report.when in (\"collect\", \"setup\", \"teardown\") and outcome == \"failed\":\n outcome = \"error\"\n letter = \"E\"\n\n return outcome, letter, outcome.upper()\n\n\n@attr.s\nclass WarningReport:\n \"\"\"Simple structure to hold warnings information captured by ``pytest_warning_recorded``.\n\n :ivar str message:\n User friendly message about the warning.\n :ivar str|None nodeid:\n nodeid that generated the warning (see ``get_location``).\n :ivar tuple fslocation:\n File system location of the source of the warning (see ``get_location``).\n \"\"\"\n\n message = attr.ib(type=str)\n nodeid = attr.ib(type=Optional[str], default=None)\n fslocation = attr.ib(type=Optional[Tuple[str, int]], default=None)\n count_towards_summary = True\n\n def get_location(self, config: Config) -> Optional[str]:\n \"\"\"Return the more user-friendly information about the location of a warning, or None.\"\"\"\n if self.nodeid:\n return self.nodeid\n if self.fslocation:\n filename, linenum = self.fslocation\n relpath = bestrelpath(config.invocation_params.dir, absolutepath(filename))\n return f\"{relpath}:{linenum}\"\n return None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter_TerminalReporter.__init__.self._keyboardinterrupt_memo.None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter_TerminalReporter.__init__.self._keyboardinterrupt_memo.None", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 308, "end_line": 335, "span_ids": ["TerminalReporter"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n def __init__(self, config: Config, file: Optional[TextIO] = None) -> None:\n import _pytest.config\n\n self.config = config\n 
self._numcollected = 0\n self._session: Optional[Session] = None\n self._showfspath: Optional[bool] = None\n\n self.stats: Dict[str, List[Any]] = {}\n self._main_color: Optional[str] = None\n self._known_types: Optional[List[str]] = None\n self.startdir = config.invocation_dir\n self.startpath = config.invocation_params.dir\n if file is None:\n file = sys.stdout\n self._tw = _pytest.config.create_terminal_writer(config, file)\n self._screen_width = self._tw.fullwidth\n self.currentfspath: Union[None, Path, str, int] = None\n self.reportchars = getreportopt(config)\n self.hasmarkup = self._tw.hasmarkup\n self.isatty = file.isatty()\n self._progress_nodeids_reported: Set[str] = set()\n self._show_progress_info = self._determine_show_progress_info()\n self._collect_report_last_write: Optional[float] = None\n self._already_displayed_warnings: Optional[int] = None\n self._keyboardinterrupt_memo: Optional[ExceptionRepr] = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary_TerminalReporter.short_test_summary.show_xpassed.for_rep_in_xpassed_.lines_append_f_verbose_w": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.short_test_summary_TerminalReporter.short_test_summary.show_xpassed.for_rep_in_xpassed_.lines_append_f_verbose_w", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1077, "end_line": 1107, "span_ids": ["TerminalReporter.short_test_summary"], "tokens": 275}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def short_test_summary(self) -> None:\n if not self.reportchars:\n return\n\n def show_simple(stat, lines: List[str]) -> None:\n failed = self.stats.get(stat, [])\n if not failed:\n return\n termwidth = self._tw.fullwidth\n config = self.config\n for rep in failed:\n line = _get_line_with_reprcrash_message(config, rep, termwidth)\n lines.append(line)\n\n def show_xfailed(lines: List[str]) -> None:\n xfailed = self.stats.get(\"xfailed\", [])\n for rep in xfailed:\n verbose_word = rep._get_verbose_word(self.config)\n pos = _get_pos(self.config, rep)\n lines.append(f\"{verbose_word} {pos}\")\n reason = rep.wasxfail\n if reason:\n lines.append(\" \" + str(reason))\n\n def show_xpassed(lines: List[str]) -> None:\n xpassed = self.stats.get(\"xpassed\", [])\n for rep in xpassed:\n verbose_word = rep._get_verbose_word(self.config)\n pos = _get_pos(self.config, rep)\n reason = rep.wasxfail\n lines.append(f\"{verbose_word} {pos} {reason}\")\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.build_summary_stats_line_TerminalReporter.build_summary_stats_line.if_self_config_getoption_.else_.return.self__build_normal_summar": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter.build_summary_stats_line_TerminalReporter.build_summary_stats_line.if_self_config_getoption_.else_.return.self__build_normal_summar", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1174, "end_line": 1197, "span_ids": ["TerminalReporter.build_summary_stats_line"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def build_summary_stats_line(self) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]:\n \"\"\"\n Build the parts used in the last summary stats line.\n\n The summary stats line is the line shown at the end, \"=== 12 passed, 2 errors in Xs===\".\n\n This function builds a list of the \"parts\" that make up for the text in that line, in\n the example above it would be:\n\n [\n (\"12 passed\", {\"green\": True}),\n (\"2 errors\", {\"red\": True}\n ]\n\n That last dict for each line is a \"markup dictionary\", used by TerminalWriter to\n color output.\n\n The final color of the line is also determined by this function, and is the second\n element of the returned tuple.\n \"\"\"\n if self.config.getoption(\"collectonly\"):\n return self._build_collect_only_summary_stats_line()\n else:\n return self._build_normal_summary_stats_line()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_reports_to_display_TerminalReporter._build_normal_summary_stats_line.return.parts_main_color": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._get_reports_to_display_TerminalReporter._build_normal_summary_stats_line.return.parts_main_color", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1199, "end_line": 1221, "span_ids": ["TerminalReporter._get_reports_to_display", "TerminalReporter._build_normal_summary_stats_line"], "tokens": 229}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _get_reports_to_display(self, key: str) -> List[Any]:\n \"\"\"Get test/collection reports for the given status 
key, such as `passed` or `error`.\"\"\"\n reports = self.stats.get(key, [])\n return [x for x in reports if getattr(x, \"count_towards_summary\", True)]\n\n def _build_normal_summary_stats_line(\n self,\n ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]:\n main_color, known_types = self._get_main_color()\n parts = []\n\n for key in known_types:\n reports = self._get_reports_to_display(key)\n if reports:\n count = len(reports)\n color = _color_for_type.get(key, _color_for_type_default)\n markup = {color: True, \"bold\": color == main_color}\n parts.append((\"%d %s\" % pluralize(count, key), markup))\n\n if not parts:\n parts = [(\"no tests ran\", {_color_for_type_default: True})]\n\n return parts, main_color", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._build_collect_only_summary_stats_line_TerminalReporter._build_collect_only_summary_stats_line.return.parts_main_color": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py_TerminalReporter._build_collect_only_summary_stats_line_TerminalReporter._build_collect_only_summary_stats_line.return.parts_main_color", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1223, "end_line": 1253, "span_ids": ["TerminalReporter._build_collect_only_summary_stats_line"], "tokens": 311}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\nclass TerminalReporter:\n\n def _build_collect_only_summary_stats_line(\n self,\n ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]:\n deselected = len(self._get_reports_to_display(\"deselected\"))\n errors = len(self._get_reports_to_display(\"error\"))\n\n if self._numcollected == 0:\n parts = [(\"no tests collected\", {\"yellow\": True})]\n main_color = \"yellow\"\n\n elif deselected == 0:\n main_color = \"green\"\n collected_output = \"%d %s collected\" % pluralize(self._numcollected, \"test\")\n parts = [(collected_output, {main_color: True})]\n else:\n all_tests_were_deselected = self._numcollected == deselected\n if all_tests_were_deselected:\n main_color = \"yellow\"\n collected_output = f\"no tests collected ({deselected} deselected)\"\n else:\n main_color = \"green\"\n selected = self._numcollected - deselected\n collected_output = f\"{selected}/{self._numcollected} tests collected ({deselected} deselected)\"\n\n parts = [(collected_output, {main_color: True})]\n\n if errors:\n main_color = _color_for_type[\"error\"]\n parts += [(\"%d %s\" % pluralize(errors, \"error\"), {main_color: True})]\n\n return parts, main_color", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_pos__format_trimmed.return.format_format_msg_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_pos__format_trimmed.return.format_format_msg_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1256, "end_line": 1283, "span_ids": ["_format_trimmed", "_get_pos"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_pos(config: Config, rep: BaseReport):\n nodeid = config.cwd_relative_nodeid(rep.nodeid)\n return nodeid\n\n\ndef _format_trimmed(format: str, msg: str, available_width: int) -> Optional[str]:\n \"\"\"Format msg into format, ellipsizing it if doesn't fit in available_width.\n\n Returns None if even the ellipsis can't fit.\n \"\"\"\n # Only use the first line.\n i = msg.find(\"\\n\")\n if i != -1:\n msg = msg[:i]\n\n ellipsis = \"...\"\n format_width = wcswidth(format.format(\"\"))\n if format_width + len(ellipsis) > available_width:\n return None\n\n if format_width + wcswidth(msg) > available_width:\n available_width -= len(ellipsis)\n msg = msg[:available_width]\n while format_width + wcswidth(msg) > available_width:\n msg = msg[:-1]\n msg += ellipsis\n\n return format.format(msg)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__color_for_type_pluralize.return.count_noun_s_if_coun": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__color_for_type_pluralize.return.count_noun_s_if_coun", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1341, "end_line": 1360, "span_ids": ["pluralize", "impl:8"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "_color_for_type = {\n \"failed\": \"red\",\n \"error\": \"red\",\n \"warnings\": \"yellow\",\n \"passed\": \"green\",\n}\n_color_for_type_default = \"yellow\"\n\n\ndef pluralize(count: int, noun: str) -> Tuple[int, str]:\n # No need to pluralize words such as `failed` or `passed`.\n if noun not in [\"error\", \"warnings\", \"test\"]:\n return count, noun\n\n # The `warnings` key is plural. 
To avoid API breakage, we keep it that way but\n # set it to singular here so we can determine plurality in the same way as we do\n # for `error`.\n noun = noun.replace(\"warnings\", \"warning\")\n\n return count, noun + \"s\" if count != 1 else noun", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__plugin_nameversions_format_session_duration.if_seconds_60_.else_.return.f_seconds_2f_s_dt_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__plugin_nameversions_format_session_duration.if_seconds_60_.else_.return.f_seconds_2f_s_dt_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1363, "end_line": 1383, "span_ids": ["_plugin_nameversions", "format_session_duration"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _plugin_nameversions(plugininfo) -> List[str]:\n values: List[str] = []\n for plugin, dist in plugininfo:\n # Gets us name and version!\n name = \"{dist.project_name}-{dist.version}\".format(dist=dist)\n # Questionable convenience, but it keeps things short.\n if name.startswith(\"pytest-\"):\n name = name[7:]\n # We decided to print python package names they can have more than one plugin.\n if name not in values:\n values.append(name)\n return values\n\n\ndef format_session_duration(seconds: float) -> str:\n \"\"\"Format the given seconds in a human readable manner to show in the final summary.\"\"\"\n if seconds < 60:\n return f\"{seconds:.2f}s\"\n else:\n dt = datetime.timedelta(seconds=int(seconds))\n return f\"{seconds:.2f}s ({dt})\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_raw_skip_reason_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/terminal.py__get_raw_skip_reason_", "embedding": null, "metadata": {"file_path": "src/_pytest/terminal.py", "file_name": "terminal.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1386, "end_line": 1405, "span_ids": ["_get_raw_skip_reason"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_raw_skip_reason(report: TestReport) -> str:\n \"\"\"Get the reason string of a skip/xfail/xpass test report.\n\n The string is just the part given by the user.\n \"\"\"\n if hasattr(report, \"wasxfail\"):\n reason = cast(str, report.wasxfail)\n if reason.startswith(\"reason: \"):\n reason = reason[len(\"reason: \") :]\n return reason\n else:\n assert report.skipped\n assert 
isinstance(report.longrepr, tuple)\n _, _, reason = report.longrepr\n if reason.startswith(\"Skipped: \"):\n reason = reason[len(\"Skipped: \") :]\n elif reason == \"Skipped\":\n reason = \"\"\n return reason", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_threading_catch_threading_exception.__exit__.del_self_args": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_threading_catch_threading_exception.__exit__.del_self_args", "embedding": null, "metadata": {"file_path": "src/_pytest/threadexception.py", "file_name": "threadexception.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 57, "span_ids": ["catch_threading_exception.__exit__", "catch_threading_exception.__enter__", "catch_threading_exception._hook", "catch_threading_exception", "imports"], "tokens": 406}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import threading\nimport traceback\nimport warnings\nfrom types import TracebackType\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Generator\nfrom typing import Optional\nfrom typing import Type\n\nimport pytest\n\n\n# Copied from cpython/Lib/test/support/threading_helper.py, with modifications.\nclass catch_threading_exception:\n \"\"\"Context manager catching threading.Thread exception using\n threading.excepthook.\n\n Storing exc_value using a custom hook can create a reference cycle. The\n reference cycle is broken explicitly when the context manager exits.\n\n Storing thread using a custom hook can resurrect it if it is set to an\n object which is being finalized. 
Exiting the context manager clears the\n stored object.\n\n Usage:\n with threading_helper.catch_threading_exception() as cm:\n # code spawning a thread which raises an exception\n ...\n # check the thread exception: use cm.args\n ...\n # cm.args attribute no longer exists at this point\n # (to break a reference cycle)\n \"\"\"\n\n def __init__(self) -> None:\n self.args: Optional[\"threading.ExceptHookArgs\"] = None\n self._old_hook: Optional[Callable[[\"threading.ExceptHookArgs\"], Any]] = None\n\n def _hook(self, args: \"threading.ExceptHookArgs\") -> None:\n self.args = args\n\n def __enter__(self) -> \"catch_threading_exception\":\n self._old_hook = threading.excepthook\n threading.excepthook = self._hook\n return self\n\n def __exit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n assert self._old_hook is not None\n threading.excepthook = self._old_hook\n self._old_hook = None\n del self.args", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_thread_exception_runtest_hook_thread_exception_runtest_hook.with_catch_threading_exce.if_cm_args_.warnings_warn_pytest_Pyte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_thread_exception_runtest_hook_thread_exception_runtest_hook.with_catch_threading_exce.if_cm_args_.warnings_warn_pytest_Pyte", "embedding": null, "metadata": {"file_path": "src/_pytest/threadexception.py", "file_name": "threadexception.py", "file_type": "text/x-python", "category": "implementation", "start_line": 60, "end_line": 76, "span_ids": ["thread_exception_runtest_hook"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def thread_exception_runtest_hook() -> Generator[None, None, None]:\n with catch_threading_exception() as cm:\n yield\n if cm.args:\n if cm.args.thread is not None:\n thread_name = cm.args.thread.name\n else:\n thread_name = \"\"\n msg = f\"Exception in thread {thread_name}\\n\\n\"\n msg += \"\".join(\n traceback.format_exception(\n cm.args.exc_type,\n cm.args.exc_value,\n cm.args.exc_traceback,\n )\n )\n warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_pytest_runtest_setup_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/threadexception.py_pytest_runtest_setup_", "embedding": null, "metadata": {"file_path": "src/_pytest/threadexception.py", "file_name": "threadexception.py", "file_type": "text/x-python", "category": "implementation", "start_line": 79, "end_line": 92, "span_ids": ["pytest_runtest_call", "pytest_runtest_teardown", "pytest_runtest_setup"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.hookimpl(hookwrapper=True, trylast=True)\ndef pytest_runtest_setup() -> Generator[None, None, None]:\n yield from thread_exception_runtest_hook()\n\n\n@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_runtest_call() -> Generator[None, None, None]:\n yield from thread_exception_runtest_hook()\n\n\n@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_runtest_teardown() -> Generator[None, None, None]:\n yield from thread_exception_runtest_hook()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py__Support_for_providing__from__pytest_monkeypatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py__Support_for_providing__from__pytest_monkeypatch_", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 20, "span_ids": ["docstring", "imports"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Support for providing temporary directories to test functions.\"\"\"\nimport os\nimport re\nimport tempfile\nfrom pathlib import Path\nfrom typing import Optional\n\nimport attr\nimport py\n\nfrom .pathlib import ensure_reset_dir\nfrom .pathlib import LOCK_TIMEOUT\nfrom .pathlib import make_numbered_dir\nfrom .pathlib import make_numbered_dir_with_cleanup\nfrom _pytest.compat import final\nfrom _pytest.config import Config\nfrom _pytest.deprecated import check_ispytest\nfrom _pytest.fixtures import fixture\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.monkeypatch import MonkeyPatch", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory_TempPathFactory._ensure_relative_to_basetemp.return.basename": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_TempPathFactory_TempPathFactory._ensure_relative_to_basetemp.return.basename", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 23, "end_line": 76, "span_ids": ["TempPathFactory._ensure_relative_to_basetemp", "TempPathFactory.from_config", "TempPathFactory"], "tokens": 417}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@final\n@attr.s(init=False)\nclass 
TempPathFactory:\n \"\"\"Factory for temporary directories under the common base temp directory.\n\n The base directory can be configured using the ``--basetemp`` option.\n \"\"\"\n\n _given_basetemp = attr.ib(type=Optional[Path])\n _trace = attr.ib()\n _basetemp = attr.ib(type=Optional[Path])\n\n def __init__(\n self,\n given_basetemp: Optional[Path],\n trace,\n basetemp: Optional[Path] = None,\n *,\n _ispytest: bool = False,\n ) -> None:\n check_ispytest(_ispytest)\n if given_basetemp is None:\n self._given_basetemp = None\n else:\n # Use os.path.abspath() to get absolute path instead of resolve() as it\n # does not work the same in all platforms (see #4427).\n # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012).\n self._given_basetemp = Path(os.path.abspath(str(given_basetemp)))\n self._trace = trace\n self._basetemp = basetemp\n\n @classmethod\n def from_config(\n cls,\n config: Config,\n *,\n _ispytest: bool = False,\n ) -> \"TempPathFactory\":\n \"\"\"Create a factory according to pytest configuration.\n\n :meta private:\n \"\"\"\n check_ispytest(_ispytest)\n return cls(\n given_basetemp=config.option.basetemp,\n trace=config.trace.get(\"tmpdir\"),\n _ispytest=True,\n )\n\n def _ensure_relative_to_basetemp(self, basename: str) -> str:\n basename = os.path.normpath(basename)\n if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp():\n raise ValueError(f\"{basename} is not a normalized and relative path\")\n return basename", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmpdir_factory__mk_tmp.return.factory_mktemp_name_numb": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmpdir_factory__mk_tmp.return.factory_mktemp_name_numb", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 177, "end_line": 196, "span_ids": ["tmpdir_factory", "tmp_path_factory", "_mk_tmp"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture(scope=\"session\")\ndef tmpdir_factory(request: FixtureRequest) -> TempdirFactory:\n \"\"\"Return a :class:`_pytest.tmpdir.TempdirFactory` instance for the test session.\"\"\"\n # Set dynamically by pytest_configure() above.\n return request.config._tmpdirhandler # type: ignore\n\n\n@fixture(scope=\"session\")\ndef tmp_path_factory(request: FixtureRequest) -> TempPathFactory:\n \"\"\"Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.\"\"\"\n # Set dynamically by pytest_configure() above.\n return request.config._tmp_path_factory # type: ignore\n\n\ndef _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path:\n name = request.node.name\n name = re.sub(r\"[\\W]\", \"_\", name)\n MAXVAL = 30\n name = name[:MAXVAL]\n return factory.mktemp(name, numbered=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": 
"\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmpdir_tmpdir.return.py_path_local_tmp_path_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmpdir_tmpdir.return.py_path_local_tmp_path_", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 199, "end_line": 214, "span_ids": ["tmpdir"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef tmpdir(tmp_path: Path) -> py.path.local:\n \"\"\"Return a temporary directory path object which is unique to each test\n function invocation, created as a sub directory of the base temporary\n directory.\n\n By default, a new base temporary directory is created each test session,\n and old bases are removed after 3 sessions, to aid in debugging. If\n ``--basetemp`` is used then it is cleared each session. See :ref:`base\n temporary directory`.\n\n The returned object is a `py.path.local`_ path object.\n\n .. _`py.path.local`: https://py.readthedocs.io/en/latest/path.html\n \"\"\"\n return py.path.local(tmp_path)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmp_path_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/tmpdir.py_tmp_path_", "embedding": null, "metadata": {"file_path": "src/_pytest/tmpdir.py", "file_name": "tmpdir.py", "file_type": "text/x-python", "category": "implementation", "start_line": 217, "end_line": 232, "span_ids": ["tmp_path"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@fixture\ndef tmp_path(request: FixtureRequest, tmp_path_factory: TempPathFactory) -> Path:\n \"\"\"Return a temporary directory path object which is unique to each test\n function invocation, created as a sub directory of the base temporary\n directory.\n\n By default, a new base temporary directory is created each test session,\n and old bases are removed after 3 sessions, to aid in debugging. If\n ``--basetemp`` is used then it is cleared each session. 
See :ref:`base\n temporary directory`.\n\n The returned object is a :class:`pathlib.Path` object.\n \"\"\"\n\n return _mk_tmp(request, tmp_path_factory)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__Discover_and_run_std_l_if_TYPE_CHECKING_._SysExcInfoType.Union_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__Discover_and_run_std_l_if_TYPE_CHECKING_._SysExcInfoType.Union_", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 42, "span_ids": ["impl", "impl:2", "docstring", "imports:30", "imports"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Discover and run std-library \"unittest\" style tests.\"\"\"\nimport sys\nimport traceback\nimport types\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Generator\nfrom typing import Iterable\nfrom typing import List\nfrom typing import Optional\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport _pytest._code\nimport pytest\nfrom _pytest.compat import getimfunc\nfrom _pytest.compat import is_async_function\nfrom _pytest.config import hookimpl\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.nodes import Collector\nfrom _pytest.nodes import Item\nfrom _pytest.outcomes import exit\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import skip\nfrom _pytest.outcomes import xfail\nfrom _pytest.python import Class\nfrom _pytest.python import Function\nfrom _pytest.python import PyCollector\nfrom _pytest.runner import CallInfo\n\nif TYPE_CHECKING:\n import unittest\n import twisted.trial.unittest\n\n from _pytest.fixtures import _Scope\n\n _SysExcInfoType = Union[\n Tuple[Type[BaseException], BaseException, types.TracebackType],\n Tuple[None, None, None],\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction.addError_TestCaseFunction.stopTest.pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py_TestCaseFunction.addError_TestCaseFunction.stopTest.pass", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 241, "end_line": 291, "span_ids": ["TestCaseFunction.addUnexpectedSuccess", "TestCaseFunction.addError", "TestCaseFunction.addSuccess", "TestCaseFunction.addExpectedFailure", "TestCaseFunction.stopTest", "TestCaseFunction.addSkip", "TestCaseFunction.addFailure"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaseFunction(Function):\n\n def addError(\n self, testcase: \"unittest.TestCase\", rawexcinfo: \"_SysExcInfoType\"\n ) -> None:\n try:\n if isinstance(rawexcinfo[1], exit.Exception):\n exit(rawexcinfo[1].msg)\n except TypeError:\n pass\n self._addexcinfo(rawexcinfo)\n\n def addFailure(\n self, testcase: \"unittest.TestCase\", rawexcinfo: \"_SysExcInfoType\"\n ) -> None:\n self._addexcinfo(rawexcinfo)\n\n def addSkip(self, testcase: \"unittest.TestCase\", reason: str) -> None:\n try:\n raise pytest.skip.Exception(reason, _use_item_location=True)\n except skip.Exception:\n self._addexcinfo(sys.exc_info())\n\n def addExpectedFailure(\n self,\n testcase: \"unittest.TestCase\",\n rawexcinfo: \"_SysExcInfoType\",\n reason: str = \"\",\n ) -> None:\n try:\n xfail(str(reason))\n except xfail.Exception:\n self._addexcinfo(sys.exc_info())\n\n def addUnexpectedSuccess(\n self,\n testcase: \"unittest.TestCase\",\n reason: Optional[\"twisted.trial.unittest.Todo\"] = None,\n ) -> None:\n msg = \"Unexpected success\"\n if reason:\n msg += f\": {reason.reason}\"\n # Preserve unittest behaviour - fail the test. Explicitly not an XPASS.\n try:\n fail(msg, pytrace=False)\n except fail.Exception:\n self._addexcinfo(sys.exc_info())\n\n def addSuccess(self, testcase: \"unittest.TestCase\") -> None:\n pass\n\n def stopTest(self, testcase: \"unittest.TestCase\") -> None:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__Twisted_trial_support__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unittest.py__Twisted_trial_support__", "embedding": null, "metadata": {"file_path": "src/_pytest/unittest.py", "file_name": "unittest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 361, "end_line": 407, "span_ids": ["pytest_runtest_protocol", "pytest_runtest_makereport", "check_testcase_implements_trial_reporter", "_is_skipped"], "tokens": 347}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# Twisted trial support.\n\n\n@hookimpl(hookwrapper=True)\ndef pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:\n if isinstance(item, TestCaseFunction) and \"twisted.trial.unittest\" in sys.modules:\n ut: Any = sys.modules[\"twisted.python.failure\"]\n Failure__init__ = ut.Failure.__init__\n check_testcase_implements_trial_reporter()\n\n def excstore(\n self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None\n ):\n if exc_value is None:\n self._rawexcinfo = sys.exc_info()\n else:\n if exc_type is None:\n exc_type = type(exc_value)\n self._rawexcinfo = (exc_type, exc_value, exc_tb)\n try:\n Failure__init__(\n self, exc_value, exc_type, exc_tb, captureVars=captureVars\n )\n except TypeError:\n Failure__init__(self, exc_value, exc_type, exc_tb)\n\n ut.Failure.__init__ = excstore\n yield\n ut.Failure.__init__ = Failure__init__\n else:\n yield\n\n\ndef 
check_testcase_implements_trial_reporter(done: List[int] = []) -> None:\n if done:\n return\n from zope.interface import classImplements\n from twisted.trial.itrial import IReporter\n\n classImplements(TestCaseFunction, IReporter)\n done.append(1)\n\n\ndef _is_skipped(obj) -> bool:\n \"\"\"Return True if the given object has been marked with @unittest.skip.\"\"\"\n return bool(getattr(obj, \"__unittest_skip__\", False))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_sys_catch_unraisable_exception.__exit__.del_self_unraisable": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_sys_catch_unraisable_exception.__exit__.del_self_unraisable", "embedding": null, "metadata": {"file_path": "src/_pytest/unraisableexception.py", "file_name": "unraisableexception.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 59, "span_ids": ["catch_unraisable_exception.__enter__", "catch_unraisable_exception", "catch_unraisable_exception._hook", "imports", "catch_unraisable_exception.__exit__"], "tokens": 464}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport traceback\nimport warnings\nfrom types import TracebackType\nfrom typing import Any\nfrom typing import Callable\nfrom typing import Generator\nfrom typing import Optional\nfrom typing import Type\n\nimport pytest\n\n\n# Copied from cpython/Lib/test/support/__init__.py, with modifications.\nclass catch_unraisable_exception:\n \"\"\"Context manager catching unraisable exception using sys.unraisablehook.\n\n Storing the exception value (cm.unraisable.exc_value) creates a reference\n cycle. The reference cycle is broken explicitly when the context manager\n exits.\n\n Storing the object (cm.unraisable.object) can resurrect it if it is set to\n an object which is being finalized. Exiting the context manager clears the\n stored object.\n\n Usage:\n with catch_unraisable_exception() as cm:\n # code creating an \"unraisable exception\"\n ...\n # check the unraisable exception: use cm.unraisable\n ...\n # cm.unraisable attribute no longer exists at this point\n # (to break a reference cycle)\n \"\"\"\n\n def __init__(self) -> None:\n self.unraisable: Optional[\"sys.UnraisableHookArgs\"] = None\n self._old_hook: Optional[Callable[[\"sys.UnraisableHookArgs\"], Any]] = None\n\n def _hook(self, unraisable: \"sys.UnraisableHookArgs\") -> None:\n # Storing unraisable.object can resurrect an object which is being\n # finalized. 
Storing unraisable.exc_value creates a reference cycle.\n self.unraisable = unraisable\n\n def __enter__(self) -> \"catch_unraisable_exception\":\n self._old_hook = sys.unraisablehook\n sys.unraisablehook = self._hook\n return self\n\n def __exit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n assert self._old_hook is not None\n sys.unraisablehook = self._old_hook\n self._old_hook = None\n del self.unraisable", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_unraisable_exception_runtest_hook_unraisable_exception_runtest_hook.with_catch_unraisable_exc.if_cm_unraisable_.warnings_warn_pytest_Pyte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_unraisable_exception_runtest_hook_unraisable_exception_runtest_hook.with_catch_unraisable_exc.if_cm_unraisable_.warnings_warn_pytest_Pyte", "embedding": null, "metadata": {"file_path": "src/_pytest/unraisableexception.py", "file_name": "unraisableexception.py", "file_type": "text/x-python", "category": "implementation", "start_line": 62, "end_line": 78, "span_ids": ["unraisable_exception_runtest_hook"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def unraisable_exception_runtest_hook() -> Generator[None, None, None]:\n with catch_unraisable_exception() as cm:\n yield\n if cm.unraisable:\n if cm.unraisable.err_msg is not None:\n err_msg = cm.unraisable.err_msg\n else:\n err_msg = \"Exception ignored in\"\n msg = f\"{err_msg}: {cm.unraisable.object!r}\\n\\n\"\n msg += \"\".join(\n traceback.format_exception(\n cm.unraisable.exc_type,\n cm.unraisable.exc_value,\n cm.unraisable.exc_traceback,\n )\n )\n warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_pytest_runtest_setup_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/unraisableexception.py_pytest_runtest_setup_", "embedding": null, "metadata": {"file_path": "src/_pytest/unraisableexception.py", "file_name": "unraisableexception.py", "file_type": "text/x-python", "category": "implementation", "start_line": 81, "end_line": 94, "span_ids": ["pytest_runtest_call", "pytest_runtest_teardown", "pytest_runtest_setup"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_runtest_setup() -> Generator[None, None, None]:\n yield from 
unraisable_exception_runtest_hook()\n\n\n@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_runtest_call() -> Generator[None, None, None]:\n yield from unraisable_exception_runtest_hook()\n\n\n@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_runtest_teardown() -> Generator[None, None, None]:\n yield from unraisable_exception_runtest_hook()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_sys_pytest_configure.config_addinivalue_line_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_sys_pytest_configure.config_addinivalue_line_", "embedding": null, "metadata": {"file_path": "src/_pytest/warnings.py", "file_name": "warnings.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 25, "span_ids": ["impl", "pytest_configure", "imports:14", "imports"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport warnings\nfrom contextlib import contextmanager\nfrom typing import Generator\nfrom typing import Optional\nfrom typing import TYPE_CHECKING\n\nimport pytest\nfrom _pytest.config import apply_warning_filters\nfrom _pytest.config import Config\nfrom _pytest.config import parse_warning_filter\nfrom _pytest.main import Session\nfrom _pytest.nodes import Item\nfrom _pytest.terminal import TerminalReporter\n\nif TYPE_CHECKING:\n from typing_extensions import Literal\n\n\ndef pytest_configure(config: Config) -> None:\n config.addinivalue_line(\n \"markers\",\n \"filterwarnings(warning): add a warning filter to the given test. 
\"\n \"see https://docs.pytest.org/en/stable/warnings.html#pytest-mark-filterwarnings \",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_warning_record_to_str_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/_pytest/warnings.py_warning_record_to_str_", "embedding": null, "metadata": {"file_path": "src/_pytest/warnings.py", "file_name": "warnings.py", "file_type": "text/x-python", "category": "implementation", "start_line": 82, "end_line": 140, "span_ids": ["pytest_runtest_protocol", "warning_record_to_str", "pytest_sessionfinish", "pytest_load_initial_conftests", "pytest_terminal_summary", "pytest_collection"], "tokens": 398}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def warning_record_to_str(warning_message: warnings.WarningMessage) -> str:\n \"\"\"Convert a warnings.WarningMessage to a string.\"\"\"\n warn_msg = warning_message.message\n msg = warnings.formatwarning(\n str(warn_msg),\n warning_message.category,\n warning_message.filename,\n warning_message.lineno,\n warning_message.line,\n )\n return msg\n\n\n@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:\n with catch_warnings_for_item(\n config=item.config, ihook=item.ihook, when=\"runtest\", item=item\n ):\n yield\n\n\n@pytest.hookimpl(hookwrapper=True, tryfirst=True)\ndef pytest_collection(session: Session) -> Generator[None, None, None]:\n config = session.config\n with catch_warnings_for_item(\n config=config, ihook=config.hook, when=\"collect\", item=None\n ):\n yield\n\n\n@pytest.hookimpl(hookwrapper=True)\ndef pytest_terminal_summary(\n terminalreporter: TerminalReporter,\n) -> Generator[None, None, None]:\n config = terminalreporter.config\n with catch_warnings_for_item(\n config=config, ihook=config.hook, when=\"config\", item=None\n ):\n yield\n\n\n@pytest.hookimpl(hookwrapper=True)\ndef pytest_sessionfinish(session: Session) -> Generator[None, None, None]:\n config = session.config\n with catch_warnings_for_item(\n config=config, ihook=config.hook, when=\"config\", item=None\n ):\n yield\n\n\n@pytest.hookimpl(hookwrapper=True)\ndef pytest_load_initial_conftests(\n early_config: \"Config\",\n) -> Generator[None, None, None]:\n with catch_warnings_for_item(\n config=early_config, ihook=early_config.hook, when=\"config\", item=None\n ):\n yield", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__init__.py__PYTHON_ARGCOMPLETE_OK_set_trace.__pytestPDB_set_trace": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__init__.py__PYTHON_ARGCOMPLETE_OK_set_trace.__pytestPDB_set_trace", "embedding": null, "metadata": {"file_path": "src/pytest/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 66, "span_ids": ["impl", 
"docstring", "imports"], "tokens": 586}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# PYTHON_ARGCOMPLETE_OK\n\"\"\"pytest: unit and functional testing with Python.\"\"\"\nfrom . import collect\nfrom _pytest import __version__\nfrom _pytest.assertion import register_assert_rewrite\nfrom _pytest.cacheprovider import Cache\nfrom _pytest.capture import CaptureFixture\nfrom _pytest.config import cmdline\nfrom _pytest.config import console_main\nfrom _pytest.config import ExitCode\nfrom _pytest.config import hookimpl\nfrom _pytest.config import hookspec\nfrom _pytest.config import main\nfrom _pytest.config import UsageError\nfrom _pytest.debugging import pytestPDB as __pytestPDB\nfrom _pytest.fixtures import _fillfuncargs\nfrom _pytest.fixtures import fixture\nfrom _pytest.fixtures import FixtureLookupError\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.fixtures import yield_fixture\nfrom _pytest.freeze_support import freeze_includes\nfrom _pytest.logging import LogCaptureFixture\nfrom _pytest.main import Session\nfrom _pytest.mark import Mark\nfrom _pytest.mark import MARK_GEN as mark\nfrom _pytest.mark import MarkDecorator\nfrom _pytest.mark import MarkGenerator\nfrom _pytest.mark import param\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.nodes import Collector\nfrom _pytest.nodes import File\nfrom _pytest.nodes import Item\nfrom _pytest.outcomes import exit\nfrom _pytest.outcomes import fail\nfrom _pytest.outcomes import importorskip\nfrom _pytest.outcomes import skip\nfrom _pytest.outcomes import xfail\nfrom _pytest.pytester import Pytester\nfrom _pytest.pytester import Testdir\nfrom _pytest.python import Class\nfrom _pytest.python import Function\nfrom _pytest.python import Instance\nfrom _pytest.python import Metafunc\nfrom _pytest.python import Module\nfrom _pytest.python import Package\nfrom _pytest.python_api import approx\nfrom _pytest.python_api import raises\nfrom _pytest.recwarn import deprecated_call\nfrom _pytest.recwarn import WarningsRecorder\nfrom _pytest.recwarn import warns\nfrom _pytest.runner import CallInfo\nfrom _pytest.tmpdir import TempdirFactory\nfrom _pytest.tmpdir import TempPathFactory\nfrom _pytest.warning_types import PytestAssertRewriteWarning\nfrom _pytest.warning_types import PytestCacheWarning\nfrom _pytest.warning_types import PytestCollectionWarning\nfrom _pytest.warning_types import PytestConfigWarning\nfrom _pytest.warning_types import PytestDeprecationWarning\nfrom _pytest.warning_types import PytestExperimentalApiWarning\nfrom _pytest.warning_types import PytestUnhandledCoroutineWarning\nfrom _pytest.warning_types import PytestUnhandledThreadExceptionWarning\nfrom _pytest.warning_types import PytestUnknownMarkWarning\nfrom _pytest.warning_types import PytestUnraisableExceptionWarning\nfrom _pytest.warning_types import PytestWarning\n\nset_trace = __pytestPDB.set_trace", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__init__.py___all___": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/src/pytest/__init__.py___all___", 
"embedding": null, "metadata": {"file_path": "src/pytest/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 68, "end_line": 132, "span_ids": ["impl"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "__all__ = [\n \"__version__\",\n \"_fillfuncargs\",\n \"approx\",\n \"Cache\",\n \"CallInfo\",\n \"CaptureFixture\",\n \"Class\",\n \"cmdline\",\n \"collect\",\n \"Collector\",\n \"console_main\",\n \"deprecated_call\",\n \"exit\",\n \"ExitCode\",\n \"fail\",\n \"File\",\n \"fixture\",\n \"FixtureLookupError\",\n \"FixtureRequest\",\n \"freeze_includes\",\n \"Function\",\n \"hookimpl\",\n \"hookspec\",\n \"importorskip\",\n \"Instance\",\n \"Item\",\n \"LogCaptureFixture\",\n \"main\",\n \"mark\",\n \"Mark\",\n \"MarkDecorator\",\n \"MarkGenerator\",\n \"Metafunc\",\n \"Module\",\n \"MonkeyPatch\",\n \"Package\",\n \"param\",\n \"PytestAssertRewriteWarning\",\n \"PytestCacheWarning\",\n \"PytestCollectionWarning\",\n \"PytestConfigWarning\",\n \"PytestDeprecationWarning\",\n \"PytestExperimentalApiWarning\",\n \"Pytester\",\n \"PytestUnhandledCoroutineWarning\",\n \"PytestUnhandledThreadExceptionWarning\",\n \"PytestUnknownMarkWarning\",\n \"PytestUnraisableExceptionWarning\",\n \"PytestWarning\",\n \"raises\",\n \"register_assert_rewrite\",\n \"Session\",\n \"set_trace\",\n \"skip\",\n \"TempPathFactory\",\n \"Testdir\",\n \"TempdirFactory\",\n \"UsageError\",\n \"WarningsRecorder\",\n \"warns\",\n \"xfail\",\n \"yield_fixture\",\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_hook_configure_error_issue38_TestGeneralUsage.test_file_not_found.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_early_hook_configure_error_issue38_TestGeneralUsage.test_file_not_found.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 56, "end_line": 73, "span_ids": ["TestGeneralUsage.test_file_not_found", "TestGeneralUsage.test_early_hook_configure_error_issue38"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_early_hook_configure_error_issue38(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_configure():\n 0 / 0\n \"\"\"\n )\n result = pytester.runpytest(pytester.path)\n assert result.ret != 0\n # here we get it on stderr\n result.stderr.fnmatch_lines(\n [\"*INTERNALERROR*File*conftest.py*line 2*\", \"*0 / 0*\"]\n )\n\n def test_file_not_found(self, pytester: Pytester) -> None:\n result = 
pytester.runpytest(\"asd\")\n assert result.ret != 0\n result.stderr.fnmatch_lines([\"ERROR: file or directory not found: asd\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_file_not_found_unconfigure_issue143_TestGeneralUsage.test_file_not_found_unconfigure_issue143.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_file_not_found_unconfigure_issue143_TestGeneralUsage.test_file_not_found_unconfigure_issue143.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 75, "end_line": 87, "span_ids": ["TestGeneralUsage.test_file_not_found_unconfigure_issue143"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_file_not_found_unconfigure_issue143(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_configure():\n print(\"---configure\")\n def pytest_unconfigure():\n print(\"---unconfigure\")\n \"\"\"\n )\n result = pytester.runpytest(\"-s\", \"asd\")\n assert result.ret == ExitCode.USAGE_ERROR\n result.stderr.fnmatch_lines([\"ERROR: file or directory not found: asd\"])\n result.stdout.fnmatch_lines([\"*---configure\", \"*---unconfigure\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_direct_addressing_selects_TestGeneralUsage.test_direct_addressing_selects.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_direct_addressing_selects_TestGeneralUsage.test_direct_addressing_selects.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 334, "end_line": 345, "span_ids": ["TestGeneralUsage.test_direct_addressing_selects"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_direct_addressing_selects(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def pytest_generate_tests(metafunc):\n metafunc.parametrize('i', [1, 2], ids=[\"1\", \"2\"])\n def test_func(i):\n pass\n \"\"\"\n )\n res = pytester.runpytest(p.name + \"::\" + \"test_func[1]\")\n assert res.ret == 0\n res.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_direct_addressing_notfound_TestGeneralUsage.test_initialization_error_issue49.assert_sessionstarttime_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestGeneralUsage.test_direct_addressing_notfound_TestGeneralUsage.test_initialization_error_issue49.assert_sessionstarttime_", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 347, "end_line": 375, "span_ids": ["TestGeneralUsage.test_direct_addressing_notfound", "TestGeneralUsage.test_docstring_on_hookspec", "TestGeneralUsage.test_initialization_error_issue49"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralUsage:\n\n def test_direct_addressing_notfound(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n def test_func():\n pass\n \"\"\"\n )\n res = pytester.runpytest(p.name + \"::\" + \"test_notfound\")\n assert res.ret\n res.stderr.fnmatch_lines([\"*ERROR*not found*\"])\n\n def test_docstring_on_hookspec(self) -> None:\n from _pytest import hookspec\n\n for name, value in vars(hookspec).items():\n if name.startswith(\"pytest_\"):\n assert value.__doc__, \"no docstring for %s\" % name\n\n def test_initialization_error_issue49(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_configure():\n x\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 3 # internal error\n result.stderr.fnmatch_lines([\"INTERNAL*pytest_configure*\", \"INTERNAL*x*\"])\n assert \"sessionstarttime\" not in result.stderr.str()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants_TestInvocationVariants.test_invoke_with_path.out_err_capsys_readout": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants_TestInvocationVariants.test_invoke_with_path.out_err_capsys_readout", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 506, "end_line": 601, "span_ids": ["TestInvocationVariants.test_import_star_py_dot_test", "TestInvocationVariants.test_invoke_with_invalid_type", "TestInvocationVariants.test_invoke_with_path", "TestInvocationVariants.test_equivalence_pytest_pydottest", "TestInvocationVariants", "TestInvocationVariants.test_python_minus_m_invocation_fail", "TestInvocationVariants.test_python_pytest_package", "TestInvocationVariants.test_python_minus_m_invocation_ok", "TestInvocationVariants.test_earlyinit", "TestInvocationVariants.test_import_star_pytest", "TestInvocationVariants.test_pydoc", 
"TestInvocationVariants.test_double_pytestcmdline"], "tokens": 766}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n def test_earlyinit(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n assert hasattr(pytest, 'mark')\n \"\"\"\n )\n result = pytester.runpython(p)\n assert result.ret == 0\n\n def test_pydoc(self, pytester: Pytester) -> None:\n for name in (\"py.test\", \"pytest\"):\n result = pytester.runpython_c(f\"import {name};help({name})\")\n assert result.ret == 0\n s = result.stdout.str()\n assert \"MarkGenerator\" in s\n\n def test_import_star_py_dot_test(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n from py.test import *\n #collect\n #cmdline\n #Item\n # assert collect.Item is Item\n # assert collect.Collector is Collector\n main\n skip\n xfail\n \"\"\"\n )\n result = pytester.runpython(p)\n assert result.ret == 0\n\n def test_import_star_pytest(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n from pytest import *\n #Item\n #File\n main\n skip\n xfail\n \"\"\"\n )\n result = pytester.runpython(p)\n assert result.ret == 0\n\n def test_double_pytestcmdline(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n run=\"\"\"\n import pytest\n pytest.main()\n pytest.main()\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_hello():\n pass\n \"\"\"\n )\n result = pytester.runpython(p)\n result.stdout.fnmatch_lines([\"*1 passed*\", \"*1 passed*\"])\n\n def test_python_minus_m_invocation_ok(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"def test_hello(): pass\")\n res = pytester.run(sys.executable, \"-m\", \"pytest\", str(p1))\n assert res.ret == 0\n\n def test_python_minus_m_invocation_fail(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"def test_fail(): 0/0\")\n res = pytester.run(sys.executable, \"-m\", \"pytest\", str(p1))\n assert res.ret == 1\n\n def test_python_pytest_package(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"def test_pass(): pass\")\n res = pytester.run(sys.executable, \"-m\", \"pytest\", str(p1))\n assert res.ret == 0\n res.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_equivalence_pytest_pydottest(self) -> None:\n # Type ignored because `py.test` is not and will not be typed.\n assert pytest.main == py.test.cmdline.main # type: ignore[attr-defined]\n\n def test_invoke_with_invalid_type(self) -> None:\n with pytest.raises(\n TypeError, match=\"expected to be a list of strings, got: '-h'\"\n ):\n pytest.main(\"-h\") # type: ignore[arg-type]\n\n def test_invoke_with_path(self, pytester: Pytester, capsys) -> None:\n retcode = pytest.main([str(pytester.path)])\n assert retcode == ExitCode.NO_TESTS_COLLECTED\n out, err = capsys.readouterr()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_invoke_plugin_api_TestInvocationVariants.test_pyargs_importerror.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_invoke_plugin_api_TestInvocationVariants.test_pyargs_importerror.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 603, "end_line": 620, "span_ids": ["TestInvocationVariants.test_invoke_plugin_api.MyPlugin", "TestInvocationVariants.test_invoke_plugin_api.MyPlugin.pytest_addoption", "TestInvocationVariants.test_invoke_plugin_api", "TestInvocationVariants.test_pyargs_importerror"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n\n def test_invoke_plugin_api(self, capsys) -> None:\n class MyPlugin:\n def pytest_addoption(self, parser):\n parser.addoption(\"--myopt\")\n\n pytest.main([\"-h\"], plugins=[MyPlugin()])\n out, err = capsys.readouterr()\n assert \"--myopt\" in out\n\n def test_pyargs_importerror(self, pytester: Pytester, monkeypatch) -> None:\n monkeypatch.delenv(\"PYTHONDONTWRITEBYTECODE\", False)\n path = pytester.mkpydir(\"tpkg\")\n path.joinpath(\"test_hello.py\").write_text(\"raise ImportError\")\n\n result = pytester.runpytest(\"--pyargs\", \"tpkg.test_hello\", syspathinsert=True)\n assert result.ret != 0\n\n result.stdout.fnmatch_lines([\"collected*0*items*/*1*error\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_pyargs_only_imported_once_TestInvocationVariants.test_pyargs_filename_looks_like_module.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestInvocationVariants.test_pyargs_only_imported_once_TestInvocationVariants.test_pyargs_filename_looks_like_module.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 622, "end_line": 643, "span_ids": ["TestInvocationVariants.test_pyargs_only_imported_once", "TestInvocationVariants.test_pyargs_filename_looks_like_module"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestInvocationVariants:\n\n def test_pyargs_only_imported_once(self, pytester: Pytester) -> None:\n pkg = pytester.mkpydir(\"foo\")\n pkg.joinpath(\"test_foo.py\").write_text(\n \"print('hello from test_foo')\\ndef test(): pass\"\n )\n pkg.joinpath(\"conftest.py\").write_text(\n \"def pytest_configure(config): print('configuring')\"\n )\n\n result = pytester.runpytest(\n \"--pyargs\", \"foo.test_foo\", \"-s\", syspathinsert=True\n )\n # should only import once\n assert result.outlines.count(\"hello from test_foo\") == 1\n # should 
only configure once\n assert result.outlines.count(\"configuring\") == 1\n\n def test_pyargs_filename_looks_like_module(self, pytester: Pytester) -> None:\n pytester.path.joinpath(\"conftest.py\").touch()\n pytester.path.joinpath(\"t.py\").write_text(\"def test(): pass\")\n result = pytester.runpytest(\"--pyargs\", \"t.py\")\n assert result.ret == ExitCode.OK", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations_TestDurations.test_calls.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations_TestDurations.test_calls.None_2", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 872, "end_line": 896, "span_ids": ["TestDurations.test_calls", "TestDurations"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDurations:\n source = \"\"\"\n from _pytest import timing\n def test_something():\n pass\n def test_2():\n timing.sleep(0.010)\n def test_1():\n timing.sleep(0.002)\n def test_3():\n timing.sleep(0.020)\n \"\"\"\n\n def test_calls(self, pytester: Pytester, mock_timing) -> None:\n pytester.makepyfile(self.source)\n result = pytester.runpytest_inprocess(\"--durations=10\")\n assert result.ret == 0\n\n result.stdout.fnmatch_lines_random(\n [\"*durations*\", \"*call*test_3*\", \"*call*test_2*\"]\n )\n\n result.stdout.fnmatch_lines(\n [\"(8 durations < 0.005s hidden. 
Use -vv to show these durations.)\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_calls_show_2_TestDurations.test_calls_showall.for_x_in_tested_.for_y_in_call_s.for_line_in_result_stdout.else_.raise_AssertionError_f_no": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_calls_show_2_TestDurations.test_calls_showall.for_x_in_tested_.for_y_in_call_s.for_line_in_result_stdout.else_.raise_AssertionError_f_no", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 898, "end_line": 919, "span_ids": ["TestDurations.test_calls_show_2", "TestDurations.test_calls_showall"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDurations:\n\n def test_calls_show_2(self, pytester: Pytester, mock_timing) -> None:\n\n pytester.makepyfile(self.source)\n result = pytester.runpytest_inprocess(\"--durations=2\")\n assert result.ret == 0\n\n lines = result.stdout.get_lines_after(\"*slowest*durations*\")\n assert \"4 passed\" in lines[2]\n\n def test_calls_showall(self, pytester: Pytester, mock_timing) -> None:\n pytester.makepyfile(self.source)\n result = pytester.runpytest_inprocess(\"--durations=0\")\n assert result.ret == 0\n\n tested = \"3\"\n for x in tested:\n for y in (\"call\",): # 'setup', 'call', 'teardown':\n for line in result.stdout.lines:\n if (\"test_%s\" % x) in line and y in line:\n break\n else:\n raise AssertionError(f\"not found {x} {y}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_calls_showall_verbose_TestDurations.test_with_deselected.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_calls_showall_verbose_TestDurations.test_with_deselected.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 921, "end_line": 939, "span_ids": ["TestDurations.test_calls_showall_verbose", "TestDurations.test_with_deselected"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDurations:\n\n def test_calls_showall_verbose(self, pytester: Pytester, mock_timing) -> None:\n pytester.makepyfile(self.source)\n result = pytester.runpytest_inprocess(\"--durations=0\", \"-vv\")\n assert result.ret == 0\n\n for x 
in \"123\":\n for y in (\"call\",): # 'setup', 'call', 'teardown':\n for line in result.stdout.lines:\n if (\"test_%s\" % x) in line and y in line:\n break\n else:\n raise AssertionError(f\"not found {x} {y}\")\n\n def test_with_deselected(self, pytester: Pytester, mock_timing) -> None:\n pytester.makepyfile(self.source)\n result = pytester.runpytest_inprocess(\"--durations=2\", \"-k test_3\")\n assert result.ret == 0\n\n result.stdout.fnmatch_lines([\"*durations*\", \"*call*test_3*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_with_failing_collection_TestDurations.test_with_not.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/acceptance_test.py_TestDurations.test_with_failing_collection_TestDurations.test_with_not.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/acceptance_test.py", "file_name": "acceptance_test.py", "file_type": "text/x-python", "category": "test", "start_line": 941, "end_line": 955, "span_ids": ["TestDurations.test_with_not", "TestDurations.test_with_failing_collection"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDurations:\n\n def test_with_failing_collection(self, pytester: Pytester, mock_timing) -> None:\n pytester.makepyfile(self.source)\n pytester.makepyfile(test_collecterror=\"\"\"xyz\"\"\")\n result = pytester.runpytest_inprocess(\"--durations=2\", \"-k test_1\")\n assert result.ret == 2\n\n result.stdout.fnmatch_lines([\"*Interrupted: 1 error during collection*\"])\n # Collection errors abort test execution, therefore no duration is\n # output\n result.stdout.no_fnmatch_line(\"*duration*\")\n\n def test_with_not(self, pytester: Pytester, mock_timing) -> None:\n pytester.makepyfile(self.source)\n result = pytester.runpytest_inprocess(\"-k not 1\")\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_re_test_unicode_handling.str_excinfo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_re_test_unicode_handling.str_excinfo_", "embedding": null, "metadata": {"file_path": "testing/code/test_code.py", "file_name": "test_code.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 89, "span_ids": ["test_code_from_function_with_class", "test_code_from_function_with_class.A", "test_unicode_handling", "test_getstatement_empty_fullsource", "test_code_gives_back_name_for_not_existing_file", "test_frame_getsourcelineno_myself", "imports", "test_code_from_func", "test_code_fullsource", "x", "test_code_from_function_with_class.A:2", "test_code_source", "test_ne"], "tokens": 542}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", 
"tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import re\nimport sys\nfrom types import FrameType\nfrom unittest import mock\n\nimport pytest\nfrom _pytest._code import Code\nfrom _pytest._code import ExceptionInfo\nfrom _pytest._code import Frame\nfrom _pytest._code import Source\nfrom _pytest._code.code import ExceptionChainRepr\nfrom _pytest._code.code import ReprFuncArgs\n\n\ndef test_ne() -> None:\n code1 = Code(compile('foo = \"bar\"', \"\", \"exec\"))\n assert code1 == code1\n code2 = Code(compile('foo = \"baz\"', \"\", \"exec\"))\n assert code2 != code1\n\n\ndef test_code_gives_back_name_for_not_existing_file() -> None:\n name = \"abc-123\"\n co_code = compile(\"pass\\n\", name, \"exec\")\n assert co_code.co_filename == name\n code = Code(co_code)\n assert str(code.path) == name\n assert code.fullsource is None\n\n\ndef test_code_from_function_with_class() -> None:\n class A:\n pass\n\n with pytest.raises(TypeError):\n Code.from_function(A)\n\n\ndef x() -> None:\n raise NotImplementedError()\n\n\ndef test_code_fullsource() -> None:\n code = Code.from_function(x)\n full = code.fullsource\n assert \"test_code_fullsource()\" in str(full)\n\n\ndef test_code_source() -> None:\n code = Code.from_function(x)\n src = code.source()\n expected = \"\"\"def x() -> None:\n raise NotImplementedError()\"\"\"\n assert str(src) == expected\n\n\ndef test_frame_getsourcelineno_myself() -> None:\n def func() -> FrameType:\n return sys._getframe(0)\n\n f = Frame(func())\n source, lineno = f.code.fullsource, f.lineno\n assert source is not None\n assert source[lineno].startswith(\" return sys._getframe(0)\")\n\n\ndef test_getstatement_empty_fullsource() -> None:\n def func() -> FrameType:\n return sys._getframe(0)\n\n f = Frame(func())\n with mock.patch.object(f.code.__class__, \"fullsource\", None):\n assert f.statement == Source(\"\")\n\n\ndef test_code_from_func() -> None:\n co = Code.from_function(test_frame_getsourcelineno_myself)\n assert co.firstlineno\n assert co.path\n\n\ndef test_unicode_handling() -> None:\n value = \"\u0105\u0107\".encode()\n\n def f() -> None:\n raise Exception(value)\n\n excinfo = pytest.raises(Exception, f)\n str(excinfo)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_TestExceptionInfo_TestTracebackEntry.test_tb_entry_str.assert_re_match_pattern_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_code.py_TestExceptionInfo_TestTracebackEntry.test_tb_entry_str.assert_re_match_pattern_", "embedding": null, "metadata": {"file_path": "testing/code/test_code.py", "file_name": "test_code.py", "file_type": "text/x-python", "category": "test", "start_line": 144, "end_line": 182, "span_ids": ["TestTracebackEntry.test_tb_entry_str", "TestExceptionInfo.test_bad_getsource", "TestTracebackEntry", "TestExceptionInfo.test_from_current_with_missing", "TestExceptionInfo", "TestTracebackEntry.test_getsource"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class TestExceptionInfo:\n def test_bad_getsource(self) -> None:\n try:\n if False:\n pass\n else:\n assert False\n except AssertionError:\n exci = ExceptionInfo.from_current()\n assert exci.getrepr()\n\n def test_from_current_with_missing(self) -> None:\n with pytest.raises(AssertionError, match=\"no current exception\"):\n ExceptionInfo.from_current()\n\n\nclass TestTracebackEntry:\n def test_getsource(self) -> None:\n try:\n if False:\n pass\n else:\n assert False\n except AssertionError:\n exci = ExceptionInfo.from_current()\n entry = exci.traceback[0]\n source = entry.getsource()\n assert source is not None\n assert len(source) == 6\n assert \"assert False\" in source[5]\n\n def test_tb_entry_str(self):\n try:\n assert False\n except AssertionError:\n exci = ExceptionInfo.from_current()\n pattern = r\" File '.*test_code.py':\\d+ in test_tb_entry_str\\n assert False\"\n entry = str(exci.traceback[0])\n assert re.match(pattern, entry)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_importlib_test_excinfo_from_exc_info_simple.assert_info_type_Value": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_importlib_test_excinfo_from_exc_info_simple.assert_info_type_Value", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 53, "span_ids": ["test_excinfo_simple", "limited_recursion_depth", "impl", "test_excinfo_from_exc_info_simple", "imports:24", "imports"], "tokens": 309}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import importlib\nimport io\nimport operator\nimport queue\nimport sys\nimport textwrap\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import Dict\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\nfrom typing import Union\n\nimport _pytest\nimport pytest\nfrom _pytest._code.code import ExceptionChainRepr\nfrom _pytest._code.code import ExceptionInfo\nfrom _pytest._code.code import FormattedExcinfo\nfrom _pytest._io import TerminalWriter\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pathlib import bestrelpath\nfrom _pytest.pathlib import import_path\nfrom _pytest.pytester import LineMatcher\nfrom _pytest.pytester import Pytester\n\n\nif TYPE_CHECKING:\n from _pytest._code.code import _TracebackStyle\n\n\n@pytest.fixture\ndef limited_recursion_depth():\n before = sys.getrecursionlimit()\n sys.setrecursionlimit(150)\n yield\n sys.setrecursionlimit(before)\n\n\ndef test_excinfo_simple() -> None:\n try:\n raise ValueError\n except ValueError:\n info = _pytest._code.ExceptionInfo.from_current()\n assert info.type == ValueError\n\n\ndef test_excinfo_from_exc_info_simple() -> None:\n try:\n raise ValueError\n except ValueError as e:\n assert e.__traceback__ is not None\n info = _pytest._code.ExceptionInfo.from_exc_info((type(e), e, e.__traceback__))\n assert info.type == ValueError", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_cut_TestTraceback_f_g_h.test_traceback_cut_excludepath.assert_newtraceback_1_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_cut_TestTraceback_f_g_h.test_traceback_cut_excludepath.assert_newtraceback_1_f", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 152, "end_line": 171, "span_ids": ["TestTraceback_f_g_h.test_traceback_cut_excludepath", "TestTraceback_f_g_h.test_traceback_cut"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceback_f_g_h:\n\n def test_traceback_cut(self) -> None:\n co = _pytest._code.Code.from_function(f)\n path, firstlineno = co.path, co.firstlineno\n assert isinstance(path, Path)\n traceback = self.excinfo.traceback\n newtraceback = traceback.cut(path=path, firstlineno=firstlineno)\n assert len(newtraceback) == 1\n newtraceback = traceback.cut(path=path, lineno=firstlineno + 2)\n assert len(newtraceback) == 1\n\n def test_traceback_cut_excludepath(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\"def f(): raise ValueError\")\n with pytest.raises(ValueError) as excinfo:\n import_path(p).f() # type: ignore[attr-defined]\n basedir = Path(pytest.__file__).parent\n newtraceback = excinfo.traceback.cut(excludepath=basedir)\n for x in newtraceback:\n assert isinstance(x.path, Path)\n assert basedir not in x.path.parents\n assert newtraceback[-1].frame.code.path == p", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_filter_TestTraceback_f_g_h.test_traceback_filter_selective.if_matching_.else_.assert_len_ntraceback_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestTraceback_f_g_h.test_traceback_filter_TestTraceback_f_g_h.test_traceback_filter_selective.if_matching_.else_.assert_len_ntraceback_", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 173, "end_line": 214, "span_ids": ["TestTraceback_f_g_h.test_traceback_filter_selective", "TestTraceback_f_g_h.test_traceback_filter"], "tokens": 279}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTraceback_f_g_h:\n\n def test_traceback_filter(self):\n traceback = self.excinfo.traceback\n ntraceback = 
traceback.filter()\n assert len(ntraceback) == len(traceback) - 1\n\n @pytest.mark.parametrize(\n \"tracebackhide, matching\",\n [\n (lambda info: True, True),\n (lambda info: False, False),\n (operator.methodcaller(\"errisinstance\", ValueError), True),\n (operator.methodcaller(\"errisinstance\", IndexError), False),\n ],\n )\n def test_traceback_filter_selective(self, tracebackhide, matching):\n def f():\n #\n raise ValueError\n #\n\n def g():\n #\n __tracebackhide__ = tracebackhide\n f()\n #\n\n def h():\n #\n g()\n #\n\n excinfo = pytest.raises(ValueError, h)\n traceback = excinfo.traceback\n ntraceback = traceback.filter()\n print(f\"old: {traceback!r}\")\n print(f\"new: {ntraceback!r}\")\n\n if matching:\n assert len(ntraceback) == len(traceback) - 2\n else:\n # -1 because of the __tracebackhide__ in pytest.raises\n assert len(ntraceback) == len(traceback) - 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_filenames_TestFormattedExcinfo.test_toterminal_long_filenames.assert_line_3_Value": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_toterminal_long_filenames_TestFormattedExcinfo.test_toterminal_long_filenames.assert_line_3_Value", "embedding": null, "metadata": {"file_path": "testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1018, "end_line": 1043, "span_ids": ["TestFormattedExcinfo.test_toterminal_long_filenames"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_toterminal_long_filenames(\n self, importasmod, tw_mock, monkeypatch: MonkeyPatch\n ) -> None:\n mod = importasmod(\n \"\"\"\n def f():\n raise ValueError()\n \"\"\"\n )\n excinfo = pytest.raises(ValueError, mod.f)\n path = Path(mod.__file__)\n monkeypatch.chdir(path.parent)\n repr = excinfo.getrepr(abspath=False)\n repr.toterminal(tw_mock)\n x = bestrelpath(Path.cwd(), path)\n if len(x) < len(str(path)):\n msg = tw_mock.get_write_msg(-2)\n assert msg == \"mod.py\"\n assert tw_mock.lines[-1] == \":3: ValueError\"\n\n repr = excinfo.getrepr(abspath=True)\n repr.toterminal(tw_mock)\n msg = tw_mock.get_write_msg(-2)\n assert msg == str(path)\n line = tw_mock.lines[-1]\n assert line == \":3: ValueError\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_cycle_TestFormattedExcinfo.test_exec_type_error_filter.excinfo_traceback_filter_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_excinfo.py_TestFormattedExcinfo.test_exc_chain_repr_cycle_TestFormattedExcinfo.test_exec_type_error_filter.excinfo_traceback_filter_", "embedding": null, "metadata": {"file_path": 
"testing/code/test_excinfo.py", "file_name": "test_excinfo.py", "file_type": "text/x-python", "category": "test", "start_line": 1311, "end_line": 1364, "span_ids": ["TestFormattedExcinfo.test_exc_chain_repr_cycle", "TestFormattedExcinfo.test_exec_type_error_filter"], "tokens": 379}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFormattedExcinfo:\n\n def test_exc_chain_repr_cycle(self, importasmod, tw_mock):\n mod = importasmod(\n \"\"\"\n class Err(Exception):\n pass\n def fail():\n return 0 / 0\n def reraise():\n try:\n fail()\n except ZeroDivisionError as e:\n raise Err() from e\n def unreraise():\n try:\n reraise()\n except Err as e:\n raise e.__cause__\n \"\"\"\n )\n excinfo = pytest.raises(ZeroDivisionError, mod.unreraise)\n r = excinfo.getrepr(style=\"short\")\n r.toterminal(tw_mock)\n out = \"\\n\".join(line for line in tw_mock.lines if isinstance(line, str))\n expected_out = textwrap.dedent(\n \"\"\"\\\n :13: in unreraise\n reraise()\n :10: in reraise\n raise Err() from e\n E test_exc_chain_repr_cycle0.mod.Err\n\n During handling of the above exception, another exception occurred:\n :15: in unreraise\n raise e.__cause__\n :8: in reraise\n fail()\n :5: in fail\n return 0 / 0\n E ZeroDivisionError: division by zero\"\"\"\n )\n assert out == expected_out\n\n def test_exec_type_error_filter(self, importasmod):\n \"\"\"See #7742\"\"\"\n mod = importasmod(\n \"\"\"\\\n def f():\n exec(\"a = 1\", {}, [])\n \"\"\"\n )\n with pytest.raises(TypeError) as excinfo:\n mod.f()\n # previously crashed with `AttributeError: list has no attribute get`\n excinfo.traceback.filter()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_getfslineno_test_getfslineno.assert_getfslineno_B_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/code/test_source.py_test_getfslineno_test_getfslineno.assert_getfslineno_B_1_", "embedding": null, "metadata": {"file_path": "testing/code/test_source.py", "file_name": "test_source.py", "file_type": "text/x-python", "category": "test", "start_line": 352, "end_line": 378, "span_ids": ["test_getfslineno", "test_getfslineno.A:2", "test_getfslineno.B", "test_getfslineno.A", "test_getfslineno.B:2"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_getfslineno() -> None:\n def f(x) -> None:\n raise NotImplementedError()\n\n fspath, lineno = getfslineno(f)\n\n assert isinstance(fspath, Path)\n assert fspath.name == \"test_source.py\"\n assert lineno == f.__code__.co_firstlineno - 1 # see findsource\n\n class A:\n pass\n\n fspath, lineno = getfslineno(A)\n\n _, A_lineno = inspect.findsource(A)\n assert isinstance(fspath, Path)\n assert fspath.name == \"test_source.py\"\n assert lineno == A_lineno\n\n assert getfslineno(3) == (\"\", -1)\n\n 
class B:\n pass\n\n B.__name__ = B.__qualname__ = \"B2\"\n assert getfslineno(B)[1] == -1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_dummy_yaml_custom_test_pytester.return.pytester": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_dummy_yaml_custom_test_pytester.return.pytester", "embedding": null, "metadata": {"file_path": "testing/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 106, "end_line": 136, "span_ids": ["pytester", "dummy_yaml_custom_test"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef dummy_yaml_custom_test(pytester: Pytester):\n \"\"\"Writes a conftest file that collects and executes a dummy yaml test.\n\n Taken from the docs, but stripped down to the bare minimum, useful for\n tests which needs custom items collected.\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_collect_file(parent, path):\n if path.ext == \".yaml\" and path.basename.startswith(\"test\"):\n return YamlFile.from_parent(fspath=path, parent=parent)\n\n class YamlFile(pytest.File):\n def collect(self):\n yield YamlItem.from_parent(name=self.fspath.basename, parent=self)\n\n class YamlItem(pytest.Item):\n def runtest(self):\n pass\n \"\"\"\n )\n pytester.makefile(\".yaml\", test1=\"\")\n\n\n@pytest.fixture\ndef pytester(pytester: Pytester, monkeypatch: MonkeyPatch) -> Pytester:\n monkeypatch.setenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", \"1\")\n return pytester", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_color_mapping_color_mapping.return.ColorMapping": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/conftest.py_color_mapping_color_mapping.return.ColorMapping", "embedding": null, "metadata": {"file_path": "testing/conftest.py", "file_name": "conftest.py", "file_type": "text/x-python", "category": "implementation", "start_line": 139, "end_line": 178, "span_ids": ["color_mapping.ColorMapping", "color_mapping.ColorMapping:2", "color_mapping"], "tokens": 377}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture(scope=\"session\")\ndef color_mapping():\n \"\"\"Returns a utility class which can replace keys in strings in the form \"{NAME}\"\n by their equivalent ASCII codes in the terminal.\n\n Used by tests which check the actual colors output by pytest.\n \"\"\"\n\n class ColorMapping:\n COLORS = {\n \"red\": \"\\x1b[31m\",\n \"green\": \"\\x1b[32m\",\n \"yellow\": \"\\x1b[33m\",\n \"bold\": \"\\x1b[1m\",\n \"reset\": 
\"\\x1b[0m\",\n \"kw\": \"\\x1b[94m\",\n \"hl-reset\": \"\\x1b[39;49;00m\",\n \"function\": \"\\x1b[92m\",\n \"number\": \"\\x1b[94m\",\n \"str\": \"\\x1b[33m\",\n \"print\": \"\\x1b[96m\",\n }\n RE_COLORS = {k: re.escape(v) for k, v in COLORS.items()}\n\n @classmethod\n def format(cls, lines: List[str]) -> List[str]:\n \"\"\"Straightforward replacement of color names to their ASCII codes.\"\"\"\n return [line.format(**cls.COLORS) for line in lines]\n\n @classmethod\n def format_for_fnmatch(cls, lines: List[str]) -> List[str]:\n \"\"\"Replace color names for use with LineMatcher.fnmatch_lines\"\"\"\n return [line.format(**cls.COLORS).replace(\"[\", \"[[]\") for line in lines]\n\n @classmethod\n def format_for_rematch(cls, lines: List[str]) -> List[str]:\n \"\"\"Replace color names for use with LineMatcher.re_match_lines\"\"\"\n return [line.format(**cls.RE_COLORS) for line in lines]\n\n return ColorMapping", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_re_test_fillfixtures_is_deprecated.with_pytest_warns_._pytest_fixtures_fillfixt": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_re_test_fillfixtures_is_deprecated.with_pytest_warns_._pytest_fixtures_fillfixt", "embedding": null, "metadata": {"file_path": "testing/deprecated_test.py", "file_name": "deprecated_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 48, "span_ids": ["test_fillfixtures_is_deprecated", "test_pytest_collect_module_deprecated", "imports", "test_external_plugins_integrated", "test_fillfuncargs_is_deprecated"], "tokens": 334}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import re\nimport warnings\nfrom unittest import mock\n\nimport pytest\nfrom _pytest import deprecated\nfrom _pytest.pytester import Pytester\n\n\n@pytest.mark.parametrize(\"attribute\", pytest.collect.__all__) # type: ignore\n# false positive due to dynamic attribute\ndef test_pytest_collect_module_deprecated(attribute) -> None:\n with pytest.warns(DeprecationWarning, match=attribute):\n getattr(pytest.collect, attribute)\n\n\n@pytest.mark.parametrize(\"plugin\", sorted(deprecated.DEPRECATED_EXTERNAL_PLUGINS))\n@pytest.mark.filterwarnings(\"default\")\ndef test_external_plugins_integrated(pytester: Pytester, plugin) -> None:\n pytester.syspathinsert()\n pytester.makepyfile(**{plugin: \"\"})\n\n with pytest.warns(pytest.PytestConfigWarning):\n pytester.parseconfig(\"-p\", plugin)\n\n\ndef test_fillfuncargs_is_deprecated() -> None:\n with pytest.warns(\n pytest.PytestDeprecationWarning,\n match=re.escape(\n \"pytest._fillfuncargs() is deprecated, use \"\n \"function._request._fillfixtures() instead if you cannot avoid reaching into internals.\"\n ),\n ):\n pytest._fillfuncargs(mock.Mock())\n\n\ndef test_fillfixtures_is_deprecated() -> None:\n import _pytest.fixtures\n\n with pytest.warns(\n pytest.PytestDeprecationWarning,\n match=re.escape(\n \"_pytest.fixtures.fillfixtures() is deprecated, use \"\n \"function._request._fillfixtures() instead if you cannot avoid 
reaching into internals.\"\n ),\n ):\n _pytest.fixtures.fillfixtures(mock.Mock())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_minus_k_dash_is_deprecated_test_minus_k_dash_is_deprecated.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_minus_k_dash_is_deprecated_test_minus_k_dash_is_deprecated.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/deprecated_test.py", "file_name": "deprecated_test.py", "file_type": "text/x-python", "category": "test", "start_line": 51, "end_line": 60, "span_ids": ["test_minus_k_dash_is_deprecated"], "tokens": 106}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_minus_k_dash_is_deprecated(pytester: Pytester) -> None:\n threepass = pytester.makepyfile(\n test_threepass=\"\"\"\n def test_one(): assert 1\n def test_two(): assert 1\n def test_three(): assert 1\n \"\"\"\n )\n result = pytester.runpytest(\"-k=-test_two\", threepass)\n result.stdout.fnmatch_lines([\"*The `-k '-expr'` syntax*deprecated*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_minus_k_colon_is_deprecated_test_minus_k_colon_is_deprecated.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_minus_k_colon_is_deprecated_test_minus_k_colon_is_deprecated.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/deprecated_test.py", "file_name": "deprecated_test.py", "file_type": "text/x-python", "category": "test", "start_line": 63, "end_line": 72, "span_ids": ["test_minus_k_colon_is_deprecated"], "tokens": 108}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_minus_k_colon_is_deprecated(pytester: Pytester) -> None:\n threepass = pytester.makepyfile(\n test_threepass=\"\"\"\n def test_one(): assert 1\n def test_two(): assert 1\n def test_three(): assert 1\n \"\"\"\n )\n result = pytester.runpytest(\"-k\", \"test_two:\", threepass)\n result.stdout.fnmatch_lines([\"*The `-k 'expr:'` syntax*deprecated*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_fscollector_gethookproxy_isinitpath_test_fscollector_gethookproxy_isinitpath.assert_len_rec_0": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_fscollector_gethookproxy_isinitpath_test_fscollector_gethookproxy_isinitpath.assert_len_rec_0", "embedding": null, "metadata": {"file_path": "testing/deprecated_test.py", "file_name": "deprecated_test.py", "file_type": "text/x-python", "category": "test", "start_line": 75, "end_line": 97, "span_ids": ["test_fscollector_gethookproxy_isinitpath"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fscollector_gethookproxy_isinitpath(pytester: Pytester) -> None:\n module = pytester.getmodulecol(\n \"\"\"\n def test_foo(): pass\n \"\"\",\n withinit=True,\n )\n assert isinstance(module, pytest.Module)\n package = module.parent\n assert isinstance(package, pytest.Package)\n\n with pytest.warns(pytest.PytestDeprecationWarning, match=\"gethookproxy\"):\n package.gethookproxy(pytester.path)\n\n with pytest.warns(pytest.PytestDeprecationWarning, match=\"isinitpath\"):\n package.isinitpath(pytester.path)\n\n # The methods on Session are *not* deprecated.\n session = module.session\n with warnings.catch_warnings(record=True) as rec:\n session.gethookproxy(pytester.path)\n session.isinitpath(pytester.path)\n assert len(rec) == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_strict_option_is_deprecated_test_strict_option_is_deprecated.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_strict_option_is_deprecated_test_strict_option_is_deprecated.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/deprecated_test.py", "file_name": "deprecated_test.py", "file_type": "text/x-python", "category": "test", "start_line": 100, "end_line": 116, "span_ids": ["test_strict_option_is_deprecated"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_strict_option_is_deprecated(pytester: Pytester) -> None:\n \"\"\"--strict is a deprecated alias to --strict-markers (#7530).\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.unknown\n def test_foo(): pass\n \"\"\"\n )\n result = pytester.runpytest(\"--strict\")\n result.stdout.fnmatch_lines(\n [\n \"'unknown' not found in `markers` configuration option\",\n \"*PytestDeprecationWarning: The --strict option is deprecated, use --strict-markers instead.\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_yield_fixture_is_deprecated_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/deprecated_test.py_test_yield_fixture_is_deprecated_", "embedding": null, "metadata": {"file_path": "testing/deprecated_test.py", "file_name": "deprecated_test.py", "file_type": "text/x-python", "category": "test", "start_line": 119, "end_line": 156, "span_ids": ["test_raising_unittest_skiptest_during_collection_is_deprecated", "test_private_is_deprecated", "test_private_is_deprecated.PrivateInit.__init__", "test_private_is_deprecated.PrivateInit", "test_yield_fixture_is_deprecated"], "tokens": 229}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_yield_fixture_is_deprecated() -> None:\n with pytest.warns(DeprecationWarning, match=r\"yield_fixture is deprecated\"):\n\n @pytest.yield_fixture\n def fix():\n assert False\n\n\ndef test_private_is_deprecated() -> None:\n class PrivateInit:\n def __init__(self, foo: int, *, _ispytest: bool = False) -> None:\n deprecated.check_ispytest(_ispytest)\n\n with pytest.warns(\n pytest.PytestDeprecationWarning, match=\"private pytest class or function\"\n ):\n PrivateInit(10)\n\n # Doesn't warn.\n PrivateInit(10, _ispytest=True)\n\n\ndef test_raising_unittest_skiptest_during_collection_is_deprecated(\n pytester: Pytester,\n) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n raise unittest.SkipTest()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*PytestDeprecationWarning: Raising unittest.SkipTest*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_plain_async.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/example_scripts/unittest/test_unittest_plain_async.py__", "embedding": null, "metadata": {"file_path": "testing/example_scripts/unittest/test_unittest_plain_async.py", "file_name": "test_unittest_plain_async.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 7, "span_ids": ["Test", "imports", "Test.test_foo"], "tokens": 19}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\n\nclass Test(unittest.TestCase):\n async def test_foo(self):\n assert False", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_change_level_undos_handler_level_test_change_level_undos_handler_level.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/logging/test_fixture.py_test_change_level_undos_handler_level_test_change_level_undos_handler_level.result_assert_outcomes_pa", "embedding": null, "metadata": 
{"file_path": "testing/logging/test_fixture.py", "file_name": "test_fixture.py", "file_type": "text/x-python", "category": "test", "start_line": 57, "end_line": 82, "span_ids": ["test_change_level_undos_handler_level"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_change_level_undos_handler_level(pytester: Pytester) -> None:\n \"\"\"Ensure that 'set_level' is undone after the end of the test (handler).\n\n Issue #7569. Tests the handler level specifically.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import logging\n\n def test1(caplog):\n assert caplog.handler.level == 0\n caplog.set_level(9999)\n caplog.set_level(41)\n assert caplog.handler.level == 41\n\n def test2(caplog):\n assert caplog.handler.level == 0\n\n def test3(caplog):\n assert caplog.handler.level == 0\n caplog.set_level(43)\n assert caplog.handler.level == 43\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/bdd_wallet.py_from_pytest_bdd_import_gi_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/bdd_wallet.py_from_pytest_bdd_import_gi_", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/bdd_wallet.py", "file_name": "bdd_wallet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 40, "span_ids": ["wallet.Wallet:2", "buy_bananas", "test_publish", "fill_wallet", "check", "buy_apples", "imports", "wallet", "wallet.Wallet"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from pytest_bdd import given\nfrom pytest_bdd import scenario\nfrom pytest_bdd import then\nfrom pytest_bdd import when\n\nimport pytest\n\n\n@scenario(\"bdd_wallet.feature\", \"Buy fruits\")\ndef test_publish():\n pass\n\n\n@pytest.fixture\ndef wallet():\n class Wallet:\n amount = 0\n\n return Wallet()\n\n\n@given(\"A wallet with 50\")\ndef fill_wallet(wallet):\n wallet.amount = 50\n\n\n@when(\"I buy some apples for 1\")\ndef buy_apples(wallet):\n wallet.amount -= 1\n\n\n@when(\"I buy some bananas for 2\")\ndef buy_bananas(wallet):\n wallet.amount -= 2\n\n\n@then(\"I have 47 left\")\ndef check(wallet):\n assert wallet.amount == 47", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/django_settings.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/django_settings.py__", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/django_settings.py", "file_name": 
"django_settings.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 2, "span_ids": ["impl"], "tokens": 7}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "SECRET_KEY = \"mysecret\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_anyio_integration.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_anyio_integration.py__", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/pytest_anyio_integration.py", "file_name": "pytest_anyio_integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 9, "span_ids": ["imports", "test_sleep"], "tokens": 25}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import anyio\n\nimport pytest\n\n\n@pytest.mark.anyio\nasync def test_sleep():\n await anyio.sleep(0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_asyncio_integration.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_asyncio_integration.py__", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/pytest_asyncio_integration.py", "file_name": "pytest_asyncio_integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 9, "span_ids": ["imports", "test_sleep"], "tokens": 23}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import asyncio\n\nimport pytest\n\n\n@pytest.mark.asyncio\nasync def test_sleep():\n await asyncio.sleep(0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_mock_integration.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_mock_integration.py__", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/pytest_mock_integration.py", "file_name": "pytest_mock_integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 3, "span_ids": ["test_mocker"], "tokens": 12}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_mocker(mocker):\n mocker.MagicMock()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_trio_integration.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_trio_integration.py__", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/pytest_trio_integration.py", "file_name": "pytest_trio_integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 9, "span_ids": ["imports", "test_sleep"], "tokens": 23}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import trio\n\nimport pytest\n\n\n@pytest.mark.trio\nasync def test_sleep():\n await trio.sleep(0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_twisted_integration.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/pytest_twisted_integration.py__", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/pytest_twisted_integration.py", "file_name": "pytest_twisted_integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 19, "span_ids": ["test_inlineCallbacks_async", "test_inlineCallbacks", "imports", "sleep"], "tokens": 71}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest_twisted\nfrom twisted.internet.task import deferLater\n\n\ndef sleep():\n import twisted.internet.reactor\n\n return deferLater(clock=twisted.internet.reactor, delay=0)\n\n\n@pytest_twisted.inlineCallbacks\ndef test_inlineCallbacks():\n yield sleep()\n\n\n@pytest_twisted.ensureDeferred\nasync def test_inlineCallbacks_async():\n await sleep()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/simple_integration.py__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/plugins_integration/simple_integration.py__", "embedding": null, "metadata": {"file_path": "testing/plugins_integration/simple_integration.py", "file_name": 
"simple_integration.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 11, "span_ids": ["test_bar", "imports", "test_foo"], "tokens": 30}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\n\n\ndef test_foo():\n assert True\n\n\n@pytest.mark.parametrize(\"i\", range(3))\ndef test_bar(i):\n assert True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_list_wrong_len_TestApprox.test_tuple_vs_other.assert_1_approx_1_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_list_wrong_len_TestApprox.test_tuple_vs_other.assert_1_approx_1_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 316, "end_line": 335, "span_ids": ["TestApprox.test_list_wrong_len", "TestApprox.test_tuple", "TestApprox.test_tuple_vs_other", "TestApprox.test_tuple_wrong_len"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_list_wrong_len(self):\n assert [1, 2] != approx([1])\n assert [1, 2] != approx([1, 2, 3])\n\n def test_tuple(self):\n actual = (1 + 1e-7, 2 + 1e-8)\n expected = (1, 2)\n\n # Return false if any element is outside the tolerance.\n assert actual == approx(expected, rel=5e-7, abs=0)\n assert actual != approx(expected, rel=5e-8, abs=0)\n assert approx(expected, rel=5e-7, abs=0) == actual\n assert approx(expected, rel=5e-8, abs=0) != actual\n\n def test_tuple_wrong_len(self):\n assert (1, 2) != approx((1,))\n assert (1, 2) != approx((1, 2, 3))\n\n def test_tuple_vs_other(self):\n assert 1 != approx((1,))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_dict_TestApprox.test_dict.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_dict_TestApprox.test_dict.None_3", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 337, "end_line": 347, "span_ids": ["TestApprox.test_dict"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestApprox:\n\n def test_dict(self):\n actual = {\"a\": 1 + 1e-7, \"b\": 2 + 1e-8}\n # Dictionaries became ordered in python3.6, so switch up the order here\n # to make sure it doesn't matter.\n expected = {\"b\": 2, \"a\": 1}\n\n # Return false if any element is outside the tolerance.\n assert actual == approx(expected, rel=5e-7, abs=0)\n assert actual != approx(expected, rel=5e-8, abs=0)\n assert approx(expected, rel=5e-7, abs=0) == actual\n assert approx(expected, rel=5e-8, abs=0) != actual", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_dict_wrong_len_TestApprox.test_dict_vs_other.assert_1_approx_a_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_dict_wrong_len_TestApprox.test_dict_vs_other.assert_1_approx_a_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 349, "end_line": 359, "span_ids": ["TestApprox.test_dict_nonnumeric", "TestApprox.test_dict_wrong_len", "TestApprox.test_dict_vs_other"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_dict_wrong_len(self):\n assert {\"a\": 1, \"b\": 2} != approx({\"a\": 1})\n assert {\"a\": 1, \"b\": 2} != approx({\"a\": 1, \"c\": 2})\n assert {\"a\": 1, \"b\": 2} != approx({\"a\": 1, \"b\": 2, \"c\": 3})\n\n def test_dict_nonnumeric(self):\n assert {\"a\": 1.0, \"b\": None} == pytest.approx({\"a\": 1.0, \"b\": None})\n assert {\"a\": 1.0, \"b\": 1} != pytest.approx({\"a\": 1.0, \"b\": None})\n\n def test_dict_vs_other(self):\n assert 1 != approx({\"a\": 0})", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_expecting_inf_TestApprox.test_numpy_array_wrong_shape.assert_a21_approx_a12_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_expecting_inf_TestApprox.test_numpy_array_wrong_shape.assert_a21_approx_a12_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 427, "end_line": 448, "span_ids": ["TestApprox.test_numpy_array_wrong_shape", "TestApprox.test_numpy_expecting_inf"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_numpy_expecting_inf(self):\n np = pytest.importorskip(\"numpy\")\n examples = [\n (eq, inf, inf),\n (eq, -inf, -inf),\n (ne, inf, -inf),\n 
(ne, 0.0, inf),\n (ne, nan, inf),\n ]\n for op, a, x in examples:\n assert op(np.array(a), approx(x))\n assert op(a, approx(np.array(x)))\n assert op(np.array(a), approx(np.array(x)))\n\n def test_numpy_array_wrong_shape(self):\n np = pytest.importorskip(\"numpy\")\n\n a12 = np.array([[1, 2]])\n a21 = np.array([[1], [2]])\n\n assert a12 != approx(a21)\n assert a21 != approx(a12)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_protocol_TestApprox.test_numpy_array_protocol.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_numpy_array_protocol_TestApprox.test_numpy_array_protocol.None_4", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 450, "end_line": 478, "span_ids": ["TestApprox.test_numpy_array_protocol.DeviceArray", "TestApprox.test_numpy_array_protocol.DeviceScalar.__init__", "TestApprox.test_numpy_array_protocol.DeviceArray.__init__", "TestApprox.test_numpy_array_protocol.DeviceScalar", "TestApprox.test_numpy_array_protocol"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_numpy_array_protocol(self):\n \"\"\"\n array-like objects such as tensorflow's DeviceArray are handled like ndarray.\n See issue #8132\n \"\"\"\n np = pytest.importorskip(\"numpy\")\n\n class DeviceArray:\n def __init__(self, value, size):\n self.value = value\n self.size = size\n\n def __array__(self):\n return self.value * np.ones(self.size)\n\n class DeviceScalar:\n def __init__(self, value):\n self.value = value\n\n def __array__(self):\n return np.array(self.value)\n\n expected = 1\n actual = 1 + 1e-6\n assert approx(expected) == DeviceArray(actual, size=1)\n assert approx(expected) == DeviceArray(actual, size=2)\n assert approx(expected) == DeviceScalar(actual)\n assert approx(DeviceScalar(expected)) == actual\n assert approx(DeviceScalar(expected)) == DeviceScalar(actual)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_doctests_TestApprox.test_unicode_plus_minus.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_doctests_TestApprox.test_unicode_plus_minus.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 480, "end_line": 506, "span_ids": ["TestApprox.test_unicode_plus_minus", "TestApprox.test_doctests"], "tokens": 219}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_doctests(self, mocked_doctest_runner) -> None:\n import doctest\n\n parser = doctest.DocTestParser()\n assert approx.__doc__ is not None\n test = parser.get_doctest(\n approx.__doc__, {\"approx\": approx}, approx.__name__, None, None\n )\n mocked_doctest_runner.run(test)\n\n def test_unicode_plus_minus(self, pytester: Pytester) -> None:\n \"\"\"\n Comparing approx instances inside lists should not produce an error in the detailed diff.\n Integration test for issue #2111.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_foo():\n assert [3] == [pytest.approx(4)]\n \"\"\"\n )\n expected = \"4.0e-06\"\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [f\"*At index 0 diff: 3 != 4 \u00b1 {expected}\", \"=* 1 failed in *=\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expected_value_type_error_TestApprox.test_nonnumeric_okay_if_equal.assert_x_approx_x_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_expected_value_type_error_TestApprox.test_nonnumeric_okay_if_equal.assert_x_approx_x_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 508, "end_line": 532, "span_ids": ["TestApprox.test_expected_value_type_error", "TestApprox.test_nonnumeric_okay_if_equal"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n @pytest.mark.parametrize(\n \"x, name\",\n [\n pytest.param([[1]], \"data structures\", id=\"nested-list\"),\n pytest.param({\"key\": {\"key\": 1}}, \"dictionaries\", id=\"nested-dict\"),\n ],\n )\n def test_expected_value_type_error(self, x, name):\n with pytest.raises(\n TypeError,\n match=fr\"pytest.approx\\(\\) does not support nested {name}:\",\n ):\n approx(x)\n\n @pytest.mark.parametrize(\n \"x\",\n [\n pytest.param(None),\n pytest.param(\"string\"),\n pytest.param([\"string\"], id=\"nested-str\"),\n pytest.param({\"key\": \"string\"}, id=\"dict-with-string\"),\n ],\n )\n def test_nonnumeric_okay_if_equal(self, x):\n assert x == approx(x)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_false_if_unequal_TestApprox.test_nonnumeric_false_if_unequal.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_false_if_unequal_TestApprox.test_nonnumeric_false_if_unequal.None_3", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": 
"implementation", "start_line": 534, "end_line": 554, "span_ids": ["TestApprox.test_nonnumeric_false_if_unequal"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n @pytest.mark.parametrize(\n \"x\",\n [\n pytest.param(\"string\"),\n pytest.param([\"string\"], id=\"nested-str\"),\n pytest.param({\"key\": \"string\"}, id=\"dict-with-string\"),\n ],\n )\n def test_nonnumeric_false_if_unequal(self, x):\n \"\"\"For nonnumeric types, x != pytest.approx(y) reduces to x != y\"\"\"\n assert \"ab\" != approx(\"abc\")\n assert [\"ab\"] != approx([\"abc\"])\n # in particular, both of these should return False\n assert {\"a\": 1.0} != approx({\"a\": None})\n assert {\"a\": None} != approx({\"a\": 1.0})\n\n assert 1.0 != approx(None)\n assert None != approx(1.0) # noqa: E711\n\n assert 1.0 != approx([None])\n assert None != approx([1.0]) # noqa: E711", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_dict_repr_TestApprox.test_nonnumeric_dict_repr.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_dict_repr_TestApprox.test_nonnumeric_dict_repr.assert_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 556, "end_line": 563, "span_ids": ["TestApprox.test_nonnumeric_dict_repr"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n @pytest.mark.skipif(sys.version_info < (3, 7), reason=\"requires ordered dicts\")\n def test_nonnumeric_dict_repr(self):\n \"\"\"Dicts with non-numerics and infinites have no tolerances\"\"\"\n x1 = {\"foo\": 1.0000005, \"bar\": None, \"foobar\": inf}\n assert (\n repr(approx(x1))\n == \"approx({'foo': 1.0000005 \u00b1 1.0e-06, 'bar': None, 'foobar': inf})\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_list_repr_TestApprox.test_comparison_operator_type_error.with_pytest_raises_TypeEr.op_1_approx_1_rel_1e_6_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/approx.py_TestApprox.test_nonnumeric_list_repr_TestApprox.test_comparison_operator_type_error.with_pytest_raises_TypeEr.op_1_approx_1_rel_1e_6_", "embedding": null, "metadata": {"file_path": "testing/python/approx.py", "file_name": "approx.py", "file_type": "text/x-python", "category": "implementation", "start_line": 565, "end_line": 582, "span_ids": 
["TestApprox.test_nonnumeric_list_repr", "TestApprox.test_comparison_operator_type_error"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApprox:\n\n def test_nonnumeric_list_repr(self):\n \"\"\"Lists with non-numerics and infinites have no tolerances\"\"\"\n x1 = [1.0000005, None, inf]\n assert repr(approx(x1)) == \"approx([1.0000005 \u00b1 1.0e-06, None, inf])\"\n\n @pytest.mark.parametrize(\n \"op\",\n [\n pytest.param(operator.le, id=\"<=\"),\n pytest.param(operator.lt, id=\"<\"),\n pytest.param(operator.ge, id=\">=\"),\n pytest.param(operator.gt, id=\">\"),\n ],\n )\n def test_comparison_operator_type_error(self, op):\n \"\"\"pytest.approx should raise TypeError for operators other than == and != (#2003).\"\"\"\n with pytest.raises(TypeError):\n op(1, approx(1, rel=1e-6, abs=1e-12))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_os_TestModule.test_import_duplicate.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_os_TestModule.test_import_duplicate.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 42, "span_ids": ["TestModule", "TestModule.test_failing_import", "TestModule.test_import_duplicate", "imports"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport sys\nimport textwrap\nfrom typing import Any\nfrom typing import Dict\n\nimport _pytest._code\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.nodes import Collector\nfrom _pytest.pytester import Pytester\nfrom _pytest.python import Class\nfrom _pytest.python import Instance\n\n\nclass TestModule:\n def test_failing_import(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\"import alksdjalskdjalkjals\")\n pytest.raises(Collector.CollectError, modcol.collect)\n\n def test_import_duplicate(self, pytester: Pytester) -> None:\n a = pytester.mkdir(\"a\")\n b = pytester.mkdir(\"b\")\n p1 = a.joinpath(\"test_whatever.py\")\n p1.touch()\n p2 = b.joinpath(\"test_whatever.py\")\n p2.touch()\n # ensure we don't have it imported already\n sys.modules.pop(p1.stem, None)\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*import*mismatch*\",\n \"*imported*test_whatever*\",\n \"*%s*\" % p1,\n \"*not the same*\",\n \"*%s*\" % p2,\n \"*HINT*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_import_prepend_append_TestModule.test_import_prepend_append.with_monkeypatch_context_.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestModule.test_import_prepend_append_TestModule.test_import_prepend_append.with_monkeypatch_context_.None_2", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 44, "end_line": 69, "span_ids": ["TestModule.test_import_prepend_append"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestModule:\n\n def test_import_prepend_append(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n root1 = pytester.mkdir(\"root1\")\n root2 = pytester.mkdir(\"root2\")\n root1.joinpath(\"x456.py\").touch()\n root2.joinpath(\"x456.py\").touch()\n p = root2.joinpath(\"test_x456.py\")\n monkeypatch.syspath_prepend(str(root1))\n p.write_text(\n textwrap.dedent(\n \"\"\"\\\n import x456\n def test():\n assert x456.__file__.startswith({!r})\n \"\"\".format(\n str(root2)\n )\n )\n )\n with monkeypatch.context() as mp:\n mp.chdir(root2)\n reprec = pytester.inline_run(\"--import-mode=append\")\n reprec.assertoutcome(passed=0, failed=1)\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.make_function_TestFunction.test_single_tuple_unwraps_values.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.make_function_TestFunction.test_single_tuple_unwraps_values.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 292, "end_line": 344, "span_ids": ["TestFunction.test_function_equality", "TestFunction.make_function", "TestFunction.test_single_tuple_unwraps_values", "TestFunction.test_issue197_parametrize_emptyset", "TestFunction.test_repr_produces_actual_test_id"], "tokens": 402}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n @staticmethod\n def make_function(pytester: Pytester, **kwargs: Any) -> Any:\n from _pytest.fixtures import FixtureManager\n\n config = pytester.parseconfigure()\n session = pytester.Session.from_config(config)\n session._fixturemanager = FixtureManager(session)\n\n return pytest.Function.from_parent(parent=session, **kwargs)\n\n def test_function_equality(self, pytester: Pytester) -> None:\n def func1():\n pass\n\n def func2():\n pass\n\n f1 = self.make_function(pytester, 
name=\"name\", callobj=func1)\n assert f1 == f1\n f2 = self.make_function(\n pytester, name=\"name\", callobj=func2, originalname=\"foobar\"\n )\n assert f1 != f2\n\n def test_repr_produces_actual_test_id(self, pytester: Pytester) -> None:\n f = self.make_function(\n pytester, name=r\"test[\\xe5]\", callobj=self.test_repr_produces_actual_test_id\n )\n assert repr(f) == r\"\"\n\n def test_issue197_parametrize_emptyset(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('arg', [])\n def test_function(arg):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(skipped=1)\n\n def test_single_tuple_unwraps_values(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize(('arg',), [(1,)])\n def test_function(arg):\n assert arg == 1\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_issue213_parametrize_value_no_equal_TestFunction.test_issue213_parametrize_value_no_equal.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_issue213_parametrize_value_no_equal_TestFunction.test_issue213_parametrize_value_no_equal.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 346, "end_line": 359, "span_ids": ["TestFunction.test_issue213_parametrize_value_no_equal"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_issue213_parametrize_value_no_equal(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n class A(object):\n def __eq__(self, other):\n raise ValueError(\"not possible\")\n @pytest.mark.parametrize('arg', [A()])\n def test_function(arg):\n assert arg.__class__.__name__ == \"A\"\n \"\"\"\n )\n reprec = pytester.inline_run(\"--fulltrace\")\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_empty_string_arguments_TestFunction.test_parametrize_with_empty_string_arguments.assert_names_test_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_with_empty_string_arguments_TestFunction.test_parametrize_with_empty_string_arguments.assert_names_test_", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 516, "end_line": 527, "span_ids": ["TestFunction.test_parametrize_with_empty_string_arguments"], "tokens": 
110}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_with_empty_string_arguments(self, pytester: Pytester) -> None:\n items = pytester.getitems(\n \"\"\"\\\n import pytest\n\n @pytest.mark.parametrize('v', ('', ' '))\n @pytest.mark.parametrize('w', ('', ' '))\n def test(v, w): ...\n \"\"\"\n )\n names = {item.name for item in items}\n assert names == {\"test[-]\", \"test[ -]\", \"test[- ]\", \"test[ - ]\"}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_equality_with_callspec_TestFunction.test_pyfunc_call.config_hook_pytest_pyfunc": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_function_equality_with_callspec_TestFunction.test_pyfunc_call.config_hook_pytest_pyfunc", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 529, "end_line": 556, "span_ids": ["TestFunction.test_pyfunc_call.MyPlugin2.pytest_pyfunc_call", "TestFunction.test_pyfunc_call.MyPlugin1.pytest_pyfunc_call", "TestFunction.test_pyfunc_call.MyPlugin2", "TestFunction.test_pyfunc_call", "TestFunction.test_function_equality_with_callspec", "TestFunction.test_pyfunc_call.MyPlugin1"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_function_equality_with_callspec(self, pytester: Pytester) -> None:\n items = pytester.getitems(\n \"\"\"\n import pytest\n @pytest.mark.parametrize('arg', [1,2])\n def test_function(arg):\n pass\n \"\"\"\n )\n assert items[0] != items[1]\n assert not (items[0] == items[1])\n\n def test_pyfunc_call(self, pytester: Pytester) -> None:\n item = pytester.getitem(\"def test_func(): raise ValueError\")\n config = item.config\n\n class MyPlugin1:\n def pytest_pyfunc_call(self):\n raise ValueError\n\n class MyPlugin2:\n def pytest_pyfunc_call(self):\n return True\n\n config.pluginmanager.register(MyPlugin1())\n config.pluginmanager.register(MyPlugin2())\n config.hook.pytest_runtest_setup(item=item)\n config.hook.pytest_pyfunc_call(pyfuncitem=item)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_skipif_TestFunction.test_parametrize_skipif.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_skipif_TestFunction.test_parametrize_skipif.result_stdout_fnmatch_lin", "embedding": null, "metadata": 
{"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 593, "end_line": 606, "span_ids": ["TestFunction.test_parametrize_skipif"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_skipif(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n m = pytest.mark.skipif('True')\n\n @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])\n def test_skip_if(x):\n assert x < 2\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 2 passed, 1 skipped in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_skip_TestFunction.test_parametrize_skipif_no_skip.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_skip_TestFunction.test_parametrize_skipif_no_skip.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 608, "end_line": 636, "span_ids": ["TestFunction.test_parametrize_skipif_no_skip", "TestFunction.test_parametrize_skip"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_skip(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n m = pytest.mark.skip('')\n\n @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])\n def test_skip(x):\n assert x < 2\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 2 passed, 1 skipped in *\"])\n\n def test_parametrize_skipif_no_skip(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n m = pytest.mark.skipif('False')\n\n @pytest.mark.parametrize('x', [0, 1, m(2)])\n def test_skipif_no_skip(x):\n assert x < 2\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 1 failed, 2 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_xfail_TestFunction.test_parametrize_xfail.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_xfail_TestFunction.test_parametrize_xfail.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", 
"file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 638, "end_line": 651, "span_ids": ["TestFunction.test_parametrize_xfail"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_xfail(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n m = pytest.mark.xfail('True')\n\n @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])\n def test_xfail(x):\n assert x < 2\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 2 passed, 1 xfailed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_passed_TestFunction.test_parametrize_xfail_passed.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestFunction.test_parametrize_passed_TestFunction.test_parametrize_xfail_passed.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 653, "end_line": 681, "span_ids": ["TestFunction.test_parametrize_xfail_passed", "TestFunction.test_parametrize_passed"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunction:\n\n def test_parametrize_passed(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n m = pytest.mark.xfail('True')\n\n @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])\n def test_xfail(x):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 2 passed, 1 xpassed in *\"])\n\n def test_parametrize_xfail_passed(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n m = pytest.mark.xfail('False')\n\n @pytest.mark.parametrize('x', [0, 1, m(2)])\n def test_passed(x):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"* 3 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_pytest_pycollect_makeitem_TestConftestCustomization.test_pytest_pycollect_makeitem.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_pytest_pycollect_makeitem_TestConftestCustomization.test_pytest_pycollect_makeitem.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", 
"file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 851, "end_line": 864, "span_ids": ["TestConftestCustomization.test_pytest_pycollect_makeitem"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConftestCustomization:\n\n def test_pytest_pycollect_makeitem(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n class MyFunction(pytest.Function):\n pass\n def pytest_pycollect_makeitem(collector, name, obj):\n if name == \"some\":\n return MyFunction.from_parent(name=name, parent=collector)\n \"\"\"\n )\n pytester.makepyfile(\"def some(): pass\")\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines([\"*MyFunction*some*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_early_ignored_attributes_TestConftestCustomization.test_early_ignored_attributes.assert_len_items_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/collect.py_TestConftestCustomization.test_early_ignored_attributes_TestConftestCustomization.test_early_ignored_attributes.assert_len_items_1", "embedding": null, "metadata": {"file_path": "testing/python/collect.py", "file_name": "collect.py", "file_type": "text/x-python", "category": "implementation", "start_line": 899, "end_line": 925, "span_ids": ["TestConftestCustomization.test_early_ignored_attributes"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConftestCustomization:\n\n def test_early_ignored_attributes(self, pytester: Pytester) -> None:\n \"\"\"Builtin attributes should be ignored early on, even if\n configuration would otherwise allow them.\n\n This tests a performance optimization, not correctness, really,\n although it tests PytestCollectionWarning is not raised, while\n it would have been raised otherwise.\n \"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n python_classes=*\n python_functions=*\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n class TestEmpty:\n pass\n test_empty = TestEmpty()\n def test_real():\n pass\n \"\"\"\n )\n items, rec = pytester.inline_genitems()\n assert rec.ret == 0\n assert len(items) == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_os_test_getfuncargnames_functions.assert_getfuncargnames_j_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_os_test_getfuncargnames_functions.assert_getfuncargnames_j_", "embedding": null, "metadata": {"file_path": 
"testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 38, "span_ids": ["test_getfuncargnames_functions", "imports"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport sys\nimport textwrap\nfrom pathlib import Path\n\nimport pytest\nfrom _pytest import fixtures\nfrom _pytest.compat import getfuncargnames\nfrom _pytest.config import ExitCode\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import get_public_names\nfrom _pytest.pytester import Pytester\nfrom _pytest.python import Function\n\n\ndef test_getfuncargnames_functions():\n \"\"\"Test getfuncargnames for normal functions\"\"\"\n\n def f():\n raise NotImplementedError()\n\n assert not getfuncargnames(f)\n\n def g(arg):\n raise NotImplementedError()\n\n assert getfuncargnames(g) == (\"arg\",)\n\n def h(arg1, arg2=\"hello\"):\n raise NotImplementedError()\n\n assert getfuncargnames(h) == (\"arg1\",)\n\n def j(arg1, arg2, arg3=\"hello\"):\n raise NotImplementedError()\n\n assert getfuncargnames(j) == (\"arg1\", \"arg2\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures_TestFillFixtures.test_detect_recursive_dependency_error.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures_TestFillFixtures.test_detect_recursive_dependency_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 104, "end_line": 127, "span_ids": ["TestFillFixtures.test_detect_recursive_dependency_error", "TestFillFixtures", "TestFillFixtures.test_fillfuncargs_exposed", "TestFillFixtures.test_funcarg_lookupfails"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n def test_fillfuncargs_exposed(self):\n # used by oejskit, kept for compatibility\n assert pytest._fillfuncargs == fixtures._fillfuncargs\n\n def test_funcarg_lookupfails(self, pytester: Pytester) -> None:\n pytester.copy_example()\n result = pytester.runpytest() # \"--collect-only\")\n assert result.ret != 0\n result.stdout.fnmatch_lines(\n \"\"\"\n *def test_func(some)*\n *fixture*some*not found*\n *xyzsomething*\n \"\"\"\n )\n\n def test_detect_recursive_dependency_error(self, pytester: Pytester) -> None:\n pytester.copy_example()\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"*recursive dependency involving fixture 'fix1' detected*\"]\n )", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_basic_TestFillFixtures.test_funcarg_basic.assert_item_funcargs_oth": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_basic_TestFillFixtures.test_funcarg_basic.assert_item_funcargs_oth", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 129, "end_line": 138, "span_ids": ["TestFillFixtures.test_funcarg_basic"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_funcarg_basic(self, pytester: Pytester) -> None:\n pytester.copy_example()\n item = pytester.getitem(Path(\"test_funcarg_basic.py\"))\n assert isinstance(item, Function)\n # Execute's item's setup, which fills fixtures.\n item.session._setupstate.setup(item)\n del item.funcargs[\"request\"]\n assert len(get_public_names(item.funcargs)) == 2\n assert item.funcargs[\"some\"] == \"test_func\"\n assert item.funcargs[\"other\"] == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_lookup_modulelevel_TestFillFixtures.test_extend_fixture_conftest_conftest.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_funcarg_lookup_modulelevel_TestFillFixtures.test_extend_fixture_conftest_conftest.None_1", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 140, "end_line": 176, "span_ids": ["TestFillFixtures.test_funcarg_lookup_classlevel", "TestFillFixtures.test_funcarg_lookup_modulelevel", "TestFillFixtures.test_extend_fixture_conftest_conftest", "TestFillFixtures.test_conftest_funcargs_only_available_in_subdir", "TestFillFixtures.test_extend_fixture_module_class", "TestFillFixtures.test_extend_fixture_conftest_module"], "tokens": 398}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_funcarg_lookup_modulelevel(self, pytester: Pytester) -> None:\n pytester.copy_example()\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)\n\n def test_funcarg_lookup_classlevel(self, pytester: Pytester) -> None:\n p = pytester.copy_example()\n result = pytester.runpytest(p)\n 
result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_conftest_funcargs_only_available_in_subdir(\n self, pytester: Pytester\n ) -> None:\n pytester.copy_example()\n result = pytester.runpytest(\"-v\")\n result.assert_outcomes(passed=2)\n\n def test_extend_fixture_module_class(self, pytester: Pytester) -> None:\n testfile = pytester.copy_example()\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result = pytester.runpytest(testfile)\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_extend_fixture_conftest_module(self, pytester: Pytester) -> None:\n p = pytester.copy_example()\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result = pytester.runpytest(str(next(Path(str(p)).rglob(\"test_*.py\"))))\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_extend_fixture_conftest_conftest(self, pytester: Pytester) -> None:\n p = pytester.copy_example()\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n result = pytester.runpytest(str(next(Path(str(p)).rglob(\"test_*.py\"))))\n result.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_fixture_reusing_super_fixture_parametrization_TestFillFixtures.test_override_fixture_reusing_super_fixture_parametrization.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_fixture_reusing_super_fixture_parametrization_TestFillFixtures.test_override_fixture_reusing_super_fixture_parametrization.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 429, "end_line": 457, "span_ids": ["TestFillFixtures.test_override_fixture_reusing_super_fixture_parametrization"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_fixture_reusing_super_fixture_parametrization(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Override a fixture at a lower level, reusing the higher-level fixture that\n is parametrized (#1953).\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2])\n def foo(request):\n return request.param\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def foo(foo):\n return foo * 2\n\n def test_spam(foo):\n assert foo in (2, 4)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrize_fixture_and_indirect_TestFillFixtures.test_override_parametrize_fixture_and_indirect.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrize_fixture_and_indirect_TestFillFixtures.test_override_parametrize_fixture_and_indirect.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 459, "end_line": 493, "span_ids": ["TestFillFixtures.test_override_parametrize_fixture_and_indirect"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_parametrize_fixture_and_indirect(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Override a fixture at a lower level, reusing the higher-level fixture that\n is parametrized, while also using indirect parametrization.\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2])\n def foo(request):\n return request.param\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def foo(foo):\n return foo * 2\n\n @pytest.fixture\n def bar(request):\n return request.param * 100\n\n @pytest.mark.parametrize(\"bar\", [42], indirect=True)\n def test_spam(bar, foo):\n assert bar == 4200\n assert foo in (2, 4)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_top_level_fixture_reusing_super_fixture_parametrization_TestFillFixtures.test_override_top_level_fixture_reusing_super_fixture_parametrization.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_top_level_fixture_reusing_super_fixture_parametrization_TestFillFixtures.test_override_top_level_fixture_reusing_super_fixture_parametrization.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 495, "end_line": 527, "span_ids": ["TestFillFixtures.test_override_top_level_fixture_reusing_super_fixture_parametrization"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def 
test_override_top_level_fixture_reusing_super_fixture_parametrization(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Same as the above test, but with another level of overwriting.\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=['unused', 'unused'])\n def foo(request):\n return request.param\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1, 2])\n def foo(request):\n return request.param\n\n class Test:\n\n @pytest.fixture\n def foo(self, foo):\n return foo * 2\n\n def test_spam(self, foo):\n assert foo in (2, 4)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_with_new_parametrized_fixture_TestFillFixtures.test_override_parametrized_fixture_with_new_parametrized_fixture.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFillFixtures.test_override_parametrized_fixture_with_new_parametrized_fixture_TestFillFixtures.test_override_parametrized_fixture_with_new_parametrized_fixture.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 529, "end_line": 559, "span_ids": ["TestFillFixtures.test_override_parametrized_fixture_with_new_parametrized_fixture"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.pytester_example_path(\"fixtures/fill_fixtures\")\nclass TestFillFixtures:\n\n def test_override_parametrized_fixture_with_new_parametrized_fixture(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Overriding a parametrized fixture, while also parametrizing the new fixture and\n simultaneously requesting the overwritten fixture as parameter, yields the same value\n as ``request.param``.\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=['ignored', 'ignored'])\n def foo(request):\n return request.param\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[10, 20])\n def foo(foo, request):\n assert request.param == foo\n return foo * 2\n\n def test_spam(foo):\n assert foo in (20, 40)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*2 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_fixturenames_dynamic_fixture_TestRequestBasic.test_setupdecorator_and_xunit.reprec_assertoutcome_pass": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestBasic.test_request_fixturenames_dynamic_fixture_TestRequestBasic.test_setupdecorator_and_xunit.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 994, "end_line": 1030, "span_ids": ["TestRequestBasic.test_setupdecorator_and_xunit", "TestRequestBasic.test_request_fixturenames_dynamic_fixture"], "tokens": 268}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestBasic:\n\n def test_request_fixturenames_dynamic_fixture(self, pytester: Pytester) -> None:\n \"\"\"Regression test for #3057\"\"\"\n pytester.copy_example(\"fixtures/test_getfixturevalue_dynamic.py\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n def test_setupdecorator_and_xunit(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n values = []\n @pytest.fixture(scope='module', autouse=True)\n def setup_module():\n values.append(\"module\")\n @pytest.fixture(autouse=True)\n def setup_function():\n values.append(\"function\")\n\n def test_func():\n pass\n\n class TestClass(object):\n @pytest.fixture(scope=\"class\", autouse=True)\n def setup_class(self):\n values.append(\"class\")\n @pytest.fixture(autouse=True)\n def setup_method(self):\n values.append(\"method\")\n def test_method(self):\n pass\n def test_all():\n assert values == [\"module\", \"function\", \"class\",\n \"function\", \"method\", \"function\"]\n \"\"\"\n )\n reprec = pytester.inline_run(\"-v\")\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestMarking_TestRequestMarking.test_applymarker.with_pytest_raises_ValueE._type_ignore_arg_type_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestMarking_TestRequestMarking.test_applymarker.with_pytest_raises_ValueE._type_ignore_arg_type_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1095, "end_line": 1119, "span_ids": ["TestRequestMarking.test_applymarker", "TestRequestMarking"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestMarking:\n def test_applymarker(self, pytester: Pytester) -> None:\n item1, item2 = pytester.getitems(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def something(request):\n pass\n class TestClass(object):\n def test_func1(self, something):\n pass\n def test_func2(self, something):\n pass\n \"\"\"\n )\n req1 = fixtures.FixtureRequest(item1, _ispytest=True)\n 
assert \"xfail\" not in item1.keywords\n req1.applymarker(pytest.mark.xfail)\n assert \"xfail\" in item1.keywords\n assert \"skipif\" not in item1.keywords\n req1.applymarker(pytest.mark.skipif)\n assert \"skipif\" in item1.keywords\n with pytest.raises(ValueError):\n req1.applymarker(42) # type: ignore[arg-type]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestMarking.test_accesskeywords_TestRequestMarking.test_accessmarker_dynamic.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestRequestMarking.test_accesskeywords_TestRequestMarking.test_accessmarker_dynamic.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1121, "end_line": 1162, "span_ids": ["TestRequestMarking.test_accesskeywords", "TestRequestMarking.test_accessmarker_dynamic"], "tokens": 271}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRequestMarking:\n\n def test_accesskeywords(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture()\n def keywords(request):\n return request.keywords\n @pytest.mark.XYZ\n def test_function(keywords):\n assert keywords[\"XYZ\"]\n assert \"abc\" not in keywords\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n def test_accessmarker_dynamic(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture()\n def keywords(request):\n return request.keywords\n\n @pytest.fixture(scope=\"class\", autouse=True)\n def marking(request):\n request.applymarker(pytest.mark.XYZ(\"hello\"))\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_fun1(keywords):\n assert keywords[\"XYZ\"] is not None\n assert \"abc\" not in keywords\n def test_fun2(keywords):\n assert keywords[\"XYZ\"] is not None\n assert \"abc\" not in keywords\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_relative_node_ids_TestFixtureManagerParseFactories.test_parsefactories_relative_node_ids.with_monkeypatch_context_.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureManagerParseFactories.test_parsefactories_relative_node_ids_TestFixtureManagerParseFactories.test_parsefactories_relative_node_ids.with_monkeypatch_context_.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1595, 
"end_line": 1646, "span_ids": ["TestFixtureManagerParseFactories.test_parsefactories_relative_node_ids"], "tokens": 340}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureManagerParseFactories:\n\n def test_parsefactories_relative_node_ids(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n # example mostly taken from:\n # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html\n runner = pytester.mkdir(\"runner\")\n package = pytester.mkdir(\"package\")\n package.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def one():\n return 1\n \"\"\"\n )\n )\n package.joinpath(\"test_x.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_x(one):\n assert one == 1\n \"\"\"\n )\n )\n sub = package.joinpath(\"sub\")\n sub.mkdir()\n sub.joinpath(\"__init__.py\").touch()\n sub.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n @pytest.fixture\n def one():\n return 2\n \"\"\"\n )\n )\n sub.joinpath(\"test_y.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def test_x(one):\n assert one == 2\n \"\"\"\n )\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=2)\n with monkeypatch.context() as mp:\n mp.chdir(runner)\n reprec = pytester.inline_run(\"..\")\n reprec.assertoutcome(passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery_TestAutouseDiscovery.pytester.return.pytester": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestAutouseDiscovery_TestAutouseDiscovery.pytester.return.pytester", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1746, "end_line": 1772, "span_ids": ["TestAutouseDiscovery.pytester", "TestAutouseDiscovery"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAutouseDiscovery:\n @pytest.fixture\n def pytester(self, pytester: Pytester) -> Pytester:\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture(autouse=True)\n def perfunction(request, tmpdir):\n pass\n\n @pytest.fixture()\n def arg1(tmpdir):\n pass\n @pytest.fixture(autouse=True)\n def perfunction2(arg1):\n pass\n\n @pytest.fixture\n def fm(request):\n return request._fixturemanager\n\n @pytest.fixture\n def item(request):\n return request._pyfuncitem\n \"\"\"\n )\n return pytester", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker_TestFixtureMarker.test_parametrize.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker_TestFixtureMarker.test_parametrize.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2176, "end_line": 2192, "span_ids": ["TestFixtureMarker.test_parametrize", "TestFixtureMarker"], "tokens": 109}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n def test_parametrize(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture(params=[\"a\", \"b\", \"c\"])\n def arg(request):\n return request.param\n values = []\n def test_param(arg):\n values.append(arg)\n def test_result():\n assert values == list(\"abc\")\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_multiple_parametrization_issue_736_TestFixtureMarker.test_multiple_parametrization_issue_736.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_multiple_parametrization_issue_736_TestFixtureMarker.test_multiple_parametrization_issue_736.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2194, "end_line": 2210, "span_ids": ["TestFixtureMarker.test_multiple_parametrization_issue_736"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_multiple_parametrization_issue_736(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[1,2,3])\n def foo(request):\n return request.param\n\n @pytest.mark.parametrize('foobar', [4,5,6])\n def test_issue(foo, foobar):\n assert foo in [1,2,3]\n assert foobar in [4,5,6]\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=9)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_scope_bad_return_TestFixtureMarker.test_dynamic_scope_bad_return.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_dynamic_scope_bad_return_TestFixtureMarker.test_dynamic_scope_bad_return.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2473, "end_line": 2491, "span_ids": ["TestFixtureMarker.test_dynamic_scope_bad_return"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_dynamic_scope_bad_return(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def dynamic_scope(**_):\n return \"wrong-scope\"\n\n @pytest.fixture(scope=dynamic_scope)\n def fixture():\n pass\n\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"Fixture 'fixture' from test_dynamic_scope_bad_return.py \"\n \"got an unexpected scope value 'wrong-scope'\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_register_only_with_mark_TestFixtureMarker.test_register_only_with_mark.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_register_only_with_mark_TestFixtureMarker.test_register_only_with_mark.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2493, "end_line": 2513, "span_ids": ["TestFixtureMarker.test_register_only_with_mark"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_register_only_with_mark(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture()\n def arg():\n return 1\n \"\"\"\n )\n pytester.makepyfile(\n test_mod1=\"\"\"\n import pytest\n @pytest.fixture()\n def arg(arg):\n return arg + 1\n def test_1(arg):\n assert arg == 2\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_fixture_marked_function_not_collected_as_test_TestFixtureMarker.test_params_and_ids.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_fixture_marked_function_not_collected_as_test_TestFixtureMarker.test_params_and_ids.res_stdout_fnmatch_lines_", 
"embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3109, "end_line": 3141, "span_ids": ["TestFixtureMarker.test_params_and_ids", "TestFixtureMarker.test_fixture_marked_function_not_collected_as_test"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_fixture_marked_function_not_collected_as_test(\n self, pytester: Pytester\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def test_app():\n return 1\n\n def test_something(test_app):\n assert test_app == 1\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n def test_params_and_ids(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[object(), object()],\n ids=['alpha', 'beta'])\n def fix(request):\n return request.param\n\n def test_foo(fix):\n assert 1\n \"\"\"\n )\n res = pytester.runpytest(\"-v\")\n res.stdout.fnmatch_lines([\"*test_foo*alpha*\", \"*test_foo*beta*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_params_and_ids_yieldfixture_TestFixtureMarker.test_params_and_ids_yieldfixture.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_params_and_ids_yieldfixture_TestFixtureMarker.test_params_and_ids_yieldfixture.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3143, "end_line": 3157, "span_ids": ["TestFixtureMarker.test_params_and_ids_yieldfixture"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_params_and_ids_yieldfixture(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=[object(), object()], ids=['alpha', 'beta'])\n def fix(request):\n yield request.param\n\n def test_foo(fix):\n assert 1\n \"\"\"\n )\n res = pytester.runpytest(\"-v\")\n res.stdout.fnmatch_lines([\"*test_foo*alpha*\", \"*test_foo*beta*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_deterministic_fixture_collection_TestFixtureMarker.test_deterministic_fixture_collection.assert_output1_output2": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestFixtureMarker.test_deterministic_fixture_collection_TestFixtureMarker.test_deterministic_fixture_collection.assert_output1_output2", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3159, "end_line": 3202, "span_ids": ["TestFixtureMarker.test_deterministic_fixture_collection"], "tokens": 322}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFixtureMarker:\n\n def test_deterministic_fixture_collection(\n self, pytester: Pytester, monkeypatch\n ) -> None:\n \"\"\"#920\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(scope=\"module\",\n params=[\"A\",\n \"B\",\n \"C\"])\n def A(request):\n return request.param\n\n @pytest.fixture(scope=\"module\",\n params=[\"DDDDDDDDD\", \"EEEEEEEEEEEE\", \"FFFFFFFFFFF\", \"banansda\"])\n def B(request, A):\n return request.param\n\n def test_foo(B):\n # Something funky is going on here.\n # Despite specified seeds, on what is collected,\n # sometimes we get unexpected passes. hashing B seems\n # to help?\n assert hash(B) or True\n \"\"\"\n )\n monkeypatch.setenv(\"PYTHONHASHSEED\", \"1\")\n out1 = pytester.runpytest_subprocess(\"-v\")\n monkeypatch.setenv(\"PYTHONHASHSEED\", \"2\")\n out2 = pytester.runpytest_subprocess(\"-v\")\n output1 = [\n line\n for line in out1.outlines\n if line.startswith(\"test_deterministic_fixture_collection.py::test_foo\")\n ]\n output2 = [\n line\n for line in out2.outlines\n if line.startswith(\"test_deterministic_fixture_collection.py::test_foo\")\n ]\n assert len(output1) == 12\n assert output1 == output2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs_TestContextManagerFixtureFuncs.test_simple.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_TestContextManagerFixtureFuncs_TestContextManagerFixtureFuncs.test_simple.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 3614, "end_line": 3641, "span_ids": ["TestContextManagerFixtureFuncs", "TestContextManagerFixtureFuncs.test_simple"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestContextManagerFixtureFuncs:\n def test_simple(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg1():\n print(\"setup\")\n yield 1\n print(\"teardown\")\n def test_1(arg1):\n print(\"test1\", arg1)\n def test_2(arg1):\n print(\"test2\", arg1)\n assert 0\n 
\"\"\"\n )\n result = pytester.runpytest(\"-s\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *setup*\n *test1 1*\n *teardown*\n *setup*\n *test2 1*\n *teardown*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_param_shadowing_test_fixture_named_request.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_param_shadowing_test_fixture_named_request.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4282, "end_line": 4333, "span_ids": ["test_fixture_named_request", "test_fixture_param_shadowing"], "tokens": 391}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixture_param_shadowing(pytester: Pytester) -> None:\n \"\"\"Parametrized arguments would be shadowed if a fixture with the same name also exists (#5036)\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture(params=['a', 'b'])\n def argroot(request):\n return request.param\n\n @pytest.fixture\n def arg(argroot):\n return argroot\n\n # This should only be parametrized directly\n @pytest.mark.parametrize(\"arg\", [1])\n def test_direct(arg):\n assert arg == 1\n\n # This should be parametrized based on the fixtures\n def test_normal_fixture(arg):\n assert isinstance(arg, str)\n\n # Indirect should still work:\n\n @pytest.fixture\n def arg2(request):\n return 2*request.param\n\n @pytest.mark.parametrize(\"arg2\", [1], indirect=True)\n def test_indirect(arg2):\n assert arg2 == 2\n \"\"\"\n )\n # Only one test should have run\n result = pytester.runpytest(\"-v\")\n result.assert_outcomes(passed=4)\n result.stdout.fnmatch_lines([\"*::test_direct[[]1[]]*\"])\n result.stdout.fnmatch_lines([\"*::test_normal_fixture[[]a[]]*\"])\n result.stdout.fnmatch_lines([\"*::test_normal_fixture[[]b[]]*\"])\n result.stdout.fnmatch_lines([\"*::test_indirect[[]1[]]*\"])\n\n\ndef test_fixture_named_request(pytester: Pytester) -> None:\n pytester.copy_example(\"fixtures/test_fixture_named_request.py\")\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"*'request' is a reserved word for fixtures, use another name:\",\n \" *test_fixture_named_request.py:5\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_parametrization_nparray_test_fixture_parametrization_nparray.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_parametrization_nparray_test_fixture_parametrization_nparray.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": 
"implementation", "start_line": 4387, "end_line": 4404, "span_ids": ["test_fixture_parametrization_nparray"], "tokens": 105}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixture_parametrization_nparray(pytester: Pytester) -> None:\n pytest.importorskip(\"numpy\")\n\n pytester.makepyfile(\n \"\"\"\n from numpy import linspace\n from pytest import fixture\n\n @fixture(params=linspace(1, 10, 10))\n def value(request):\n return request.param\n\n def test_bug(value):\n assert value == value\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=10)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_arg_ordering_test_fixture_arg_ordering.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/fixtures.py_test_fixture_arg_ordering_test_fixture_arg_ordering.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/python/fixtures.py", "file_name": "fixtures.py", "file_type": "text/x-python", "category": "implementation", "start_line": 4407, "end_line": 4439, "span_ids": ["test_fixture_arg_ordering"], "tokens": 289}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fixture_arg_ordering(pytester: Pytester) -> None:\n \"\"\"\n This test describes how fixtures in the same scope but without explicit dependencies\n between them are created. 
While users should make dependencies explicit, often\n they rely on this order, so this test exists to catch regressions in this regard.\n See #6540 and #6492.\n \"\"\"\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n\n suffixes = []\n\n @pytest.fixture\n def fix_1(): suffixes.append(\"fix_1\")\n @pytest.fixture\n def fix_2(): suffixes.append(\"fix_2\")\n @pytest.fixture\n def fix_3(): suffixes.append(\"fix_3\")\n @pytest.fixture\n def fix_4(): suffixes.append(\"fix_4\")\n @pytest.fixture\n def fix_5(): suffixes.append(\"fix_5\")\n\n @pytest.fixture\n def fix_combined(fix_1, fix_2, fix_3, fix_4, fix_5): pass\n\n def test_suffix(fix_combined):\n assert suffixes == [\"fix_1\", \"fix_2\", \"fix_3\", \"fix_4\", \"fix_5\"]\n \"\"\"\n )\n result = pytester.runpytest(\"-vv\", str(p1))\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_itertools_None_25": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_itertools_None_25", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 28, "span_ids": ["imports"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import itertools\nimport re\nimport sys\nimport textwrap\nfrom typing import Any\nfrom typing import cast\nfrom typing import Dict\nfrom typing import Iterator\nfrom typing import List\nfrom typing import Optional\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import Union\n\nimport attr\nimport hypothesis\nfrom hypothesis import strategies\n\nimport pytest\nfrom _pytest import fixtures\nfrom _pytest import python\nfrom _pytest.compat import _format_args\nfrom _pytest.compat import getfuncargnames\nfrom _pytest.compat import NOTSET\nfrom _pytest.outcomes import fail\nfrom _pytest.pytester import Pytester\nfrom _pytest.python import _idval\nfrom _pytest.python import idmaker", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc_TestMetafunc.test_function_basic.assert_metafunc_cls_is_No": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc_TestMetafunc.test_function_basic.assert_metafunc_cls_is_No", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 31, "end_line": 68, "span_ids": ["TestMetafunc.Metafunc", "TestMetafunc.Metafunc.DefinitionMock:2", "TestMetafunc.Metafunc.FuncFixtureInfoMock:2", "TestMetafunc", "TestMetafunc.Metafunc.FuncFixtureInfoMock", "TestMetafunc.Metafunc.DefinitionMock", "TestMetafunc.test_function_basic", "TestMetafunc.test_no_funcargs"], "tokens": 286}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n def Metafunc(self, func, config=None) -> python.Metafunc:\n # The unit tests of this class check if things work correctly\n # on the funcarg level, so we don't need a full blown\n # initialization.\n class FuncFixtureInfoMock:\n name2fixturedefs = None\n\n def __init__(self, names):\n self.names_closure = names\n\n @attr.s\n class DefinitionMock(python.FunctionDefinition):\n obj = attr.ib()\n _nodeid = attr.ib()\n\n names = getfuncargnames(func)\n fixtureinfo: Any = FuncFixtureInfoMock(names)\n definition: Any = DefinitionMock._create(func, \"mock::nodeid\")\n return python.Metafunc(definition, fixtureinfo, config, _ispytest=True)\n\n def test_no_funcargs(self) -> None:\n def function():\n pass\n\n metafunc = self.Metafunc(function)\n assert not metafunc.fixturenames\n repr(metafunc._calls)\n\n def test_function_basic(self) -> None:\n def func(arg1, arg2=\"qwe\"):\n pass\n\n metafunc = self.Metafunc(func)\n assert len(metafunc.fixturenames) == 1\n assert \"arg1\" in metafunc.fixturenames\n assert metafunc.function is func\n assert metafunc.cls is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_class_or_function_idval_TestMetafunc.test_notset_idval.assert__idval_NOTSET_a_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_class_or_function_idval_TestMetafunc.test_notset_idval.assert__idval_NOTSET_a_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 349, "end_line": 369, "span_ids": ["TestMetafunc.test_class_or_function_idval.TestClass:2", "TestMetafunc.test_notset_idval", "TestMetafunc.test_class_or_function_idval.TestClass", "TestMetafunc.test_class_or_function_idval"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_class_or_function_idval(self) -> None:\n \"\"\"Unit test for the expected behavior to obtain ids for parametrized\n values that are classes or functions: their __name__.\"\"\"\n\n class TestClass:\n pass\n\n def test_function():\n pass\n\n values = [(TestClass, \"TestClass\"), (test_function, \"test_function\")]\n for val, expected in values:\n assert _idval(val, \"a\", 6, None, nodeid=None, config=None) == expected\n\n def test_notset_idval(self) -> None:\n \"\"\"Test that a NOTSET value (used by an empty parameterset) generates\n a proper ID.\n\n Regression test for #7686.\n \"\"\"\n assert _idval(NOTSET, \"a\", 0, None, nodeid=None, config=None) == \"a0\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_autoname_TestMetafunc.test_idmaker_with_bytes_regex.assert_result_foo_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_autoname_TestMetafunc.test_idmaker_with_bytes_regex.assert_result_foo_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 371, "end_line": 388, "span_ids": ["TestMetafunc.test_idmaker_autoname", "TestMetafunc.test_idmaker_with_bytes_regex"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_autoname(self) -> None:\n \"\"\"#250\"\"\"\n result = idmaker(\n (\"a\", \"b\"), [pytest.param(\"string\", 1.0), pytest.param(\"st-ring\", 2.0)]\n )\n assert result == [\"string-1.0\", \"st-ring-2.0\"]\n\n result = idmaker(\n (\"a\", \"b\"), [pytest.param(object(), 1.0), pytest.param(object(), object())]\n )\n assert result == [\"a0-1.0\", \"a1-b1\"]\n # unicode mixing, issue250\n result = idmaker((\"a\", \"b\"), [pytest.param({}, b\"\\xc3\\xb4\")])\n assert result == [\"a0-\\\\xc3\\\\xb4\"]\n\n def test_idmaker_with_bytes_regex(self) -> None:\n result = idmaker((\"a\"), [pytest.param(re.compile(b\"foo\"), 1.0)])\n assert result == [\"foo\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_native_strings_TestMetafunc.test_idmaker_native_strings.assert_result_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/metafunc.py_TestMetafunc.test_idmaker_native_strings_TestMetafunc.test_idmaker_native_strings.assert_result_", "embedding": null, "metadata": {"file_path": "testing/python/metafunc.py", "file_name": "metafunc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 390, "end_line": 421, "span_ids": ["TestMetafunc.test_idmaker_native_strings"], "tokens": 257}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMetafunc:\n\n def test_idmaker_native_strings(self) -> None:\n result = idmaker(\n (\"a\", \"b\"),\n [\n pytest.param(1.0, -1.1),\n pytest.param(2, -202),\n pytest.param(\"three\", \"three hundred\"),\n pytest.param(True, False),\n pytest.param(None, None),\n pytest.param(re.compile(\"foo\"), re.compile(\"bar\")),\n pytest.param(str, int),\n pytest.param(list(\"six\"), [66, 66]),\n pytest.param({7}, set(\"seven\")),\n pytest.param(tuple(\"eight\"), (8, -8, 8)),\n pytest.param(b\"\\xc3\\xb4\", b\"name\"),\n pytest.param(b\"\\xc3\\xb4\", \"other\"),\n ],\n )\n assert result == [\n \"1.0--1.1\",\n 
\"2--202\",\n \"three-three hundred\",\n \"True-False\",\n \"None-None\",\n \"foo-bar\",\n \"str-int\",\n \"a7-b7\",\n \"a8-b8\",\n \"a9-b9\",\n \"\\\\xc3\\\\xb4-name\",\n \"\\\\xc3\\\\xb4-other\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_match_failure_string_quoting_TestRaises.test_match_failure_exact_string_message.assert_msg_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_match_failure_string_quoting_TestRaises.test_match_failure_exact_string_message.assert_msg_", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 215, "end_line": 232, "span_ids": ["TestRaises.test_match_failure_exact_string_message", "TestRaises.test_match_failure_string_quoting"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_match_failure_string_quoting(self):\n with pytest.raises(AssertionError) as excinfo:\n with pytest.raises(AssertionError, match=\"'foo\"):\n raise AssertionError(\"'bar\")\n (msg,) = excinfo.value.args\n assert msg == 'Regex pattern \"\\'foo\" does not match \"\\'bar\".'\n\n def test_match_failure_exact_string_message(self):\n message = \"Oh here is a message with (42) numbers in parameters\"\n with pytest.raises(AssertionError) as excinfo:\n with pytest.raises(AssertionError, match=message):\n raise AssertionError(message)\n (msg,) = excinfo.value.args\n assert msg == (\n \"Regex pattern 'Oh here is a message with (42) numbers in \"\n \"parameters' does not match 'Oh here is a message with (42) \"\n \"numbers in parameters'. 
Did you mean to `re.escape()` the regex?\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_match_wrong_type_TestRaises.test_raises_exception_looks_iterable.with_pytest_raises_.pytest_raises_ClassLooksI": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_match_wrong_type_TestRaises.test_raises_exception_looks_iterable.with_pytest_raises_.pytest_raises_ClassLooksI", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 234, "end_line": 259, "span_ids": ["TestRaises.test_raises_exception_looks_iterable.Meta", "TestRaises.test_raises_exception_looks_iterable.ClassLooksIterableException:2", "TestRaises.test_raises_exception_looks_iterable.ClassLooksIterableException", "TestRaises.test_raises_exception_looks_iterable.Meta.__getitem__", "TestRaises.test_raises_exception_looks_iterable", "TestRaises.test_raises_match_wrong_type"], "tokens": 182}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_raises_match_wrong_type(self):\n \"\"\"Raising an exception with the wrong type and match= given.\n\n pytest should throw the unexpected exception - the pattern match is not\n really relevant if we got a different exception.\n \"\"\"\n with pytest.raises(ValueError):\n with pytest.raises(IndexError, match=\"nomatch\"):\n int(\"asdf\")\n\n def test_raises_exception_looks_iterable(self):\n class Meta(type):\n def __getitem__(self, item):\n return 1 / 0\n\n def __len__(self):\n return 1\n\n class ClassLooksIterableException(Exception, metaclass=Meta):\n pass\n\n with pytest.raises(\n Failed,\n match=r\"DID NOT RAISE \",\n ):\n pytest.raises(ClassLooksIterableException, lambda: None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_with_raising_dunder_class_TestRaises.test_raises_context_manager_with_kwargs.assert_Unexpected_keywor": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_raises_with_raising_dunder_class_TestRaises.test_raises_context_manager_with_kwargs.assert_Unexpected_keywor", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 261, "end_line": 279, "span_ids": ["TestRaises.test_raises_with_raising_dunder_class.CrappyClass", "TestRaises.test_raises_with_raising_dunder_class.CrappyClass:2", "TestRaises.test_raises_context_manager_with_kwargs", "TestRaises.test_raises_with_raising_dunder_class"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_raises_with_raising_dunder_class(self) -> None:\n \"\"\"Test current behavior with regard to exceptions via __class__ (#4284).\"\"\"\n\n class CrappyClass(Exception):\n # Type ignored because it's bypassed intentionally.\n @property # type: ignore\n def __class__(self):\n assert False, \"via __class__\"\n\n with pytest.raises(AssertionError) as excinfo:\n with pytest.raises(CrappyClass()): # type: ignore[call-overload]\n pass\n assert \"via __class__\" in excinfo.value.args[0]\n\n def test_raises_context_manager_with_kwargs(self):\n with pytest.raises(TypeError) as excinfo:\n with pytest.raises(Exception, foo=\"bar\"): # type: ignore[call-overload]\n pass\n assert \"Unexpected keyword arguments\" in str(excinfo.value)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_expected_exception_is_not_a_baseexception_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/raises.py_TestRaises.test_expected_exception_is_not_a_baseexception_", "embedding": null, "metadata": {"file_path": "testing/python/raises.py", "file_name": "raises.py", "file_type": "text/x-python", "category": "implementation", "start_line": 281, "end_line": 299, "span_ids": ["TestRaises.test_expected_exception_is_not_a_baseexception.NotAnException", "TestRaises.test_expected_exception_is_not_a_baseexception", "TestRaises.test_expected_exception_is_not_a_baseexception.NotAnException:2"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRaises:\n\n def test_expected_exception_is_not_a_baseexception(self) -> None:\n with pytest.raises(TypeError) as excinfo:\n with pytest.raises(\"hello\"): # type: ignore[call-overload]\n pass # pragma: no cover\n assert \"must be a BaseException type, not str\" in str(excinfo.value)\n\n class NotAnException:\n pass\n\n with pytest.raises(TypeError) as excinfo:\n with pytest.raises(NotAnException): # type: ignore[type-var]\n pass # pragma: no cover\n assert \"must be a BaseException type, not NotAnException\" in str(excinfo.value)\n\n with pytest.raises(TypeError) as excinfo:\n with pytest.raises((\"hello\", NotAnException)): # type: ignore[arg-type]\n pass # pragma: no cover\n assert \"must be a BaseException type, not str\" in str(excinfo.value)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_from__pytest_pytester_imp_test_fixtures_in_module.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/python/show_fixtures_per_test.py_from__pytest_pytester_imp_test_fixtures_in_module.result_stdout_no_fnmatch_", "embedding": null, 
"metadata": {"file_path": "testing/python/show_fixtures_per_test.py", "file_name": "show_fixtures_per_test.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 36, "span_ids": ["test_fixtures_in_module", "test_no_items_should_not_show_output", "imports"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from _pytest.pytester import Pytester\n\n\ndef test_no_items_should_not_show_output(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--fixtures-per-test\")\n result.stdout.no_fnmatch_line(\"*fixtures used by*\")\n assert result.ret == 0\n\n\ndef test_fixtures_in_module(pytester: Pytester) -> None:\n p = pytester.makepyfile(\n '''\n import pytest\n @pytest.fixture\n def _arg0():\n \"\"\"hidden arg0 fixture\"\"\"\n @pytest.fixture\n def arg1():\n \"\"\"arg1 docstring\"\"\"\n def test_arg1(arg1):\n pass\n '''\n )\n\n result = pytester.runpytest(\"--fixtures-per-test\", p)\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\n \"*fixtures used by test_arg1*\",\n \"*(test_fixtures_in_module.py:9)*\",\n \"arg1\",\n \" arg1 docstring\",\n ]\n )\n result.stdout.no_fnmatch_line(\"*_arg0*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_collections_mock_config.return.Config_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_collections_mock_config.return.Config_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 26, "span_ids": ["mock_config", "mock_config.Config", "imports", "mock_config.Config.getoption"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import collections\nimport sys\nimport textwrap\nfrom typing import Any\nfrom typing import List\nfrom typing import MutableSequence\nfrom typing import Optional\n\nimport attr\n\nimport _pytest.assertion as plugin\nimport pytest\nfrom _pytest import outcomes\nfrom _pytest.assertion import truncate\nfrom _pytest.assertion import util\nfrom _pytest.pytester import Pytester\n\n\ndef mock_config(verbose=0):\n class Config:\n def getoption(self, name):\n if name == \"verbose\":\n return verbose\n raise KeyError(\"Not mocked out: %s\" % name)\n\n return Config()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_namedtuple_TestAssert_reprcompare_namedtuple.test_namedtuple.assert_lines_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_namedtuple_TestAssert_reprcompare_namedtuple.test_namedtuple.assert_lines_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 991, "end_line": 1011, "span_ids": ["TestAssert_reprcompare_namedtuple.test_namedtuple", "TestAssert_reprcompare_namedtuple"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_namedtuple:\n def test_namedtuple(self) -> None:\n NT = collections.namedtuple(\"NT\", [\"a\", \"b\"])\n\n left = NT(1, \"b\")\n right = NT(1, \"c\")\n\n lines = callequal(left, right)\n assert lines == [\n \"NT(a=1, b='b') == NT(a=1, b='c')\",\n \"\",\n \"Omitting 1 identical items, use -vv to show\",\n \"Differing attributes:\",\n \"['b']\",\n \"\",\n \"Drill down into differing attribute b:\",\n \" b: 'b' != 'c'\",\n \" - c\",\n \" + b\",\n \"Use -v to get the full diff\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_namedtuple.test_comparing_two_different_namedtuple_TestAssert_reprcompare_namedtuple.test_comparing_two_different_namedtuple.assert_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertion.py_TestAssert_reprcompare_namedtuple.test_comparing_two_different_namedtuple_TestAssert_reprcompare_namedtuple.test_comparing_two_different_namedtuple.assert_lines_", "embedding": null, "metadata": {"file_path": "testing/test_assertion.py", "file_name": "test_assertion.py", "file_type": "text/x-python", "category": "test", "start_line": 1013, "end_line": 1026, "span_ids": ["TestAssert_reprcompare_namedtuple.test_comparing_two_different_namedtuple"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssert_reprcompare_namedtuple:\n\n def test_comparing_two_different_namedtuple(self) -> None:\n NT1 = collections.namedtuple(\"NT1\", [\"a\", \"b\"])\n NT2 = collections.namedtuple(\"NT2\", [\"a\", \"b\"])\n\n left = NT1(1, \"b\")\n right = NT2(2, \"b\")\n\n lines = callequal(left, right)\n # Because the types are different, uses the generic sequence matcher.\n assert lines == [\n \"NT1(a=1, b='b') == NT2(a=2, b='b')\",\n \"At index 0 diff: 1 != 2\",\n \"Use -v to get the full diff\",\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_honors_pep_235_TestAssertionRewrite.test_honors_pep_235.pytester_runpytest_asse": 
{"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_honors_pep_235_TestAssertionRewrite.test_honors_pep_235.pytester_runpytest_asse", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 142, "end_line": 157, "span_ids": ["TestAssertionRewrite.test_honors_pep_235"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_honors_pep_235(self, pytester: Pytester, monkeypatch) -> None:\n # note: couldn't make it fail on macos with a single `sys.path` entry\n # note: these modules are named `test_*` to trigger rewriting\n pytester.makepyfile(test_y=\"x = 1\")\n xdir = pytester.mkdir(\"x\")\n pytester.mkpydir(str(xdir.joinpath(\"test_Y\")))\n xdir.joinpath(\"test_Y\").joinpath(\"__init__.py\").write_text(\"x = 2\")\n pytester.makepyfile(\n \"import test_y\\n\"\n \"import test_Y\\n\"\n \"def test():\\n\"\n \" assert test_y.x == 1\\n\"\n \" assert test_Y.x == 2\\n\"\n )\n monkeypatch.syspath_prepend(str(xdir))\n pytester.runpytest().assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_binary_op_TestAssertionRewrite.test_boolop_percent.assert_getmsg_f2_ass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_binary_op_TestAssertionRewrite.test_boolop_percent.assert_getmsg_f2_ass", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 454, "end_line": 476, "span_ids": ["TestAssertionRewrite.test_binary_op", "TestAssertionRewrite.test_boolop_percent"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_binary_op(self) -> None:\n def f1() -> None:\n x = 1\n y = -1\n assert x + y\n\n assert getmsg(f1) == \"assert (1 + -1)\"\n\n def f2() -> None:\n assert not 5 % 4\n\n assert getmsg(f2) == \"assert not (5 % 4)\"\n\n def test_boolop_percent(self) -> None:\n def f1() -> None:\n assert 3 % 2 and False\n\n assert getmsg(f1) == \"assert ((3 % 2) and False)\"\n\n def f2() -> None:\n assert False or 4 % 2\n\n assert getmsg(f2) == \"assert (False or (4 % 2))\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_at_operator_issue1290_TestAssertionRewrite.test_starred_with_side_effect.pytester_runpytest_asse": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewrite.test_at_operator_issue1290_TestAssertionRewrite.test_starred_with_side_effect.pytester_runpytest_asse", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 478, "end_line": 502, "span_ids": ["TestAssertionRewrite.test_starred_with_side_effect", "TestAssertionRewrite.test_at_operator_issue1290"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewrite:\n\n def test_at_operator_issue1290(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n class Matrix(object):\n def __init__(self, num):\n self.num = num\n def __matmul__(self, other):\n return self.num * other.num\n\n def test_multmat_operator():\n assert Matrix(2) @ Matrix(3) == 6\"\"\"\n )\n pytester.runpytest().assert_outcomes(passed=1)\n\n def test_starred_with_side_effect(self, pytester: Pytester) -> None:\n \"\"\"See #4412\"\"\"\n pytester.makepyfile(\n \"\"\"\\\n def test():\n f = lambda x: x\n x = iter([1, 2, 3])\n assert 2 * next(x) == f(*[next(x)])\n \"\"\"\n )\n pytester.runpytest().assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_zipfile_TestRewriteOnImport.test_zipfile.assert_pytester_runpytest": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_zipfile_TestRewriteOnImport.test_zipfile.assert_pytester_runpytest", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 753, "end_line": 770, "span_ids": ["TestRewriteOnImport.test_zipfile"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_zipfile(self, pytester: Pytester) -> None:\n z = pytester.path.joinpath(\"myzip.zip\")\n z_fn = str(z)\n f = zipfile.ZipFile(z_fn, \"w\")\n try:\n f.writestr(\"test_gum/__init__.py\", \"\")\n f.writestr(\"test_gum/test_lizard.py\", \"\")\n finally:\n f.close()\n z.chmod(256)\n pytester.makepyfile(\n \"\"\"\n import sys\n sys.path.append(%r)\n import test_gum.test_lizard\"\"\"\n % (z_fn,)\n )\n assert pytester.runpytest().ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_readonly_TestRewriteOnImport.test_dont_write_bytecode.assert_pytester_runpytest": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_readonly_TestRewriteOnImport.test_dont_write_bytecode.assert_pytester_runpytest", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 772, "end_line": 799, "span_ids": ["TestRewriteOnImport.test_dont_write_bytecode", "TestRewriteOnImport.test_readonly"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_readonly(self, pytester: Pytester) -> None:\n sub = pytester.mkdir(\"testing\")\n sub.joinpath(\"test_readonly.py\").write_bytes(\n b\"\"\"\ndef test_rewritten():\n assert \"@py_builtins\" in globals()\n \"\"\",\n )\n old_mode = sub.stat().st_mode\n sub.chmod(320)\n try:\n assert pytester.runpytest().ret == 0\n finally:\n sub.chmod(old_mode)\n\n def test_dont_write_bytecode(self, pytester: Pytester, monkeypatch) -> None:\n monkeypatch.delenv(\"PYTHONPYCACHEPREFIX\", raising=False)\n\n pytester.makepyfile(\n \"\"\"\n import os\n def test_no_bytecode():\n assert \"__pycache__\" in __cached__\n assert not os.path.exists(__cached__)\n assert not os.path.exists(os.path.dirname(__cached__))\"\"\"\n )\n monkeypatch.setenv(\"PYTHONDONTWRITEBYTECODE\", \"1\")\n assert pytester.runpytest_subprocess().ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_orphaned_pyc_file_TestRewriteOnImport.test_orphaned_pyc_file.assert_pytester_runpytest": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_orphaned_pyc_file_TestRewriteOnImport.test_orphaned_pyc_file.assert_pytester_runpytest", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 801, "end_line": 828, "span_ids": ["TestRewriteOnImport.test_orphaned_pyc_file"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_orphaned_pyc_file(self, pytester: Pytester, monkeypatch) -> None:\n monkeypatch.delenv(\"PYTHONPYCACHEPREFIX\", raising=False)\n monkeypatch.setattr(sys, \"pycache_prefix\", None, raising=False)\n\n pytester.makepyfile(\n \"\"\"\n import orphan\n def test_it():\n assert orphan.value == 17\n \"\"\"\n )\n pytester.makepyfile(\n orphan=\"\"\"\n value = 
17\n \"\"\"\n )\n py_compile.compile(\"orphan.py\")\n os.remove(\"orphan.py\")\n\n # Python 3 puts the .pyc files in a __pycache__ directory, and will\n # not import from there without source. It will import a .pyc from\n # the source location though.\n if not os.path.exists(\"orphan.pyc\"):\n pycs = glob.glob(\"__pycache__/orphan.*.pyc\")\n assert len(pycs) == 1\n os.rename(pycs[0], \"orphan.pyc\")\n\n assert pytester.runpytest().ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_package_TestRewriteOnImport.test_rewrite_module_imported_from_conftest.assert_pytester_runpytest": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestRewriteOnImport.test_package_TestRewriteOnImport.test_rewrite_module_imported_from_conftest.assert_pytester_runpytest", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 872, "end_line": 918, "span_ids": ["TestRewriteOnImport.test_translate_newlines", "TestRewriteOnImport.test_package_without__init__py", "TestRewriteOnImport.test_package", "TestRewriteOnImport.test_rewrite_warning", "TestRewriteOnImport.test_rewrite_module_imported_from_conftest"], "tokens": 419}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRewriteOnImport:\n\n def test_package(self, pytester: Pytester) -> None:\n pkg = pytester.path.joinpath(\"pkg\")\n pkg.mkdir()\n pkg.joinpath(\"__init__.py\")\n pkg.joinpath(\"test_blah.py\").write_text(\n \"\"\"\ndef test_rewritten():\n assert \"@py_builtins\" in globals()\"\"\"\n )\n assert pytester.runpytest().ret == 0\n\n def test_translate_newlines(self, pytester: Pytester) -> None:\n content = \"def test_rewritten():\\r\\n assert '@py_builtins' in globals()\"\n b = content.encode(\"utf-8\")\n pytester.path.joinpath(\"test_newlines.py\").write_bytes(b)\n assert pytester.runpytest().ret == 0\n\n def test_package_without__init__py(self, pytester: Pytester) -> None:\n pkg = pytester.mkdir(\"a_package_without_init_py\")\n pkg.joinpath(\"module.py\").touch()\n pytester.makepyfile(\"import a_package_without_init_py.module\")\n assert pytester.runpytest().ret == ExitCode.NO_TESTS_COLLECTED\n\n def test_rewrite_warning(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n pytest.register_assert_rewrite(\"_pytest\")\n \"\"\"\n )\n # needs to be a subprocess because pytester explicitly disables this warning\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines([\"*Module already imported*: _pytest\"])\n\n def test_rewrite_module_imported_from_conftest(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import test_rewrite_module_imported\n \"\"\"\n )\n pytester.makepyfile(\n test_rewrite_module_imported=\"\"\"\n def test_rewritten():\n assert \"@py_builtins\" in globals()\n \"\"\"\n )\n assert pytester.runpytest_subprocess().ret == 0", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_read_pyc_more_invalid_TestAssertionRewriteHookDetails.test_read_pyc_more_invalid.None_5": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionRewriteHookDetails.test_read_pyc_more_invalid_TestAssertionRewriteHookDetails.test_read_pyc_more_invalid.None_5", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1071, "end_line": 1117, "span_ids": ["TestAssertionRewriteHookDetails.test_read_pyc_more_invalid"], "tokens": 478}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionRewriteHookDetails:\n\n @pytest.mark.skipif(\n sys.version_info < (3, 7), reason=\"Only the Python 3.7 format for simplicity\"\n )\n def test_read_pyc_more_invalid(self, tmp_path: Path) -> None:\n from _pytest.assertion.rewrite import _read_pyc\n\n source = tmp_path / \"source.py\"\n pyc = tmp_path / \"source.pyc\"\n\n source_bytes = b\"def test(): pass\\n\"\n source.write_bytes(source_bytes)\n\n magic = importlib.util.MAGIC_NUMBER\n\n flags = b\"\\x00\\x00\\x00\\x00\"\n\n mtime = b\"\\x58\\x3c\\xb0\\x5f\"\n mtime_int = int.from_bytes(mtime, \"little\")\n os.utime(source, (mtime_int, mtime_int))\n\n size = len(source_bytes).to_bytes(4, \"little\")\n\n code = marshal.dumps(compile(source_bytes, str(source), \"exec\"))\n\n # Good header.\n pyc.write_bytes(magic + flags + mtime + size + code)\n assert _read_pyc(source, pyc, print) is not None\n\n # Too short.\n pyc.write_bytes(magic + flags + mtime)\n assert _read_pyc(source, pyc, print) is None\n\n # Bad magic.\n pyc.write_bytes(b\"\\x12\\x34\\x56\\x78\" + flags + mtime + size + code)\n assert _read_pyc(source, pyc, print) is None\n\n # Unsupported flags.\n pyc.write_bytes(magic + b\"\\x00\\xff\\x00\\x00\" + mtime + size + code)\n assert _read_pyc(source, pyc, print) is None\n\n # Bad mtime.\n pyc.write_bytes(magic + flags + b\"\\x58\\x3d\\xb0\\x5f\" + size + code)\n assert _read_pyc(source, pyc, print) is None\n\n # Bad size.\n pyc.write_bytes(magic + flags + mtime + b\"\\x99\\x00\\x00\\x00\" + code)\n assert _read_pyc(source, pyc, print) is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass_TestAssertionPass.hook_on.pytester_makeconftest_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_assertrewrite.py_TestAssertionPass_TestAssertionPass.hook_on.pytester_makeconftest_", "embedding": null, "metadata": {"file_path": "testing/test_assertrewrite.py", "file_name": "test_assertrewrite.py", "file_type": "text/x-python", "category": "test", "start_line": 1415, "end_line": 1431, "span_ids": ["TestAssertionPass.hook_on", "TestAssertionPass", 
"TestAssertionPass.test_option_default", "TestAssertionPass.flag_on"], "tokens": 144}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAssertionPass:\n def test_option_default(self, pytester: Pytester) -> None:\n config = pytester.parseconfig()\n assert config.getini(\"enable_assertion_pass_hook\") is False\n\n @pytest.fixture\n def flag_on(self, pytester: Pytester):\n pytester.makeini(\"[pytest]\\nenable_assertion_pass_hook = True\\n\")\n\n @pytest.fixture\n def hook_on(self, pytester: Pytester):\n pytester.makeconftest(\n \"\"\"\\\n def pytest_assertion_pass(item, lineno, orig, expl):\n raise Exception(\"Assertion Passed: {} {} at line {}\".format(orig, expl, lineno))\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cache_writefail_permissions_TestNewAPI.test_cache_writefail_permissions.try_.finally_.cache_dir_chmod_mode_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cache_writefail_permissions_TestNewAPI.test_cache_writefail_permissions.try_.finally_.cache_dir_chmod_mode_", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 47, "end_line": 63, "span_ids": ["TestNewAPI.test_cache_writefail_permissions"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewAPI:\n\n @pytest.mark.skipif(sys.platform.startswith(\"win\"), reason=\"no chmod on windows\")\n @pytest.mark.filterwarnings(\n \"ignore:could not create cache path:pytest.PytestWarning\"\n )\n def test_cache_writefail_permissions(self, pytester: Pytester) -> None:\n pytester.makeini(\"[pytest]\")\n cache_dir = pytester.path.joinpath(\".pytest_cache\")\n cache_dir.mkdir()\n mode = cache_dir.stat().st_mode\n cache_dir.chmod(0)\n try:\n config = pytester.parseconfigure()\n cache = config.cache\n assert cache is not None\n cache.set(\"test/broken\", [])\n finally:\n cache_dir.chmod(mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cache_failure_warns_TestNewAPI.test_cache_failure_warns.try_.finally_.cache_dir_chmod_mode_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_cache_failure_warns_TestNewAPI.test_cache_failure_warns.try_.finally_.cache_dir_chmod_mode_", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": 
"text/x-python", "category": "test", "start_line": 65, "end_line": 92, "span_ids": ["TestNewAPI.test_cache_failure_warns"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewAPI:\n\n @pytest.mark.skipif(sys.platform.startswith(\"win\"), reason=\"no chmod on windows\")\n @pytest.mark.filterwarnings(\"default\")\n def test_cache_failure_warns(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.setenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", \"1\")\n cache_dir = pytester.path.joinpath(\".pytest_cache\")\n cache_dir.mkdir()\n mode = cache_dir.stat().st_mode\n cache_dir.chmod(0)\n try:\n pytester.makepyfile(\"def test_error(): raise Exception\")\n result = pytester.runpytest()\n assert result.ret == 1\n # warnings from nodeids, lastfailed, and stepwise\n result.stdout.fnmatch_lines(\n [\n # Validate location/stacklevel of warning from cacheprovider.\n \"*= warnings summary =*\",\n \"*/cacheprovider.py:*\",\n \" */cacheprovider.py:*: PytestCacheWarning: could not create cache path \"\n \"{}/v/cache/nodeids\".format(cache_dir),\n ' config.cache.set(\"cache/nodeids\", sorted(self.cached_nodeids))',\n \"*1 failed, 3 warnings in*\",\n ]\n )\n finally:\n cache_dir.chmod(mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_rel_cache_dir_TestNewAPI.test_custom_rel_cache_dir.assert_pytester_path_join": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_rel_cache_dir_TestNewAPI.test_custom_rel_cache_dir.assert_pytester_path_join", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 129, "end_line": 141, "span_ids": ["TestNewAPI.test_custom_rel_cache_dir"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewAPI:\n\n def test_custom_rel_cache_dir(self, pytester: Pytester) -> None:\n rel_cache_dir = os.path.join(\"custom_cache_dir\", \"subdir\")\n pytester.makeini(\n \"\"\"\n [pytest]\n cache_dir = {cache_dir}\n \"\"\".format(\n cache_dir=rel_cache_dir\n )\n )\n pytester.makepyfile(test_errored=\"def test_error():\\n assert False\")\n pytester.runpytest()\n assert pytester.path.joinpath(rel_cache_dir).is_dir()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_abs_cache_dir_TestNewAPI.test_custom_abs_cache_dir.assert_abs_cache_dir_is_d": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_abs_cache_dir_TestNewAPI.test_custom_abs_cache_dir.assert_abs_cache_dir_is_d", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 143, "end_line": 158, "span_ids": ["TestNewAPI.test_custom_abs_cache_dir"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewAPI:\n\n def test_custom_abs_cache_dir(\n self, pytester: Pytester, tmp_path_factory: TempPathFactory\n ) -> None:\n tmp = tmp_path_factory.mktemp(\"tmp\")\n abs_cache_dir = tmp / \"custom_cache_dir\"\n pytester.makeini(\n \"\"\"\n [pytest]\n cache_dir = {cache_dir}\n \"\"\".format(\n cache_dir=abs_cache_dir\n )\n )\n pytester.makepyfile(test_errored=\"def test_error():\\n assert False\")\n pytester.runpytest()\n assert abs_cache_dir.is_dir()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_cache_dir_with_env_var_TestNewAPI.test_custom_cache_dir_with_env_var.assert_pytester_path_join": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestNewAPI.test_custom_cache_dir_with_env_var_TestNewAPI.test_custom_cache_dir_with_env_var.assert_pytester_path_join", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 160, "end_line": 174, "span_ids": ["TestNewAPI.test_custom_cache_dir_with_env_var"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNewAPI:\n\n def test_custom_cache_dir_with_env_var(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.setenv(\"env_var\", \"custom_cache_dir\")\n pytester.makeini(\n \"\"\"\n [pytest]\n cache_dir = {cache_dir}\n \"\"\".format(\n cache_dir=\"$env_var\"\n )\n )\n pytester.makepyfile(test_errored=\"def test_error():\\n assert False\")\n pytester.runpytest()\n assert pytester.path.joinpath(\"custom_cache_dir\").is_dir()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_packages_TestLastFailed.test_packages.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_TestLastFailed.test_packages_TestLastFailed.test_packages.None_3", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": 
"text/x-python", "category": "test", "start_line": 1024, "end_line": 1052, "span_ids": ["TestLastFailed.test_packages"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLastFailed:\n\n def test_packages(self, pytester: Pytester) -> None:\n \"\"\"Regression test for #7758.\n\n The particular issue here was that Package nodes were included in the\n filtering, being themselves Modules for the __init__.py, even if they\n had failed Modules in them.\n\n The tests includes a test in an __init__.py file just to make sure the\n fix doesn't somehow regress that, it is not critical for the issue.\n \"\"\"\n pytester.makepyfile(\n **{\n \"__init__.py\": \"\",\n \"a/__init__.py\": \"def test_a_init(): assert False\",\n \"a/test_one.py\": \"def test_1(): assert False\",\n \"b/__init__.py\": \"\",\n \"b/test_two.py\": \"def test_2(): assert False\",\n },\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n python_files = *.py\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(failed=3)\n result = pytester.runpytest(\"--lf\")\n result.assert_outcomes(failed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_does_not_create_boilerplate_in_existing_dirs_test_does_not_create_boilerplate_in_existing_dirs.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_does_not_create_boilerplate_in_existing_dirs_test_does_not_create_boilerplate_in_existing_dirs.None_2", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": "test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 1211, "end_line": 1226, "span_ids": ["test_does_not_create_boilerplate_in_existing_dirs"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_does_not_create_boilerplate_in_existing_dirs(pytester: Pytester) -> None:\n from _pytest.cacheprovider import Cache\n\n pytester.makeini(\n \"\"\"\n [pytest]\n cache_dir = .\n \"\"\"\n )\n config = pytester.parseconfig()\n cache = Cache.for_config(config, _ispytest=True)\n cache.set(\"foo\", \"bar\")\n\n assert os.path.isdir(\"v\") # cache contents\n assert not os.path.exists(\".gitignore\")\n assert not os.path.exists(\"README.md\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cachedir_tag_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_cacheprovider.py_test_cachedir_tag_", "embedding": null, "metadata": {"file_path": "testing/test_cacheprovider.py", "file_name": 
"test_cacheprovider.py", "file_type": "text/x-python", "category": "test", "start_line": 1229, "end_line": 1239, "span_ids": ["test_cachedir_tag"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_cachedir_tag(pytester: Pytester) -> None:\n \"\"\"Ensure we automatically create CACHEDIR.TAG file in the pytest_cache directory (#4278).\"\"\"\n from _pytest.cacheprovider import Cache\n from _pytest.cacheprovider import CACHEDIR_TAG_CONTENT\n\n config = pytester.parseconfig()\n cache = Cache.for_config(config, _ispytest=True)\n cache.set(\"foo\", \"bar\")\n cachedir_tag_path = cache._cachedir.joinpath(\"CACHEDIR.TAG\")\n assert cachedir_tag_path.read_bytes() == CACHEDIR_TAG_CONTENT", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capsyscapfdbinary_TestCaptureFixture.test_cafd_preserves_newlines.assert_out_endswith_nl_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_capsyscapfdbinary_TestCaptureFixture.test_cafd_preserves_newlines.assert_out_endswith_nl_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 488, "end_line": 533, "span_ids": ["TestCaptureFixture.test_stdfd_functional", "TestCaptureFixture.test_cafd_preserves_newlines", "TestCaptureFixture.test_capsyscapfdbinary", "TestCaptureFixture.test_capture_is_represented_on_failure_issue128"], "tokens": 359}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n def test_capsyscapfdbinary(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n def test_one(capsys, capfdbinary):\n pass\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines(\n [\"*ERROR*setup*test_one*\", \"E*capfdbinary*capsys*same*time*\", \"*1 error*\"]\n )\n\n @pytest.mark.parametrize(\"method\", [\"sys\", \"fd\"])\n def test_capture_is_represented_on_failure_issue128(\n self, pytester: Pytester, method\n ) -> None:\n p = pytester.makepyfile(\n \"\"\"\\\n def test_hello(cap{}):\n print(\"xxx42xxx\")\n assert 0\n \"\"\".format(\n method\n )\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"xxx42xxx\"])\n\n def test_stdfd_functional(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\\\n def test_hello(capfd):\n import os\n os.write(1, b\"42\")\n out, err = capfd.readouterr()\n assert out.startswith(\"42\")\n capfd.close()\n \"\"\"\n )\n reprec.assertoutcome(passed=1)\n\n @pytest.mark.parametrize(\"nl\", (\"\\n\", \"\\r\\n\", \"\\r\"))\n def test_cafd_preserves_newlines(self, capfd, nl) -> None:\n print(\"test\", end=nl)\n out, err = capfd.readouterr()\n assert 
out.endswith(nl)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_disabled_capture_fixture_twice_TestCaptureFixture.test_disabled_capture_fixture_twice.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestCaptureFixture.test_disabled_capture_fixture_twice_TestCaptureFixture.test_disabled_capture_fixture_twice.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 650, "end_line": 676, "span_ids": ["TestCaptureFixture.test_disabled_capture_fixture_twice"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCaptureFixture:\n\n def test_disabled_capture_fixture_twice(self, pytester: Pytester) -> None:\n \"\"\"Test that an inner disabled() exit doesn't undo an outer disabled().\n\n Issue #7148.\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n def test_disabled(capfd):\n print('captured before')\n with capfd.disabled():\n print('while capture is disabled 1')\n with capfd.disabled():\n print('while capture is disabled 2')\n print('while capture is disabled 1 after')\n print('captured after')\n assert capfd.readouterr() == ('captured before\\\\ncaptured after\\\\n', '')\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines(\n [\n \"*while capture is disabled 1\",\n \"*while capture is disabled 2\",\n \"*while capture is disabled 1 after\",\n ],\n consecutive=True,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestTeeCaptureIO_test_dontreadfrominput._just_for_completeness": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_TestTeeCaptureIO_test_dontreadfrominput._just_for_completeness", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 870, "end_line": 900, "span_ids": ["TestTeeCaptureIO.test_text", "test_dontreadfrominput", "TestTeeCaptureIO", "TestTeeCaptureIO.test_unicode_and_str_mixture"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTeeCaptureIO(TestCaptureIO):\n def test_text(self) -> None:\n sio = io.StringIO()\n f = capture.TeeCaptureIO(sio)\n f.write(\"hello\")\n s1 = f.getvalue()\n assert s1 == \"hello\"\n s2 = sio.getvalue()\n assert s2 == s1\n f.close()\n sio.close()\n\n def test_unicode_and_str_mixture(self) -> None:\n sio = 
io.StringIO()\n f = capture.TeeCaptureIO(sio)\n f.write(\"\\u00f6\")\n pytest.raises(TypeError, f.write, b\"hello\")\n\n\ndef test_dontreadfrominput() -> None:\n from _pytest.capture import DontReadFromInput\n\n f = DontReadFromInput()\n assert f.buffer is f\n assert not f.isatty()\n pytest.raises(OSError, f.read)\n pytest.raises(OSError, f.readlines)\n iter_f = iter(f)\n pytest.raises(OSError, next, iter_f)\n pytest.raises(UnsupportedOperation, f.fileno)\n f.close() # just for completeness", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_captureresult_test_captureresult.assert_cr__replace_err_r": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_captureresult_test_captureresult.assert_cr__replace_err_r", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 903, "end_line": 930, "span_ids": ["test_captureresult"], "tokens": 289}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_captureresult() -> None:\n cr = CaptureResult(\"out\", \"err\")\n assert len(cr) == 2\n assert cr.out == \"out\"\n assert cr.err == \"err\"\n out, err = cr\n assert out == \"out\"\n assert err == \"err\"\n assert cr[0] == \"out\"\n assert cr[1] == \"err\"\n assert cr == cr\n assert cr == CaptureResult(\"out\", \"err\")\n assert cr != CaptureResult(\"wrong\", \"err\")\n assert cr == (\"out\", \"err\")\n assert cr != (\"out\", \"wrong\")\n assert hash(cr) == hash(CaptureResult(\"out\", \"err\"))\n assert hash(cr) == hash((\"out\", \"err\"))\n assert hash(cr) != hash((\"out\", \"wrong\"))\n assert cr < (\"z\",)\n assert cr < (\"z\", \"b\")\n assert cr < (\"z\", \"b\", \"c\")\n assert cr.count(\"err\") == 1\n assert cr.count(\"wrong\") == 0\n assert cr.index(\"err\") == 1\n with pytest.raises(ValueError):\n assert cr.index(\"wrong\") == 0\n assert next(iter(cr)) == \"out\"\n assert cr._replace(err=\"replaced\") == (\"out\", \"replaced\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_tmpfile_lsof_check.assert_len2_len1_3_o": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_tmpfile_lsof_check.assert_len2_len1_3_o", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 933, "end_line": 953, "span_ids": ["lsof_check", "tmpfile"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"@pytest.fixture\ndef tmpfile(pytester: Pytester) -> Generator[BinaryIO, None, None]:\n f = pytester.makepyfile(\"\").open(\"wb+\")\n yield f\n if not f.closed:\n f.close()\n\n\n@contextlib.contextmanager\ndef lsof_check():\n pid = os.getpid()\n try:\n out = subprocess.check_output((\"lsof\", \"-p\", str(pid))).decode()\n except (OSError, subprocess.CalledProcessError, UnicodeDecodeError) as exc:\n # about UnicodeDecodeError, see note on pytester\n pytest.skip(f\"could not run 'lsof' ({exc!r})\")\n yield\n out2 = subprocess.check_output((\"lsof\", \"-p\", str(pid))).decode()\n len1 = len([x for x in out.split(\"\\n\") if \"REG\" in x])\n len2 = len([x for x in out2.split(\"\\n\") if \"REG\" in x])\n assert len2 < len1 + 3, out2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_not_started_but_reset_test_fdcapture_tmpfile_remains_the_same.assert_capfile2_capfil": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_capture_not_started_but_reset_test_fdcapture_tmpfile_remains_the_same.assert_capfile2_capfil", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1317, "end_line": 1350, "span_ids": ["test_capsys_results_accessible_by_attribute", "test_fdcapture_tmpfile_remains_the_same", "test_capture_not_started_but_reset", "test_using_capsys_fixture_works_with_sys_stdout_encoding"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_capture_not_started_but_reset() -> None:\n capsys = StdCapture()\n capsys.stop_capturing()\n\n\ndef test_using_capsys_fixture_works_with_sys_stdout_encoding(\n capsys: CaptureFixture[str],\n) -> None:\n test_text = \"test text\"\n\n print(test_text.encode(sys.stdout.encoding, \"replace\"))\n (out, err) = capsys.readouterr()\n assert out\n assert err == \"\"\n\n\ndef test_capsys_results_accessible_by_attribute(capsys: CaptureFixture[str]) -> None:\n sys.stdout.write(\"spam\")\n sys.stderr.write(\"eggs\")\n capture_result = capsys.readouterr()\n assert capture_result.out == \"spam\"\n assert capture_result.err == \"eggs\"\n\n\ndef test_fdcapture_tmpfile_remains_the_same() -> None:\n cap = StdCaptureFD(out=False, err=True)\n try:\n cap.start_capturing()\n capfile = cap.err.tmpfile\n cap.readouterr()\n finally:\n cap.stop_capturing()\n capfile2 = cap.err.tmpfile\n assert capfile2 == capfile", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_close_and_capture_again_test_close_and_capture_again.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_close_and_capture_again_test_close_and_capture_again.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", 
"file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1353, "end_line": 1372, "span_ids": ["test_close_and_capture_again"], "tokens": 109}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_close_and_capture_again(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import os\n def test_close():\n os.close(1)\n def test_capture_again():\n os.write(1, b\"hello\\\\n\")\n assert 0\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stdout.fnmatch_lines(\n \"\"\"\n *test_capture_again*\n *assert 0*\n *stdout*\n *hello*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_error_attribute_issue555_test_dontreadfrominput_has_encoding.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_error_attribute_issue555_test_dontreadfrominput_has_encoding.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1420, "end_line": 1463, "span_ids": ["test_py36_windowsconsoleio_workaround_non_standard_streams", "test_py36_windowsconsoleio_workaround_non_standard_streams.DummyStream.write", "test_error_attribute_issue555", "test_dontreadfrominput_has_encoding", "test_py36_windowsconsoleio_workaround_non_standard_streams.DummyStream"], "tokens": 285}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_error_attribute_issue555(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n def test_capattr():\n assert sys.stdout.errors == \"replace\"\n assert sys.stderr.errors == \"replace\"\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)\n\n\n@pytest.mark.skipif(\n not sys.platform.startswith(\"win\"),\n reason=\"only on windows\",\n)\ndef test_py36_windowsconsoleio_workaround_non_standard_streams() -> None:\n \"\"\"\n Ensure _py36_windowsconsoleio_workaround function works with objects that\n do not implement the full ``io``-based stream protocol, for example execnet channels (#2666).\n \"\"\"\n from _pytest.capture import _py36_windowsconsoleio_workaround\n\n class DummyStream:\n def write(self, s):\n pass\n\n stream = cast(TextIO, DummyStream())\n _py36_windowsconsoleio_workaround(stream)\n\n\ndef test_dontreadfrominput_has_encoding(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import sys\n def test_capattr():\n # should not raise AttributeError\n assert sys.stdout.encoding\n assert sys.stderr.encoding\n \"\"\"\n )\n reprec = pytester.inline_run()\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_crash_on_closing_tmpfile_py27_test_crash_on_closing_tmpfile_py27.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_capture.py_test_crash_on_closing_tmpfile_py27_test_crash_on_closing_tmpfile_py27.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_capture.py", "file_name": "test_capture.py", "file_type": "text/x-python", "category": "test", "start_line": 1466, "end_line": 1500, "span_ids": ["test_crash_on_closing_tmpfile_py27"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_crash_on_closing_tmpfile_py27(\n pytester: Pytester, monkeypatch: MonkeyPatch\n) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import threading\n import sys\n\n printing = threading.Event()\n\n def spam():\n f = sys.stderr\n print('SPAMBEFORE', end='', file=f)\n printing.set()\n\n while True:\n try:\n f.flush()\n except (OSError, ValueError):\n break\n\n def test_spam_in_thread():\n t = threading.Thread(target=spam)\n t.daemon = True\n t.start()\n\n printing.wait()\n \"\"\"\n )\n # Do not consider plugins like hypothesis, which might output to stderr.\n monkeypatch.setenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", \"1\")\n result = pytester.runpytest_subprocess(str(p))\n assert result.ret == 0\n assert result.stderr.str() == \"\"\n result.stdout.no_fnmatch_line(\"*OSError*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_os_ensure_file.return.file_path": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_os_ensure_file.return.file_path", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 27, "span_ids": ["ensure_file", "imports"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport pprint\nimport shutil\nimport sys\nimport textwrap\nfrom pathlib import Path\nfrom typing import List\n\nimport py.path\n\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.fixtures import FixtureRequest\nfrom _pytest.main import _in_venv\nfrom _pytest.main import Session\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.nodes import Item\nfrom _pytest.pathlib import symlink_or_skip\nfrom _pytest.pytester import HookRecorder\nfrom _pytest.pytester import Pytester\n\n\ndef ensure_file(file_path: Path) -> Path:\n \"\"\"Ensure that file exists\"\"\"\n file_path.parent.mkdir(parents=True, exist_ok=True)\n file_path.touch(exist_ok=True)\n return 
file_path", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector_TestCollector.test_check_equality.assert_pytester_collect_b": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector_TestCollector.test_check_equality.assert_pytester_collect_b", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 30, "end_line": 67, "span_ids": ["TestCollector", "TestCollector.test_check_equality", "TestCollector.test_collect_versus_item"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollector:\n def test_collect_versus_item(self) -> None:\n from pytest import Collector\n from pytest import Item\n\n assert not issubclass(Collector, Item)\n assert not issubclass(Item, Collector)\n\n def test_check_equality(self, pytester: Pytester) -> None:\n modcol = pytester.getmodulecol(\n \"\"\"\n def test_pass(): pass\n def test_fail(): assert 0\n \"\"\"\n )\n fn1 = pytester.collect_by_name(modcol, \"test_pass\")\n assert isinstance(fn1, pytest.Function)\n fn2 = pytester.collect_by_name(modcol, \"test_pass\")\n assert isinstance(fn2, pytest.Function)\n\n assert fn1 == fn2\n assert fn1 != modcol\n assert hash(fn1) == hash(fn2)\n\n fn3 = pytester.collect_by_name(modcol, \"test_fail\")\n assert isinstance(fn3, pytest.Function)\n assert not (fn1 == fn3)\n assert fn1 != fn3\n\n for fn in fn1, fn2, fn3:\n assert isinstance(fn, pytest.Function)\n assert fn != 3 # type: ignore[comparison-overlap]\n assert fn != modcol\n assert fn != [1, 2, 3] # type: ignore[comparison-overlap]\n assert [1, 2, 3] != fn # type: ignore[comparison-overlap]\n assert modcol != fn\n\n assert pytester.collect_by_name(modcol, \"doesnotexist\") is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_getparent_TestCollector.test_getparent.assert_class_parent_is_cl": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollector.test_getparent_TestCollector.test_getparent.assert_class_parent_is_cl", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 69, "end_line": 91, "span_ids": ["TestCollector.test_getparent"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollector:\n\n def test_getparent(self, pytester: Pytester) -> None:\n modcol = 
pytester.getmodulecol(\n \"\"\"\n class TestClass:\n def test_foo(self):\n pass\n \"\"\"\n )\n cls = pytester.collect_by_name(modcol, \"TestClass\")\n assert isinstance(cls, pytest.Class)\n instance = pytester.collect_by_name(cls, \"()\")\n assert isinstance(instance, pytest.Instance)\n fn = pytester.collect_by_name(instance, \"test_foo\")\n assert isinstance(fn, pytest.Function)\n\n module_parent = fn.getparent(pytest.Module)\n assert module_parent is modcol\n\n function_parent = fn.getparent(pytest.Function)\n assert function_parent is fn\n\n class_parent = fn.getparent(pytest.Class)\n assert class_parent is cls", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectPluginHookRelay_TestCollectPluginHookRelay.test_pytest_collect_file.assert_wascalled_0_ext_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestCollectPluginHookRelay_TestCollectPluginHookRelay.test_pytest_collect_file.assert_wascalled_0_ext_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 269, "end_line": 282, "span_ids": ["TestCollectPluginHookRelay.test_pytest_collect_file", "TestCollectPluginHookRelay", "TestCollectPluginHookRelay.test_pytest_collect_file.Plugin", "TestCollectPluginHookRelay.test_pytest_collect_file.Plugin.pytest_collect_file"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectPluginHookRelay:\n def test_pytest_collect_file(self, pytester: Pytester) -> None:\n wascalled = []\n\n class Plugin:\n def pytest_collect_file(self, path):\n if not path.basename.startswith(\".\"):\n # Ignore hidden files, e.g. 
.testmondata.\n wascalled.append(path)\n\n pytester.makefile(\".abc\", \"xyz\")\n pytest.main(pytester.path, plugins=[Plugin()])\n assert len(wascalled) == 1\n assert wascalled[0].ext == \".abc\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession_TestSession.get_reported_items.return._": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession_TestSession.get_reported_items.return._", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 457, "end_line": 481, "span_ids": ["TestSession.test_collect_topdir", "TestSession", "TestSession.get_reported_items"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSession:\n def test_collect_topdir(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\"def test_func(): pass\")\n id = \"::\".join([p.name, \"test_func\"])\n # XXX migrate to collectonly? (see below)\n config = pytester.parseconfig(id)\n topdir = pytester.path\n rcol = Session.from_config(config)\n assert topdir == rcol.fspath\n # rootid = rcol.nodeid\n # root2 = rcol.perform_collect([rcol.nodeid], genitems=False)[0]\n # assert root2 == rcol, rootid\n colitems = rcol.perform_collect([rcol.nodeid], genitems=False)\n assert len(colitems) == 1\n assert colitems[0].fspath == p\n\n def get_reported_items(self, hookrec: HookRecorder) -> List[Item]:\n \"\"\"Return pytest.Item instances reported by the pytest_collectreport hook\"\"\"\n calls = hookrec.getcalls(\"pytest_collectreport\")\n return [\n x\n for call in calls\n for x in call.report.result\n if isinstance(x, pytest.Item)\n ]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_protocol_method_TestSession.test_collect_protocol_method.for_id_in_p_name_p_name.assert_x_name_for_x_in_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestSession.test_collect_protocol_method_TestSession.test_collect_protocol_method.for_id_in_p_name_p_name.assert_x_name_for_x_in_s", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 506, "end_line": 522, "span_ids": ["TestSession.test_collect_protocol_method"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSession:\n\n def test_collect_protocol_method(self, pytester: Pytester) -> None:\n p = 
pytester.makepyfile(\n \"\"\"\n class TestClass(object):\n def test_method(self):\n pass\n \"\"\"\n )\n normid = p.name + \"::TestClass::test_method\"\n for id in [p.name, p.name + \"::TestClass\", normid]:\n items, hookrec = pytester.inline_genitems(id)\n assert len(items) == 1\n assert items[0].name == \"test_method\"\n newid = items[0].nodeid\n assert newid == normid\n # ensure we are reporting the collection of the single test item (#2464)\n assert [x.name for x in self.get_reported_items(hookrec)] == [\"test_method\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_getinitialnodes_Test_getinitialnodes.test_pkgfile.for_parent_in_col_listcha.assert_parent_config_is_c": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_Test_getinitialnodes_Test_getinitialnodes.test_pkgfile.for_parent_in_col_listcha.assert_parent_config_is_c", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 629, "end_line": 663, "span_ids": ["Test_getinitialnodes.test_global_file", "Test_getinitialnodes.test_pkgfile", "Test_getinitialnodes"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Test_getinitialnodes:\n def test_global_file(self, pytester: Pytester) -> None:\n tmpdir = pytester.path\n x = ensure_file(tmpdir / \"x.py\")\n with tmpdir.cwd():\n config = pytester.parseconfigure(x)\n col = pytester.getnode(config, x)\n assert isinstance(col, pytest.Module)\n assert col.name == \"x.py\"\n assert col.parent is not None\n assert col.parent.parent is None\n for parent in col.listchain():\n assert parent.config is config\n\n def test_pkgfile(self, pytester: Pytester) -> None:\n \"\"\"Verify nesting when a module is within a package.\n The parent chain should match: Module -> Package -> Session.\n Session's parent should always be None.\n \"\"\"\n tmpdir = pytester.path\n subdir = tmpdir.joinpath(\"subdir\")\n x = ensure_file(subdir / \"x.py\")\n ensure_file(subdir / \"__init__.py\")\n with subdir.cwd():\n config = pytester.parseconfigure(x)\n col = pytester.getnode(config, x)\n assert col is not None\n assert col.name == \"x.py\"\n assert isinstance(col, pytest.Module)\n assert isinstance(col.parent, pytest.Package)\n assert isinstance(col.parent.parent, pytest.Session)\n # session is batman (has no parents)\n assert col.parent.parent.parent is None\n for parent in col.listchain():\n assert parent.config is config", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_continue_on_collection_errors_test_continue_on_collection_errors.res_stdout_fnmatch_lines_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_continue_on_collection_errors_test_continue_on_collection_errors.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 943, "end_line": 955, "span_ids": ["test_continue_on_collection_errors"], "tokens": 110}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_continue_on_collection_errors(pytester: Pytester) -> None:\n \"\"\"\n Verify tests are executed even when collection errors occur when the\n --continue-on-collection-errors flag is set\n \"\"\"\n pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)\n\n res = pytester.runpytest(\"--continue-on-collection-errors\")\n assert res.ret == 1\n\n res.stdout.fnmatch_lines(\n [\"collected 2 items / 2 errors\", \"*1 failed, 1 passed, 2 errors*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_continue_on_collection_errors_maxfail_test_continue_on_collection_errors_maxfail.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_continue_on_collection_errors_maxfail_test_continue_on_collection_errors_maxfail.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 958, "end_line": 971, "span_ids": ["test_continue_on_collection_errors_maxfail"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_continue_on_collection_errors_maxfail(pytester: Pytester) -> None:\n \"\"\"\n Verify tests are executed even when collection errors occur and that maxfail\n is honoured (including the collection error count).\n 4 tests: 2 collection errors + 1 failure + 1 success\n test_4 is never executed because the test run is with --maxfail=3 which\n means it is interrupted after the 2 collection errors + 1 failure.\n \"\"\"\n pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)\n\n res = pytester.runpytest(\"--continue-on-collection-errors\", \"--maxfail=3\")\n assert res.ret == 1\n\n res.stdout.fnmatch_lines([\"collected 2 items / 2 errors\", \"*1 failed, 2 errors*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_invalid_signature_message_test_collect_invalid_signature_message.result_stdout_fnmatch_lin": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_invalid_signature_message_test_collect_invalid_signature_message.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1066, "end_line": 1083, "span_ids": ["test_collect_invalid_signature_message"], "tokens": 108}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_invalid_signature_message(pytester: Pytester) -> None:\n \"\"\"Check that we issue a proper message when we can't determine the signature of a test\n function (#4026).\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n class TestCase:\n @pytest.fixture\n def fix():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"Could not determine arguments of *.fix *: invalid method signature\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_handles_raising_on_dunder_class_test_collect_handles_raising_on_dunder_class.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_handles_raising_on_dunder_class_test_collect_handles_raising_on_dunder_class.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1086, "end_line": 1110, "span_ids": ["test_collect_handles_raising_on_dunder_class"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_handles_raising_on_dunder_class(pytester: Pytester) -> None:\n \"\"\"Handle proxy classes like Django's LazySettings that might raise on\n ``isinstance`` (#4266).\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n class ImproperlyConfigured(Exception):\n pass\n\n class RaisesOnGetAttr(object):\n def raises(self):\n raise ImproperlyConfigured\n\n __class__ = property(raises)\n\n raises = RaisesOnGetAttr()\n\n\n def test_1():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed in*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_dir_test_collectignore_via_conftest.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_collect_symlink_dir_test_collectignore_via_conftest.assert_result_ret_Exit", "embedding": null, "metadata": 
{"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1216, "end_line": 1236, "span_ids": ["test_collectignore_via_conftest", "test_collect_symlink_dir"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_collect_symlink_dir(pytester: Pytester) -> None:\n \"\"\"A symlinked directory is collected.\"\"\"\n dir = pytester.mkdir(\"dir\")\n dir.joinpath(\"test_it.py\").write_text(\"def test_it(): pass\", \"utf-8\")\n pytester.path.joinpath(\"symlink_dir\").symlink_to(dir)\n result = pytester.runpytest()\n result.assert_outcomes(passed=2)\n\n\ndef test_collectignore_via_conftest(pytester: Pytester) -> None:\n \"\"\"collect_ignore in parent conftest skips importing child (issue #4592).\"\"\"\n tests = pytester.mkpydir(\"tests\")\n tests.joinpath(\"conftest.py\").write_text(\"collect_ignore = ['ignore_me']\")\n\n ignore_me = tests.joinpath(\"ignore_me\")\n ignore_me.mkdir()\n ignore_me.joinpath(\"__init__.py\").touch()\n ignore_me.joinpath(\"conftest.py\").write_text(\"assert 0, 'should_not_be_called'\")\n\n result = pytester.runpytest()\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.test_modules_not_importable_as_side_effect_TestImportModeImportlib.test_modules_not_importable_as_side_effect.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_TestImportModeImportlib.test_modules_not_importable_as_side_effect_TestImportModeImportlib.test_modules_not_importable_as_side_effect.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1424, "end_line": 1437, "span_ids": ["TestImportModeImportlib.test_modules_not_importable_as_side_effect"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportModeImportlib:\n\n def test_modules_not_importable_as_side_effect(self, pytester: Pytester) -> None:\n \"\"\"In import-mode `importlib`, modules in folders containing conftest.py are not\n importable, as don't change sys.path or sys.modules as side effect of importing\n the conftest.py file.\n \"\"\"\n self.setup_conftest_and_foo(pytester)\n result = pytester.runpytest(\"-v\", \"--import-mode=importlib\")\n result.stdout.fnmatch_lines(\n [\n \"*ModuleNotFoundError: No module named 'foo'\",\n \"tests?test_foo.py:2: ModuleNotFoundError\",\n \"* 1 failed in *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_does_not_crash_on_error_from_decorated_function_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_collection.py_test_does_not_crash_on_error_from_decorated_function_", "embedding": null, "metadata": {"file_path": "testing/test_collection.py", "file_name": "test_collection.py", "file_type": "text/x-python", "category": "test", "start_line": 1440, "end_line": 1466, "span_ids": ["test_does_not_crash_on_recursive_symlink", "test_does_not_crash_on_error_from_decorated_function"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_does_not_crash_on_error_from_decorated_function(pytester: Pytester) -> None:\n \"\"\"Regression test for an issue around bad exception formatting due to\n assertion rewriting mangling lineno's (#4984).\"\"\"\n pytester.makepyfile(\n \"\"\"\n @pytest.fixture\n def a(): return 4\n \"\"\"\n )\n result = pytester.runpytest()\n # Not INTERNAL_ERROR\n assert result.ret == ExitCode.INTERRUPTED\n\n\ndef test_does_not_crash_on_recursive_symlink(pytester: Pytester) -> None:\n \"\"\"Regression test for an issue around recursive symlinks (#7951).\"\"\"\n symlink_or_skip(\"recursive\", pytester.path.joinpath(\"recursive\"))\n pytester.makepyfile(\n \"\"\"\n def test_foo(): assert True\n \"\"\"\n )\n result = pytester.runpytest()\n\n assert result.ret == ExitCode.OK\n assert result.parseoutcomes() == {\"passed\": 1}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_is_generator_async_syntax_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_compat.py_test_is_generator_async_syntax_", "embedding": null, "metadata": {"file_path": "testing/test_compat.py", "file_name": "test_compat.py", "file_type": "text/x-python", "category": "test", "start_line": 113, "end_line": 264, "span_ids": ["test_assert_never_union", "test_is_generator_async_gen_syntax", "test_assert_never_enum", "test_cached_property", "test_helper_failures", "test_assert_never_literal", "test_safe_isclass.CrappyClass:2", "test_is_generator_async_syntax", "ErrorsHelper.raise_exception", "test_safe_getattr", "test_cached_property.Class.prop", "ErrorsHelper.raise_fail_outcome", "ErrorsHelper", "test_cached_property.Class", "test_safe_isclass.CrappyClass", "test_safe_isclass", "ErrorsHelper.raise_baseexception"], "tokens": 845}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_is_generator_async_syntax(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from _pytest.compat import is_generator\n def test_is_generator_py35():\n async def foo():\n await foo()\n\n async def bar():\n pass\n\n assert not 
is_generator(foo)\n assert not is_generator(bar)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n\ndef test_is_generator_async_gen_syntax(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n from _pytest.compat import is_generator\n def test_is_generator_py36():\n async def foo():\n yield\n await foo()\n\n async def bar():\n yield\n\n assert not is_generator(foo)\n assert not is_generator(bar)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n\n\nclass ErrorsHelper:\n @property\n def raise_baseexception(self):\n raise BaseException(\"base exception should be raised\")\n\n @property\n def raise_exception(self):\n raise Exception(\"exception should be catched\")\n\n @property\n def raise_fail_outcome(self):\n pytest.fail(\"fail should be catched\")\n\n\ndef test_helper_failures() -> None:\n helper = ErrorsHelper()\n with pytest.raises(Exception):\n helper.raise_exception\n with pytest.raises(OutcomeException):\n helper.raise_fail_outcome\n\n\ndef test_safe_getattr() -> None:\n helper = ErrorsHelper()\n assert safe_getattr(helper, \"raise_exception\", \"default\") == \"default\"\n assert safe_getattr(helper, \"raise_fail_outcome\", \"default\") == \"default\"\n with pytest.raises(BaseException):\n assert safe_getattr(helper, \"raise_baseexception\", \"default\")\n\n\ndef test_safe_isclass() -> None:\n assert safe_isclass(type) is True\n\n class CrappyClass(Exception):\n # Type ignored because it's bypassed intentionally.\n @property # type: ignore\n def __class__(self):\n assert False, \"Should be ignored\"\n\n assert safe_isclass(CrappyClass()) is False\n\n\ndef test_cached_property() -> None:\n ncalls = 0\n\n class Class:\n @cached_property\n def prop(self) -> int:\n nonlocal ncalls\n ncalls += 1\n return ncalls\n\n c1 = Class()\n assert ncalls == 0\n assert c1.prop == 1\n assert c1.prop == 1\n c2 = Class()\n assert ncalls == 1\n assert c2.prop == 2\n assert c1.prop == 1\n\n\ndef test_assert_never_union() -> None:\n x: Union[int, str] = 10\n\n if isinstance(x, int):\n pass\n else:\n with pytest.raises(AssertionError):\n assert_never(x) # type: ignore[arg-type]\n\n if isinstance(x, int):\n pass\n elif isinstance(x, str):\n pass\n else:\n assert_never(x)\n\n\ndef test_assert_never_enum() -> None:\n E = enum.Enum(\"E\", \"a b\")\n x: E = E.a\n\n if x is E.a:\n pass\n else:\n with pytest.raises(AssertionError):\n assert_never(x) # type: ignore[arg-type]\n\n if x is E.a:\n pass\n elif x is E.b:\n pass\n else:\n assert_never(x)\n\n\ndef test_assert_never_literal() -> None:\n x: Literal[\"a\", \"b\"] = \"a\"\n\n if x == \"a\":\n pass\n else:\n with pytest.raises(AssertionError):\n assert_never(x) # type: ignore[arg-type]\n\n if x == \"a\":\n pass\n elif x == \"b\":\n pass\n else:\n assert_never(x)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_os_from__pytest_pytester_imp": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_os_from__pytest_pytester_imp", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 31, "span_ids": ["imports"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport re\nimport sys\nimport textwrap\nfrom pathlib import Path\nfrom typing import Dict\nfrom typing import List\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import Type\nfrom typing import Union\n\nimport attr\n\nimport _pytest._code\nimport pytest\nfrom _pytest.compat import importlib_metadata\nfrom _pytest.config import _get_plugin_specs_as_list\nfrom _pytest.config import _iter_rewritable_modules\nfrom _pytest.config import _strtobool\nfrom _pytest.config import Config\nfrom _pytest.config import ConftestImportFailure\nfrom _pytest.config import ExitCode\nfrom _pytest.config import parse_warning_filter\nfrom _pytest.config.exceptions import UsageError\nfrom _pytest.config.findpaths import determine_setup\nfrom _pytest.config.findpaths import get_common_ancestor\nfrom _pytest.config.findpaths import locate_config\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pathlib import absolutepath\nfrom _pytest.pytester import Pytester", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_setupcfg_uses_toolpytest_with_pytest_TestParseIni.test_setupcfg_uses_toolpytest_with_pytest.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_setupcfg_uses_toolpytest_with_pytest_TestParseIni.test_setupcfg_uses_toolpytest_with_pytest.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 65, "end_line": 79, "span_ids": ["TestParseIni.test_setupcfg_uses_toolpytest_with_pytest"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n def test_setupcfg_uses_toolpytest_with_pytest(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\"def test(): pass\")\n pytester.makefile(\n \".cfg\",\n setup=\"\"\"\n [tool:pytest]\n testpaths=%s\n [pytest]\n testpaths=ignored\n \"\"\"\n % p1.name,\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"*, configfile: setup.cfg, *\", \"* 1 passed in *\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_append_parse_args_TestParseIni.test_append_parse_args.assert_config_option_verb": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_append_parse_args_TestParseIni.test_append_parse_args.assert_config_option_verb", "embedding": null, "metadata": {"file_path": "testing/test_config.py", 
"file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 81, "end_line": 97, "span_ids": ["TestParseIni.test_append_parse_args"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n def test_append_parse_args(\n self, pytester: Pytester, tmp_path: Path, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.setenv(\"PYTEST_ADDOPTS\", '--color no -rs --tb=\"short\"')\n tmp_path.joinpath(\"pytest.ini\").write_text(\n textwrap.dedent(\n \"\"\"\\\n [pytest]\n addopts = --verbose\n \"\"\"\n )\n )\n config = pytester.parseconfig(tmp_path)\n assert config.option.color == \"no\"\n assert config.option.reportchars == \"s\"\n assert config.option.tbstyle == \"short\"\n assert config.option.verbose", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_tox_ini_wrong_version_TestParseIni.test_ini_names.assert_config_getini_min": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_tox_ini_wrong_version_TestParseIni.test_ini_names.assert_config_getini_min", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 99, "end_line": 129, "span_ids": ["TestParseIni.test_tox_ini_wrong_version", "TestParseIni.test_ini_names"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n def test_tox_ini_wrong_version(self, pytester: Pytester) -> None:\n pytester.makefile(\n \".ini\",\n tox=\"\"\"\n [pytest]\n minversion=999.0\n \"\"\",\n )\n result = pytester.runpytest()\n assert result.ret != 0\n result.stderr.fnmatch_lines(\n [\"*tox.ini: 'minversion' requires pytest-999.0, actual pytest-*\"]\n )\n\n @pytest.mark.parametrize(\n \"section, name\",\n [(\"tool:pytest\", \"setup.cfg\"), (\"pytest\", \"tox.ini\"), (\"pytest\", \"pytest.ini\")],\n )\n def test_ini_names(self, pytester: Pytester, name, section) -> None:\n pytester.path.joinpath(name).write_text(\n textwrap.dedent(\n \"\"\"\n [{section}]\n minversion = 1.0\n \"\"\".format(\n section=section\n )\n )\n )\n config = pytester.parseconfig()\n assert config.getini(\"minversion\") == \"1.0\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_pyproject_toml_TestParseIni.test_toxini_before_lower_pytestini.assert_config_getini_min": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_pyproject_toml_TestParseIni.test_toxini_before_lower_pytestini.assert_config_getini_min", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 131, "end_line": 160, "span_ids": ["TestParseIni.test_pyproject_toml", "TestParseIni.test_toxini_before_lower_pytestini"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n def test_pyproject_toml(self, pytester: Pytester) -> None:\n pytester.makepyprojecttoml(\n \"\"\"\n [tool.pytest.ini_options]\n minversion = \"1.0\"\n \"\"\"\n )\n config = pytester.parseconfig()\n assert config.getini(\"minversion\") == \"1.0\"\n\n def test_toxini_before_lower_pytestini(self, pytester: Pytester) -> None:\n sub = pytester.mkdir(\"sub\")\n sub.joinpath(\"tox.ini\").write_text(\n textwrap.dedent(\n \"\"\"\n [pytest]\n minversion = 2.0\n \"\"\"\n )\n )\n pytester.path.joinpath(\"pytest.ini\").write_text(\n textwrap.dedent(\n \"\"\"\n [pytest]\n minversion = 1.5\n \"\"\"\n )\n )\n config = pytester.parseconfigure(sub)\n assert config.getini(\"minversion\") == \"2.0\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_invalid_config_options_TestParseIni.test_invalid_config_options.if_exception_text_.else_.assert_result_ret_pyte": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_invalid_config_options_TestParseIni.test_invalid_config_options.if_exception_text_.else_.assert_result_ret_pyte", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 181, "end_line": 277, "span_ids": ["TestParseIni.test_invalid_config_options"], "tokens": 552}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n @pytest.mark.parametrize(\n \"ini_file_text, invalid_keys, warning_output, exception_text\",\n [\n pytest.param(\n \"\"\"\n [pytest]\n unknown_ini = value1\n another_unknown_ini = value2\n \"\"\",\n [\"unknown_ini\", \"another_unknown_ini\"],\n [\n \"=*= warnings summary =*=\",\n \"*PytestConfigWarning:*Unknown config option: another_unknown_ini\",\n \"*PytestConfigWarning:*Unknown config option: unknown_ini\",\n ],\n \"Unknown config option: another_unknown_ini\",\n id=\"2-unknowns\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n unknown_ini = value1\n minversion = 5.0.0\n \"\"\",\n [\"unknown_ini\"],\n [\n \"=*= warnings summary =*=\",\n \"*PytestConfigWarning:*Unknown config option: unknown_ini\",\n ],\n \"Unknown config option: unknown_ini\",\n id=\"1-unknown\",\n ),\n 
pytest.param(\n \"\"\"\n [some_other_header]\n unknown_ini = value1\n [pytest]\n minversion = 5.0.0\n \"\"\",\n [],\n [],\n \"\",\n id=\"unknown-in-other-header\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n minversion = 5.0.0\n \"\"\",\n [],\n [],\n \"\",\n id=\"no-unknowns\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n conftest_ini_key = 1\n \"\"\",\n [],\n [],\n \"\",\n id=\"1-known\",\n ),\n ],\n )\n @pytest.mark.filterwarnings(\"default\")\n def test_invalid_config_options(\n self,\n pytester: Pytester,\n ini_file_text,\n invalid_keys,\n warning_output,\n exception_text,\n ) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"conftest_ini_key\", \"\")\n \"\"\"\n )\n pytester.makepyfile(\"def test(): pass\")\n pytester.makeini(ini_file_text)\n\n config = pytester.parseconfig()\n assert sorted(config._get_unknown_ini_keys()) == sorted(invalid_keys)\n\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(warning_output)\n\n result = pytester.runpytest(\"--strict-config\")\n if exception_text:\n result.stderr.fnmatch_lines(\"ERROR: \" + exception_text)\n assert result.ret == pytest.ExitCode.USAGE_ERROR\n else:\n result.stderr.no_fnmatch_line(exception_text)\n assert result.ret == pytest.ExitCode.OK", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_silence_unknown_key_warning_TestParseIni.test_silence_unknown_key_warning.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_silence_unknown_key_warning_TestParseIni.test_silence_unknown_key_warning.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 279, "end_line": 291, "span_ids": ["TestParseIni.test_silence_unknown_key_warning"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n @pytest.mark.filterwarnings(\"default\")\n def test_silence_unknown_key_warning(self, pytester: Pytester) -> None:\n \"\"\"Unknown config key warnings can be silenced using filterwarnings (#7620)\"\"\"\n pytester.makeini(\n \"\"\"\n [pytest]\n filterwarnings =\n ignore:Unknown config option:pytest.PytestConfigWarning\n foobar=1\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*PytestConfigWarning*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_disable_warnings_plugin_disables_config_warnings_TestParseIni.test_disable_warnings_plugin_disables_config_warnings.result_stdout_no_fnmatch_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_disable_warnings_plugin_disables_config_warnings_TestParseIni.test_disable_warnings_plugin_disables_config_warnings.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 293, "end_line": 309, "span_ids": ["TestParseIni.test_disable_warnings_plugin_disables_config_warnings"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n @pytest.mark.filterwarnings(\"default\")\n def test_disable_warnings_plugin_disables_config_warnings(\n self, pytester: Pytester\n ) -> None:\n \"\"\"Disabling 'warnings' plugin also disables config time warnings\"\"\"\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_configure(config):\n config.issue_config_time_warning(\n pytest.PytestConfigWarning(\"custom config warning\"),\n stacklevel=2,\n )\n \"\"\"\n )\n result = pytester.runpytest(\"-pno:warnings\")\n result.stdout.no_fnmatch_line(\"*PytestConfigWarning*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_missing_required_plugins_TestParseIni.test_missing_required_plugins.if_exception_text_.else_.pytester_parseconfig_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_missing_required_plugins_TestParseIni.test_missing_required_plugins.if_exception_text_.else_.pytester_parseconfig_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 311, "end_line": 432, "span_ids": ["TestParseIni.test_missing_required_plugins", "TestParseIni.test_missing_required_plugins.DummyEntryPoint", "TestParseIni.test_missing_required_plugins.DummyEntryPoint:2", "TestParseIni.test_missing_required_plugins.DummyDist", "TestParseIni.test_missing_required_plugins.DummyDist:2"], "tokens": 670}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n @pytest.mark.parametrize(\n \"ini_file_text, exception_text\",\n [\n pytest.param(\n \"\"\"\n [pytest]\n required_plugins = a z\n \"\"\",\n \"Missing required plugins: a, z\",\n id=\"2-missing\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n required_plugins = a z myplugin\n \"\"\",\n \"Missing required plugins: a, z\",\n id=\"2-missing-1-ok\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n required_plugins = myplugin\n \"\"\",\n None,\n id=\"1-ok\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n required_plugins = myplugin==1.5\n \"\"\",\n None,\n id=\"1-ok-pin-exact\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n required_plugins = myplugin>1.0,<2.0\n \"\"\",\n 
None,\n id=\"1-ok-pin-loose\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n required_plugins = pyplugin==1.6\n \"\"\",\n \"Missing required plugins: pyplugin==1.6\",\n id=\"missing-version\",\n ),\n pytest.param(\n \"\"\"\n [pytest]\n required_plugins = pyplugin==1.6 other==1.0\n \"\"\",\n \"Missing required plugins: other==1.0, pyplugin==1.6\",\n id=\"missing-versions\",\n ),\n pytest.param(\n \"\"\"\n [some_other_header]\n required_plugins = wont be triggered\n [pytest]\n \"\"\",\n None,\n id=\"invalid-header\",\n ),\n ],\n )\n def test_missing_required_plugins(\n self,\n pytester: Pytester,\n monkeypatch: MonkeyPatch,\n ini_file_text: str,\n exception_text: str,\n ) -> None:\n \"\"\"Check 'required_plugins' option with various settings.\n\n This test installs a mock \"myplugin-1.5\" which is used in the parametrized test cases.\n \"\"\"\n\n @attr.s\n class DummyEntryPoint:\n name = attr.ib()\n module = attr.ib()\n group = \"pytest11\"\n\n def load(self):\n __import__(self.module)\n return sys.modules[self.module]\n\n entry_points = [\n DummyEntryPoint(\"myplugin1\", \"myplugin1_module\"),\n ]\n\n @attr.s\n class DummyDist:\n entry_points = attr.ib()\n files = ()\n version = \"1.5\"\n\n @property\n def metadata(self):\n return {\"name\": \"myplugin\"}\n\n def my_dists():\n return [DummyDist(entry_points)]\n\n pytester.makepyfile(myplugin1_module=\"# my plugin module\")\n pytester.syspathinsert()\n\n monkeypatch.setattr(importlib_metadata, \"distributions\", my_dists)\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n\n pytester.makeini(ini_file_text)\n\n if exception_text:\n with pytest.raises(pytest.UsageError, match=exception_text):\n pytester.parseconfig()\n else:\n pytester.parseconfig()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_early_config_cmdline_TestParseIni.test_early_config_cmdline.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestParseIni.test_early_config_cmdline_TestParseIni.test_early_config_cmdline.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 434, "end_line": 453, "span_ids": ["TestParseIni.test_early_config_cmdline"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParseIni:\n\n def test_early_config_cmdline(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n \"\"\"early_config contains options registered by third-party plugins.\n\n This is a regression involving pytest-cov (and possibly others) introduced in #7700.\n \"\"\"\n pytester.makepyfile(\n myplugin=\"\"\"\n def pytest_addoption(parser):\n parser.addoption('--foo', default=None, dest='foo')\n\n def pytest_load_initial_conftests(early_config, parser, args):\n assert early_config.known_args_namespace.foo == \"1\"\n \"\"\"\n )\n monkeypatch.setenv(\"PYTEST_PLUGINS\", \"myplugin\")\n pytester.syspathinsert()\n result = 
pytester.runpytest(\"--foo=1\")\n result.stdout.fnmatch_lines(\"* no tests ran in *\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_TestConfigAPI.test_addini.pytest_raises_ValueError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.test_addini_TestConfigAPI.test_addini.pytest_raises_ValueError_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 589, "end_line": 611, "span_ids": ["TestConfigAPI.test_addini"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n @pytest.mark.parametrize(\"maybe_type\", [\"not passed\", \"None\", '\"string\"'])\n def test_addini(self, pytester: Pytester, maybe_type: str) -> None:\n if maybe_type == \"not passed\":\n type_string = \"\"\n else:\n type_string = f\", {maybe_type}\"\n\n pytester.makeconftest(\n f\"\"\"\n def pytest_addoption(parser):\n parser.addini(\"myname\", \"my new ini value\"{type_string})\n \"\"\"\n )\n pytester.makeini(\n \"\"\"\n [pytest]\n myname=hello\n \"\"\"\n )\n config = pytester.parseconfig()\n val = config.getini(\"myname\")\n assert val == \"hello\"\n pytest.raises(ValueError, config.getini, \"other\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.make_conftest_for_pathlist_TestConfigAPI.test_addini_linelist_ini_files.self_check_config_linelis": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestConfigAPI.make_conftest_for_pathlist_TestConfigAPI.test_addini_linelist_ini_files.self_check_config_linelis", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 613, "end_line": 704, "span_ids": ["TestConfigAPI.test_addini_args_pyproject_toml", "TestConfigAPI.make_conftest_for_args", "TestConfigAPI.make_conftest_for_pathlist", "TestConfigAPI.check_config_pathlist", "TestConfigAPI.check_config_args", "TestConfigAPI.make_conftest_for_linelist", "TestConfigAPI.test_addini_pathlist_pyproject_toml", "TestConfigAPI.test_addini_linelist_ini_files", "TestConfigAPI.test_addini_pathlist_ini_files", "TestConfigAPI.test_addini_args_ini_files"], "tokens": 737}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConfigAPI:\n\n def make_conftest_for_pathlist(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def 
pytest_addoption(parser):\n parser.addini(\"paths\", \"my new ini value\", type=\"pathlist\")\n parser.addini(\"abc\", \"abc value\")\n \"\"\"\n )\n\n def test_addini_pathlist_ini_files(self, pytester: Pytester) -> None:\n self.make_conftest_for_pathlist(pytester)\n p = pytester.makeini(\n \"\"\"\n [pytest]\n paths=hello world/sub.py\n \"\"\"\n )\n self.check_config_pathlist(pytester, p)\n\n def test_addini_pathlist_pyproject_toml(self, pytester: Pytester) -> None:\n self.make_conftest_for_pathlist(pytester)\n p = pytester.makepyprojecttoml(\n \"\"\"\n [tool.pytest.ini_options]\n paths=[\"hello\", \"world/sub.py\"]\n \"\"\"\n )\n self.check_config_pathlist(pytester, p)\n\n def check_config_pathlist(self, pytester: Pytester, config_path: Path) -> None:\n config = pytester.parseconfig()\n values = config.getini(\"paths\")\n assert len(values) == 2\n assert values[0] == config_path.parent.joinpath(\"hello\")\n assert values[1] == config_path.parent.joinpath(\"world/sub.py\")\n pytest.raises(ValueError, config.getini, \"other\")\n\n def make_conftest_for_args(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"args\", \"new args\", type=\"args\")\n parser.addini(\"a2\", \"\", \"args\", default=\"1 2 3\".split())\n \"\"\"\n )\n\n def test_addini_args_ini_files(self, pytester: Pytester) -> None:\n self.make_conftest_for_args(pytester)\n pytester.makeini(\n \"\"\"\n [pytest]\n args=123 \"123 hello\" \"this\"\n \"\"\"\n )\n self.check_config_args(pytester)\n\n def test_addini_args_pyproject_toml(self, pytester: Pytester) -> None:\n self.make_conftest_for_args(pytester)\n pytester.makepyprojecttoml(\n \"\"\"\n [tool.pytest.ini_options]\n args = [\"123\", \"123 hello\", \"this\"]\n \"\"\"\n )\n self.check_config_args(pytester)\n\n def check_config_args(self, pytester: Pytester) -> None:\n config = pytester.parseconfig()\n values = config.getini(\"args\")\n assert values == [\"123\", \"123 hello\", \"this\"]\n values = config.getini(\"a2\")\n assert values == list(\"123\")\n\n def make_conftest_for_linelist(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"xy\", \"\", type=\"linelist\")\n parser.addini(\"a2\", \"\", \"linelist\")\n \"\"\"\n )\n\n def test_addini_linelist_ini_files(self, pytester: Pytester) -> None:\n self.make_conftest_for_linelist(pytester)\n pytester.makeini(\n \"\"\"\n [pytest]\n xy= 123 345\n second line\n \"\"\"\n )\n self.check_config_linelist(pytester)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_setuptools_importerror_issue1479_test_setuptools_importerror_issue1479.with_pytest_raises_Import.pytester_parseconfig_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_setuptools_importerror_issue1479_test_setuptools_importerror_issue1479.with_pytest_raises_Import.pytester_parseconfig_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 951, "end_line": 974, "span_ids": ["test_setuptools_importerror_issue1479.DummyEntryPoint:2", "test_setuptools_importerror_issue1479.Distribution:2", "test_setuptools_importerror_issue1479.Distribution", 
"test_setuptools_importerror_issue1479.DummyEntryPoint", "test_setuptools_importerror_issue1479"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setuptools_importerror_issue1479(\n pytester: Pytester, monkeypatch: MonkeyPatch\n) -> None:\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n\n class DummyEntryPoint:\n name = \"mytestplugin\"\n group = \"pytest11\"\n\n def load(self):\n raise ImportError(\"Don't hide me!\")\n\n class Distribution:\n version = \"1.0\"\n files = (\"foo.txt\",)\n metadata = {\"name\": \"foo\"}\n entry_points = (DummyEntryPoint(),)\n\n def distributions():\n return (Distribution(),)\n\n monkeypatch.setattr(importlib_metadata, \"distributions\", distributions)\n with pytest.raises(ImportError):\n pytester.parseconfig()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_importlib_metadata_broken_distribution_test_importlib_metadata_broken_distribution.pytester_parseconfig_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_importlib_metadata_broken_distribution_test_importlib_metadata_broken_distribution.pytester_parseconfig_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 977, "end_line": 1000, "span_ids": ["test_importlib_metadata_broken_distribution", "test_importlib_metadata_broken_distribution.Distribution", "test_importlib_metadata_broken_distribution.Distribution:2", "test_importlib_metadata_broken_distribution.DummyEntryPoint", "test_importlib_metadata_broken_distribution.DummyEntryPoint:2"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_importlib_metadata_broken_distribution(\n pytester: Pytester, monkeypatch: MonkeyPatch\n) -> None:\n \"\"\"Integration test for broken distributions with 'files' metadata being None (#5389)\"\"\"\n monkeypatch.delenv(\"PYTEST_DISABLE_PLUGIN_AUTOLOAD\", raising=False)\n\n class DummyEntryPoint:\n name = \"mytestplugin\"\n group = \"pytest11\"\n\n def load(self):\n return object()\n\n class Distribution:\n version = \"1.0\"\n files = None\n metadata = {\"name\": \"foo\"}\n entry_points = (DummyEntryPoint(),)\n\n def distributions():\n return (Distribution(),)\n\n monkeypatch.setattr(importlib_metadata, \"distributions\", distributions)\n pytester.parseconfig()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir_TestRootdir.test_simple_noini.None_5": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir_TestRootdir.test_simple_noini.None_5", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1293, "end_line": 1304, "span_ids": ["TestRootdir.test_simple_noini", "TestRootdir"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRootdir:\n def test_simple_noini(self, tmp_path: Path, monkeypatch: MonkeyPatch) -> None:\n assert get_common_ancestor([tmp_path]) == tmp_path\n a = tmp_path / \"a\"\n a.mkdir()\n assert get_common_ancestor([a, tmp_path]) == tmp_path\n assert get_common_ancestor([tmp_path, a]) == tmp_path\n monkeypatch.chdir(tmp_path)\n assert get_common_ancestor([]) == tmp_path\n no_path = tmp_path / \"does-not-exist\"\n assert get_common_ancestor([no_path]) == tmp_path\n assert get_common_ancestor([no_path / \"a\"]) == tmp_path", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_pytestini_overrides_empty_other_TestRootdir.test_pytestini_overrides_empty_other.assert_parsed_inipath_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_pytestini_overrides_empty_other_TestRootdir.test_pytestini_overrides_empty_other.assert_parsed_inipath_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1334, "end_line": 1343, "span_ids": ["TestRootdir.test_pytestini_overrides_empty_other"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRootdir:\n\n @pytest.mark.parametrize(\"name\", [\"setup.cfg\", \"tox.ini\"])\n def test_pytestini_overrides_empty_other(self, tmp_path: Path, name: str) -> None:\n inipath = tmp_path / \"pytest.ini\"\n inipath.touch()\n a = tmp_path / \"a\"\n a.mkdir()\n (a / name).touch()\n rootpath, parsed_inipath, _ = determine_setup(None, [str(a)])\n assert rootpath == tmp_path\n assert parsed_inipath == inipath", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_setuppy_fallback_TestRootdir.test_nothing.assert_inicfg_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_setuppy_fallback_TestRootdir.test_nothing.assert_inicfg_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1345, 
"end_line": 1360, "span_ids": ["TestRootdir.test_nothing", "TestRootdir.test_setuppy_fallback"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRootdir:\n\n def test_setuppy_fallback(self, tmp_path: Path) -> None:\n a = tmp_path / \"a\"\n a.mkdir()\n (a / \"setup.cfg\").touch()\n (tmp_path / \"setup.py\").touch()\n rootpath, inipath, inicfg = determine_setup(None, [str(a)])\n assert rootpath == tmp_path\n assert inipath is None\n assert inicfg == {}\n\n def test_nothing(self, tmp_path: Path, monkeypatch: MonkeyPatch) -> None:\n monkeypatch.chdir(tmp_path)\n rootpath, inipath, inicfg = determine_setup(None, [str(tmp_path)])\n assert rootpath == tmp_path\n assert inipath is None\n assert inicfg == {}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_arg_outside_cwd_without_inifile_TestRootdir.test_with_existing_file_in_subdir.assert_inipath_is_None": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_arg_outside_cwd_without_inifile_TestRootdir.test_with_existing_file_in_subdir.assert_inipath_is_None", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1384, "end_line": 1425, "span_ids": ["TestRootdir.test_with_existing_file_in_subdir", "TestRootdir.test_with_non_dir_arg", "TestRootdir.test_with_arg_outside_cwd_with_inifile", "TestRootdir.test_with_arg_outside_cwd_without_inifile"], "tokens": 401}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRootdir:\n\n def test_with_arg_outside_cwd_without_inifile(\n self, tmp_path: Path, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.chdir(tmp_path)\n a = tmp_path / \"a\"\n a.mkdir()\n b = tmp_path / \"b\"\n b.mkdir()\n rootpath, inifile, _ = determine_setup(None, [str(a), str(b)])\n assert rootpath == tmp_path\n assert inifile is None\n\n def test_with_arg_outside_cwd_with_inifile(self, tmp_path: Path) -> None:\n a = tmp_path / \"a\"\n a.mkdir()\n b = tmp_path / \"b\"\n b.mkdir()\n inipath = a / \"pytest.ini\"\n inipath.touch()\n rootpath, parsed_inipath, _ = determine_setup(None, [str(a), str(b)])\n assert rootpath == a\n assert inipath == parsed_inipath\n\n @pytest.mark.parametrize(\"dirs\", ([], [\"does-not-exist\"], [\"a/does-not-exist\"]))\n def test_with_non_dir_arg(\n self, dirs: Sequence[str], tmp_path: Path, monkeypatch: MonkeyPatch\n ) -> None:\n monkeypatch.chdir(tmp_path)\n rootpath, inipath, _ = determine_setup(None, dirs)\n assert rootpath == tmp_path\n assert inipath is None\n\n def test_with_existing_file_in_subdir(\n self, tmp_path: Path, monkeypatch: MonkeyPatch\n ) -> None:\n a = tmp_path / \"a\"\n a.mkdir()\n (a / 
\"exists\").touch()\n monkeypatch.chdir(tmp_path)\n rootpath, inipath, _ = determine_setup(None, [\"a/exist\"])\n assert rootpath == tmp_path\n assert inipath is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_config_also_in_parent_directory_TestRootdir.test_with_config_also_in_parent_directory.assert_inipath_tmp_pat": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestRootdir.test_with_config_also_in_parent_directory_TestRootdir.test_with_config_also_in_parent_directory.assert_inipath_tmp_pat", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1427, "end_line": 1440, "span_ids": ["TestRootdir.test_with_config_also_in_parent_directory"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRootdir:\n\n def test_with_config_also_in_parent_directory(\n self, tmp_path: Path, monkeypatch: MonkeyPatch\n ) -> None:\n \"\"\"Regression test for #7807.\"\"\"\n (tmp_path / \"setup.cfg\").write_text(\"[tool:pytest]\\n\", \"utf-8\")\n (tmp_path / \"myproject\").mkdir()\n (tmp_path / \"myproject\" / \"setup.cfg\").write_text(\"[tool:pytest]\\n\", \"utf-8\")\n (tmp_path / \"myproject\" / \"tests\").mkdir()\n monkeypatch.chdir(tmp_path / \"myproject\")\n\n rootpath, inipath, _ = determine_setup(None, [\"tests/\"])\n\n assert rootpath == tmp_path / \"myproject\"\n assert inipath == tmp_path / \"myproject\" / \"setup.cfg\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_multiple_override_ini_options_TestOverrideIniArgs.test_multiple_override_ini_options.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestOverrideIniArgs.test_multiple_override_ini_options_TestOverrideIniArgs.test_multiple_override_ini_options.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1631, "end_line": 1653, "span_ids": ["TestOverrideIniArgs.test_multiple_override_ini_options"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOverrideIniArgs:\n\n def test_multiple_override_ini_options(self, pytester: Pytester) -> None:\n \"\"\"Ensure a file path following a '-o' option does not generate an error (#3103)\"\"\"\n pytester.makepyfile(\n **{\n \"conftest.py\": \"\"\"\n def 
pytest_addoption(parser):\n parser.addini('foo', default=None, help='some option')\n parser.addini('bar', default=None, help='some option')\n \"\"\",\n \"test_foo.py\": \"\"\"\n def test(pytestconfig):\n assert pytestconfig.getini('foo') == '1'\n assert pytestconfig.getini('bar') == '0'\n \"\"\",\n \"test_bar.py\": \"\"\"\n def test():\n assert False\n \"\"\",\n }\n )\n result = pytester.runpytest(\"-o\", \"foo=1\", \"-o\", \"bar=0\", \"test_foo.py\")\n assert \"ERROR:\" not in result.stderr.str()\n result.stdout.fnmatch_lines([\"collected 1 item\", \"*= 1 passed in *=\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_via_addopts_test_help_via_addopts.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_help_via_addopts_test_help_via_addopts.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1656, "end_line": 1672, "span_ids": ["test_help_via_addopts"], "tokens": 107}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_help_via_addopts(pytester: Pytester) -> None:\n pytester.makeini(\n \"\"\"\n [pytest]\n addopts = --unknown-option-should-allow-for-help --help\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n result.stdout.fnmatch_lines(\n [\n \"usage: *\",\n \"positional arguments:\",\n # Displays full/default help.\n \"to see available markers type: pytest --markers\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestSetupCfg_TestSetupCfg.test_pytest_custom_cfg_unsupported.with_pytest_raises_pytest.pytester_runpytest_c_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_TestSetupCfg_TestSetupCfg.test_pytest_custom_cfg_unsupported.with_pytest_raises_pytest.pytester_runpytest_c_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1818, "end_line": 1839, "span_ids": ["TestSetupCfg", "TestSetupCfg.test_pytest_custom_cfg_unsupported", "TestSetupCfg.test_pytest_setup_cfg_unsupported"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSetupCfg:\n def test_pytest_setup_cfg_unsupported(self, pytester: Pytester) -> None:\n pytester.makefile(\n \".cfg\",\n setup=\"\"\"\n [pytest]\n addopts = --verbose\n \"\"\",\n )\n with pytest.raises(pytest.fail.Exception):\n 
pytester.runpytest()\n\n def test_pytest_custom_cfg_unsupported(self, pytester: Pytester) -> None:\n pytester.makefile(\n \".cfg\",\n custom=\"\"\"\n [pytest]\n addopts = --verbose\n \"\"\",\n )\n with pytest.raises(pytest.fail.Exception):\n pytester.runpytest(\"-c\", \"custom.cfg\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_conftest_import_error_repr_test_strtobool.with_pytest_raises_ValueE._strtobool_unknown_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_conftest_import_error_repr_test_strtobool.with_pytest_raises_ValueE._strtobool_unknown_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1938, "end_line": 1958, "span_ids": ["test_conftest_import_error_repr", "test_strtobool"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_import_error_repr(tmp_path: Path) -> None:\n \"\"\"`ConftestImportFailure` should use a short error message and readable\n path to the failed conftest.py file.\"\"\"\n path = tmp_path.joinpath(\"foo/conftest.py\")\n with pytest.raises(\n ConftestImportFailure,\n match=re.escape(f\"RuntimeError: some error (from {path})\"),\n ):\n try:\n raise RuntimeError(\"some error\")\n except Exception as exc:\n assert exc.__traceback__ is not None\n exc_info = (type(exc), exc, exc.__traceback__)\n raise ConftestImportFailure(path, exc_info) from exc\n\n\ndef test_strtobool() -> None:\n assert _strtobool(\"YES\")\n assert not _strtobool(\"NO\")\n with pytest.raises(ValueError):\n _strtobool(\"unknown\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_parse_warning_filter_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_config.py_test_parse_warning_filter_", "embedding": null, "metadata": {"file_path": "testing/test_config.py", "file_name": "test_config.py", "file_type": "text/x-python", "category": "test", "start_line": 1961, "end_line": 2001, "span_ids": ["test_parse_warning_filter_failure", "test_parse_warning_filter"], "tokens": 328}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n \"arg, escape, expected\",\n [\n (\"ignore\", False, (\"ignore\", \"\", Warning, \"\", 0)),\n (\n \"ignore::DeprecationWarning\",\n False,\n (\"ignore\", \"\", DeprecationWarning, \"\", 0),\n ),\n (\n \"ignore:some msg:DeprecationWarning\",\n False,\n (\"ignore\", \"some msg\", DeprecationWarning, \"\", 0),\n ),\n (\n 
\"ignore::DeprecationWarning:mod\",\n False,\n (\"ignore\", \"\", DeprecationWarning, \"mod\", 0),\n ),\n (\n \"ignore::DeprecationWarning:mod:42\",\n False,\n (\"ignore\", \"\", DeprecationWarning, \"mod\", 42),\n ),\n (\"error:some\\\\msg:::\", True, (\"error\", \"some\\\\\\\\msg\", Warning, \"\", 0)),\n (\"error:::mod\\\\foo:\", True, (\"error\", \"\", Warning, \"mod\\\\\\\\foo\\\\Z\", 0)),\n ],\n)\ndef test_parse_warning_filter(\n arg: str, escape: bool, expected: Tuple[str, str, Type[Warning], str, int]\n) -> None:\n assert parse_warning_filter(arg, escape=escape) == expected\n\n\n@pytest.mark.parametrize(\"arg\", [\":\" * 5, \"::::-1\", \"::::not-a-number\"])\ndef test_parse_warning_filter_failure(arg: str) -> None:\n import warnings\n\n with pytest.raises(warnings._OptionError):\n parse_warning_filter(arg, escape=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_argparse_ConftestWithSetinitial.return.conftest": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_argparse_ConftestWithSetinitial.return.conftest", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 23, "span_ids": ["ConftestWithSetinitial", "imports"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import argparse\nimport os\nimport textwrap\nfrom pathlib import Path\nfrom typing import cast\nfrom typing import Dict\nfrom typing import Generator\nfrom typing import List\nfrom typing import Optional\n\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.config import PytestPluginManager\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pathlib import symlink_or_skip\nfrom _pytest.pytester import Pytester\nfrom _pytest.tmpdir import TempPathFactory\n\n\ndef ConftestWithSetinitial(path) -> PytestPluginManager:\n conftest = PytestPluginManager()\n conftest_setinitial(conftest, [path])\n return conftest", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_conftest_setinitial_conftest_setinitial.conftest__set_initial_con": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_conftest_setinitial_conftest_setinitial.conftest__set_initial_con", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 26, "end_line": 38, "span_ids": ["conftest_setinitial.Namespace", "conftest_setinitial.Namespace.__init__", "conftest_setinitial"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def conftest_setinitial(\n conftest: PytestPluginManager, args, confcutdir: Optional[\"os.PathLike[str]\"] = None\n) -> None:\n class Namespace:\n def __init__(self) -> None:\n self.file_or_dir = args\n self.confcutdir = os.fspath(confcutdir) if confcutdir is not None else None\n self.noconftest = False\n self.pyargs = False\n self.importmode = \"prepend\"\n\n namespace = cast(argparse.Namespace, Namespace())\n conftest._set_initial_conftests(namespace)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_value_access_with_confmod_TestConftestValueAccessGlobal.test_value_access_with_confmod.assert_path_stem_conf": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestValueAccessGlobal.test_value_access_with_confmod_TestConftestValueAccessGlobal.test_value_access_with_confmod.assert_path_stem_conf", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 88, "end_line": 96, "span_ids": ["TestConftestValueAccessGlobal.test_value_access_with_confmod"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.usefixtures(\"_sys_snapshot\")\nclass TestConftestValueAccessGlobal:\n\n def test_value_access_with_confmod(self, basedir: Path) -> None:\n startdir = basedir / \"adir\" / \"b\"\n startdir.joinpath(\"xx\").mkdir()\n conftest = ConftestWithSetinitial(startdir)\n mod, value = conftest._rget_with_confmod(\"a\", startdir, importmode=\"prepend\")\n assert value == 1.5\n path = Path(mod.__file__)\n assert path.parent == basedir / \"adir\" / \"b\"\n assert path.stem == \"conftest\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_import_order_test_conftest_import_order.assert_mods_expected": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_test_conftest_import_order_test_conftest_import_order.assert_mods_expected", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 344, "end_line": 358, "span_ids": ["test_conftest_import_order"], "tokens": 154}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_conftest_import_order(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:\n ct1 = 
pytester.makeconftest(\"\")\n sub = pytester.mkdir(\"sub\")\n ct2 = sub / \"conftest.py\"\n ct2.write_text(\"\")\n\n def impct(p, importmode):\n return p\n\n conftest = PytestPluginManager()\n conftest._confcutdir = pytester.path\n monkeypatch.setattr(conftest, \"_importconftest\", impct)\n mods = cast(List[Path], conftest._getconftestmodules(sub, importmode=\"prepend\"))\n expected = [ct1, ct2]\n assert mods == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestVisibility._N_B_swc_stands_for__TestConftestVisibility.test_parsefactories_relative_node_ids.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_conftest.py_TestConftestVisibility._N_B_swc_stands_for__TestConftestVisibility.test_parsefactories_relative_node_ids.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_conftest.py", "file_name": "test_conftest.py", "file_type": "text/x-python", "category": "test", "start_line": 492, "end_line": 529, "span_ids": ["TestConftestVisibility.test_parsefactories_relative_node_ids", "TestConftestVisibility._setup_tree"], "tokens": 422}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConftestVisibility:\n\n # N.B.: \"swc\" stands for \"subdir with conftest.py\"\n # \"snc\" stands for \"subdir no [i.e. 
without] conftest.py\"\n @pytest.mark.parametrize(\n \"chdir,testarg,expect_ntests_passed\",\n [\n # Effective target: package/..\n (\"runner\", \"..\", 3),\n (\"package\", \"..\", 3),\n (\"swc\", \"../..\", 3),\n (\"snc\", \"../..\", 3),\n # Effective target: package\n (\"runner\", \"../package\", 3),\n (\"package\", \".\", 3),\n (\"swc\", \"..\", 3),\n (\"snc\", \"..\", 3),\n # Effective target: package/swc\n (\"runner\", \"../package/swc\", 1),\n (\"package\", \"./swc\", 1),\n (\"swc\", \".\", 1),\n (\"snc\", \"../swc\", 1),\n # Effective target: package/snc\n (\"runner\", \"../package/snc\", 1),\n (\"package\", \"./snc\", 1),\n (\"swc\", \"../snc\", 1),\n (\"snc\", \".\", 1),\n ],\n )\n def test_parsefactories_relative_node_ids(\n self, pytester: Pytester, chdir: str, testarg: str, expect_ntests_passed: int\n ) -> None:\n \"\"\"#616\"\"\"\n dirs = self._setup_tree(pytester)\n print(\"pytest run in cwd: %s\" % (dirs[chdir].relative_to(pytester.path)))\n print(\"pytestarg : %s\" % testarg)\n print(\"expected pass : %s\" % expect_ntests_passed)\n os.chdir(dirs[chdir])\n reprec = pytester.inline_run(testarg, \"-q\", \"--traceconfig\")\n reprec.assertoutcome(passed=expect_ntests_passed)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_set_trace_interception_TestPDB.test_pdb_set_trace_interception.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_set_trace_interception_TestPDB.test_pdb_set_trace_interception.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 411, "end_line": 427, "span_ids": ["TestPDB.test_pdb_set_trace_interception"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_set_trace_interception(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pdb\n def test_1():\n pdb.set_trace()\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"test_1\")\n child.expect(\"Pdb\")\n child.sendline(\"q\")\n rest = child.read().decode(\"utf8\")\n assert \"no tests ran\" in rest\n assert \"reading from stdin while output\" not in rest\n assert \"BdbQuit\" not in rest\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_and_capsys_TestPDB.test_pdb_and_capsys.self_flush_child_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_and_capsys_TestPDB.test_pdb_and_capsys.self_flush_child_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 429, 
"end_line": 444, "span_ids": ["TestPDB.test_pdb_and_capsys"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_and_capsys(self, pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_1(capsys):\n print(\"hello1\")\n pytest.set_trace()\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p1))\n child.expect(\"test_1\")\n child.send(\"capsys.readouterr()\\n\")\n child.expect(\"hello1\")\n child.sendeof()\n child.read()\n self.flush(child)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_TestPDB.test_pdb_custom_cls.assert_custom_pdb_calls_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_TestPDB.test_pdb_custom_cls.assert_custom_pdb_calls_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 845, "end_line": 853, "span_ids": ["TestPDB.test_pdb_custom_cls"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPDB:\n\n def test_pdb_custom_cls(\n self, pytester: Pytester, custom_pdb_calls: List[str]\n ) -> None:\n p1 = pytester.makepyfile(\"\"\"xxx \"\"\")\n result = pytester.runpytest_inprocess(\n \"--pdb\", \"--pdbcls=_pytest:_CustomPdb\", p1\n )\n result.stdout.fnmatch_lines([\"*NameError*xxx*\", \"*1 error*\"])\n assert custom_pdb_calls == [\"init\", \"reset\", \"interaction\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_invalid_TestPDB.test_pdb_custom_cls_without_pdb.assert_custom_pdb_calls_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestPDB.test_pdb_custom_cls_invalid_TestPDB.test_pdb_custom_cls_without_pdb.assert_custom_pdb_calls_", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 855, "end_line": 877, "span_ids": ["TestPDB.test_pdb_custom_cls_without_pdb", "TestPDB.test_pdb_custom_cls_invalid", "TestPDB.test_pdb_validate_usepdb_cls"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "class TestPDB:\n\n def test_pdb_custom_cls_invalid(self, pytester: Pytester) -> None:\n result = pytester.runpytest_inprocess(\"--pdbcls=invalid\")\n result.stderr.fnmatch_lines(\n [\n \"*: error: argument --pdbcls: 'invalid' is not in the format 'modname:classname'\"\n ]\n )\n\n def test_pdb_validate_usepdb_cls(self):\n assert _validate_usepdb_cls(\"os.path:dirname.__name__\") == (\n \"os.path\",\n \"dirname.__name__\",\n )\n\n assert _validate_usepdb_cls(\"pdb:DoesNotExist\") == (\"pdb\", \"DoesNotExist\")\n\n def test_pdb_custom_cls_without_pdb(\n self, pytester: Pytester, custom_pdb_calls: List[str]\n ) -> None:\n p1 = pytester.makepyfile(\"\"\"xxx \"\"\")\n result = pytester.runpytest_inprocess(\"--pdbcls=_pytest:_CustomPdb\", p1)\n result.stdout.fnmatch_lines([\"*NameError*xxx*\", \"*1 error*\"])\n assert custom_pdb_calls == []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints_TestDebuggingBreakpoints.test_sys_breakpointhook_configure_and_unconfigure.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_debugging.py_TestDebuggingBreakpoints_TestDebuggingBreakpoints.test_sys_breakpointhook_configure_and_unconfigure.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_debugging.py", "file_name": "test_debugging.py", "file_type": "text/x-python", "category": "test", "start_line": 913, "end_line": 953, "span_ids": ["TestDebuggingBreakpoints", "TestDebuggingBreakpoints.test_sys_breakpointhook_configure_and_unconfigure", "TestDebuggingBreakpoints.test_supports_breakpoint_module_global"], "tokens": 322}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDebuggingBreakpoints:\n def test_supports_breakpoint_module_global(self) -> None:\n \"\"\"Test that supports breakpoint global marks on Python 3.7+.\"\"\"\n if sys.version_info >= (3, 7):\n assert SUPPORTS_BREAKPOINT_BUILTIN is True\n\n @pytest.mark.skipif(\n not SUPPORTS_BREAKPOINT_BUILTIN, reason=\"Requires breakpoint() builtin\"\n )\n @pytest.mark.parametrize(\"arg\", [\"--pdb\", \"\"])\n def test_sys_breakpointhook_configure_and_unconfigure(\n self, pytester: Pytester, arg: str\n ) -> None:\n \"\"\"\n Test that sys.breakpointhook is set to the custom Pdb class once configured, test that\n hook is reset to system value once pytest has been unconfigured\n \"\"\"\n pytester.makeconftest(\n \"\"\"\n import sys\n from pytest import hookimpl\n from _pytest.debugging import pytestPDB\n\n def pytest_configure(config):\n config._cleanup.append(check_restored)\n\n def check_restored():\n assert sys.breakpointhook == sys.__breakpointhook__\n\n def test_check():\n assert sys.breakpointhook == pytestPDB.set_trace\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_nothing(): pass\n \"\"\"\n )\n args = (arg,) if arg else ()\n result = pytester.runpytest_subprocess(*args)\n result.stdout.fnmatch_lines([\"*1 passed in *\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_empty_TestDoctests.test_collect_module_single_modulelevel_doctest.for_p_in_path_pytester_.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_empty_TestDoctests.test_collect_module_single_modulelevel_doctest.for_p_in_path_pytester_.None_3", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 40, "end_line": 52, "span_ids": ["TestDoctests.test_collect_module_empty", "TestDoctests.test_collect_module_single_modulelevel_doctest"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_collect_module_empty(self, pytester: Pytester):\n path = pytester.makepyfile(whatever=\"#\")\n for p in (path, pytester.path):\n items, reprec = pytester.inline_genitems(p, \"--doctest-modules\")\n assert len(items) == 0\n\n def test_collect_module_single_modulelevel_doctest(self, pytester: Pytester):\n path = pytester.makepyfile(whatever='\"\"\">>> pass\"\"\"')\n for p in (path, pytester.path):\n items, reprec = pytester.inline_genitems(p, \"--doctest-modules\")\n assert len(items) == 1\n assert isinstance(items[0], DoctestItem)\n assert isinstance(items[0].parent, DoctestModule)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_two_doctest_one_modulelevel_TestDoctests.test_collect_module_two_doctest_one_modulelevel.for_p_in_path_pytester_.assert_items_0_parent_is": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_two_doctest_one_modulelevel_TestDoctests.test_collect_module_two_doctest_one_modulelevel.for_p_in_path_pytester_.assert_items_0_parent_is", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 54, "end_line": 68, "span_ids": ["TestDoctests.test_collect_module_two_doctest_one_modulelevel"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_collect_module_two_doctest_one_modulelevel(self, pytester: Pytester):\n path = pytester.makepyfile(\n whatever=\"\"\"\n '>>> x = None'\n def my_func():\n \">>> magic = 42 \"\n \"\"\"\n )\n for p in (path, pytester.path):\n items, reprec = pytester.inline_genitems(p, \"--doctest-modules\")\n assert len(items) == 2\n assert isinstance(items[0], DoctestItem)\n assert isinstance(items[1], 
DoctestItem)\n assert isinstance(items[0].parent, DoctestModule)\n assert items[0].parent is items[1].parent", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_two_doctest_no_modulelevel_TestDoctests.test_collect_module_two_doctest_no_modulelevel.for_p_in_path_pytester_.assert_items_0_parent_is": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_collect_module_two_doctest_no_modulelevel_TestDoctests.test_collect_module_two_doctest_no_modulelevel.for_p_in_path_pytester_.assert_items_0_parent_is", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 70, "end_line": 101, "span_ids": ["TestDoctests.test_collect_module_two_doctest_no_modulelevel"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n @pytest.mark.parametrize(\"filename\", [\"__init__\", \"whatever\"])\n def test_collect_module_two_doctest_no_modulelevel(\n self,\n pytester: Pytester,\n filename: str,\n ) -> None:\n path = pytester.makepyfile(\n **{\n filename: \"\"\"\n '# Empty'\n def my_func():\n \">>> magic = 42 \"\n def unuseful():\n '''\n # This is a function\n # >>> # it doesn't have any doctest\n '''\n def another():\n '''\n # This is another function\n >>> import os # this one does have a doctest\n '''\n \"\"\",\n },\n )\n for p in (path, pytester.path):\n items, reprec = pytester.inline_genitems(p, \"--doctest-modules\")\n assert len(items) == 2\n assert isinstance(items[0], DoctestItem)\n assert isinstance(items[1], DoctestItem)\n assert isinstance(items[0].parent, DoctestModule)\n assert items[0].parent is items[1].parent", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unex_importerror_with_module_TestDoctests.test_doctestmodule.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctest_unex_importerror_with_module_TestDoctests.test_doctestmodule.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 416, "end_line": 452, "span_ids": ["TestDoctests.test_doctest_unex_importerror_with_module", "TestDoctests.test_doctestmodule"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def 
test_doctest_unex_importerror_with_module(self, pytester: Pytester):\n pytester.path.joinpath(\"hello.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import asdalsdkjaslkdjasd\n \"\"\"\n )\n )\n pytester.maketxtfile(\n \"\"\"\n >>> import hello\n >>>\n \"\"\"\n )\n result = pytester.runpytest(\"--doctest-modules\")\n # doctest is never executed because of error during hello.py collection\n result.stdout.fnmatch_lines(\n [\n \"*ERROR collecting hello.py*\",\n \"*ModuleNotFoundError: No module named *asdals*\",\n \"*Interrupted: 1 error during collection*\",\n ]\n )\n\n def test_doctestmodule(self, pytester: Pytester):\n p = pytester.makepyfile(\n \"\"\"\n '''\n >>> x = 1\n >>> x == 1\n False\n\n '''\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\")\n reprec.assertoutcome(failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_external_and_issue116_TestDoctests.test_doctestmodule_external_and_issue116.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_external_and_issue116_TestDoctests.test_doctestmodule_external_and_issue116.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 454, "end_line": 479, "span_ids": ["TestDoctests.test_doctestmodule_external_and_issue116"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctestmodule_external_and_issue116(self, pytester: Pytester):\n p = pytester.mkpydir(\"hello\")\n p.joinpath(\"__init__.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n def somefunc():\n '''\n >>> i = 0\n >>> i + 1\n 2\n '''\n \"\"\"\n )\n )\n result = pytester.runpytest(p, \"--doctest-modules\")\n result.stdout.fnmatch_lines(\n [\n \"003 *>>> i = 0\",\n \"004 *>>> i + 1\",\n \"*Expected:\",\n \"* 2\",\n \"*Got:\",\n \"* 1\",\n \"*:4: DocTestFailure\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_txtfile_with_usefixtures_in_ini_TestDoctests.test_txtfile_with_usefixtures_in_ini.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_txtfile_with_usefixtures_in_ini_TestDoctests.test_txtfile_with_usefixtures_in_ini.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 513, "end_line": 537, "span_ids": ["TestDoctests.test_txtfile_with_usefixtures_in_ini"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_txtfile_with_usefixtures_in_ini(self, pytester: Pytester):\n pytester.makeini(\n \"\"\"\n [pytest]\n usefixtures = myfixture\n \"\"\"\n )\n pytester.makeconftest(\n \"\"\"\n import pytest\n @pytest.fixture\n def myfixture(monkeypatch):\n monkeypatch.setenv(\"HELLO\", \"WORLD\")\n \"\"\"\n )\n\n p = pytester.maketxtfile(\n \"\"\"\n >>> import os\n >>> os.environ[\"HELLO\"]\n 'WORLD'\n \"\"\"\n )\n reprec = pytester.inline_run(p)\n reprec.assertoutcome(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_with_fixtures_TestDoctests.test_doctestmodule_three_tests.reprec_assertoutcome_pass": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctests.test_doctestmodule_with_fixtures_TestDoctests.test_doctestmodule_three_tests.reprec_assertoutcome_pass", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 539, "end_line": 577, "span_ids": ["TestDoctests.test_doctestmodule_three_tests", "TestDoctests.test_doctestmodule_with_fixtures"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctests:\n\n def test_doctestmodule_with_fixtures(self, pytester: Pytester):\n p = pytester.makepyfile(\n \"\"\"\n '''\n >>> p = getfixture('tmp_path')\n >>> p.is_dir()\n True\n '''\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\")\n reprec.assertoutcome(passed=1)\n\n def test_doctestmodule_three_tests(self, pytester: Pytester):\n p = pytester.makepyfile(\n \"\"\"\n '''\n >>> p = getfixture('tmp_path')\n >>> p.is_dir()\n True\n '''\n def my_func():\n '''\n >>> magic = 42\n >>> magic - 42\n 0\n '''\n def unuseful():\n pass\n def another():\n '''\n >>> import os\n >>> os is os\n True\n '''\n \"\"\"\n )\n reprec = pytester.inline_run(p, \"--doctest-modules\")\n reprec.assertoutcome(passed=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption_TestDoctestReportingOption._run_doctest_report.return.pytester_runpytest_doc": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_TestDoctestReportingOption_TestDoctestReportingOption._run_doctest_report.return.pytester_runpytest_doc", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1374, "end_line": 1392, "span_ids": ["TestDoctestReportingOption._run_doctest_report", "TestDoctestReportingOption"], 
"tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDoctestReportingOption:\n def _run_doctest_report(self, pytester, format):\n pytester.makepyfile(\n \"\"\"\n def foo():\n '''\n >>> foo()\n a b\n 0 1 4\n 1 2 4\n 2 3 6\n '''\n print(' a b\\\\n'\n '0 1 4\\\\n'\n '1 2 5\\\\n'\n '2 3 6')\n \"\"\"\n )\n return pytester.runpytest(\"--doctest-modules\", \"--doctest-report\", format)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_is_setup_py_not_named_setup_py_test_is_setup_py_is_a_setup_py.assert__is_setup_py_setup": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_is_setup_py_not_named_setup_py_test_is_setup_py_is_a_setup_py.assert__is_setup_py_setup", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1500, "end_line": 1510, "span_ids": ["test_is_setup_py_is_a_setup_py", "test_is_setup_py_not_named_setup_py"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_is_setup_py_not_named_setup_py(tmp_path: Path) -> None:\n not_setup_py = tmp_path.joinpath(\"not_setup.py\")\n not_setup_py.write_text('from setuptools import setup; setup(name=\"foo\")')\n assert not _is_setup_py(not_setup_py)\n\n\n@pytest.mark.parametrize(\"mod\", (\"setuptools\", \"distutils.core\"))\ndef test_is_setup_py_is_a_setup_py(tmp_path: Path, mod: str) -> None:\n setup_py = tmp_path.joinpath(\"setup.py\")\n setup_py.write_text(f'from {mod} import setup; setup(name=\"foo\")', \"utf-8\")\n assert _is_setup_py(setup_py)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_is_setup_py_different_encoding_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_doctest.py_test_is_setup_py_different_encoding_", "embedding": null, "metadata": {"file_path": "testing/test_doctest.py", "file_name": "test_doctest.py", "file_type": "text/x-python", "category": "test", "start_line": 1513, "end_line": 1522, "span_ids": ["test_is_setup_py_different_encoding"], "tokens": 108}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"mod\", (\"setuptools\", \"distutils.core\"))\ndef 
test_is_setup_py_different_encoding(tmp_path: Path, mod: str) -> None:\n setup_py = tmp_path.joinpath(\"setup.py\")\n contents = (\n \"# -*- coding: cp1252 -*-\\n\"\n 'from {} import setup; setup(name=\"foo\", description=\"\u20ac\")\\n'.format(mod)\n )\n setup_py.write_bytes(contents.encode(\"cp1252\"))\n assert _is_setup_py(setup_py)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_io_test_disabled.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_io_test_disabled.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_faulthandler.py", "file_name": "test_faulthandler.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 68, "span_ids": ["test_disabled", "test_crash_during_shutdown_captured", "imports", "test_crash_during_shutdown_not_captured", "setup_crashing_test", "test_enabled"], "tokens": 479}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import io\nimport sys\n\nimport pytest\nfrom _pytest.pytester import Pytester\n\n\ndef test_enabled(pytester: Pytester) -> None:\n \"\"\"Test single crashing test displays a traceback.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import faulthandler\n def test_crash():\n faulthandler._sigabrt()\n \"\"\"\n )\n result = pytester.runpytest_subprocess()\n result.stderr.fnmatch_lines([\"*Fatal Python error*\"])\n assert result.ret != 0\n\n\ndef setup_crashing_test(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import faulthandler\n import atexit\n def test_ok():\n atexit.register(faulthandler._sigabrt)\n \"\"\"\n )\n\n\ndef test_crash_during_shutdown_captured(pytester: Pytester) -> None:\n \"\"\"\n Re-enable faulthandler if pytest encountered it enabled during configure.\n We should be able to then see crashes during interpreter shutdown.\n \"\"\"\n setup_crashing_test(pytester)\n args = (sys.executable, \"-Xfaulthandler\", \"-mpytest\")\n result = pytester.run(*args)\n result.stderr.fnmatch_lines([\"*Fatal Python error*\"])\n assert result.ret != 0\n\n\ndef test_crash_during_shutdown_not_captured(pytester: Pytester) -> None:\n \"\"\"\n Check that pytest leaves faulthandler disabled if it was not enabled during configure.\n This prevents us from seeing crashes during interpreter shutdown (see #8260).\n \"\"\"\n setup_crashing_test(pytester)\n args = (sys.executable, \"-mpytest\")\n result = pytester.run(*args)\n result.stderr.no_fnmatch_line(\"*Fatal Python error*\")\n assert result.ret != 0\n\n\ndef test_disabled(pytester: Pytester) -> None:\n \"\"\"Test option to disable fault handler in the command line.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import faulthandler\n def test_disabled():\n assert not faulthandler.is_enabled()\n \"\"\"\n )\n result = pytester.runpytest_subprocess(\"-p\", \"no:faulthandler\")\n result.stdout.fnmatch_lines([\"*1 passed*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_already_initialized_crash_test_already_initialized_crash.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_already_initialized_crash_test_already_initialized_crash.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_faulthandler.py", "file_name": "test_faulthandler.py", "file_type": "text/x-python", "category": "test", "start_line": 131, "end_line": 148, "span_ids": ["test_already_initialized_crash"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_already_initialized_crash(pytester: Pytester) -> None:\n \"\"\"Even if faulthandler is already initialized, we still dump tracebacks on crashes (#8258).\"\"\"\n pytester.makepyfile(\n \"\"\"\n def test():\n import faulthandler\n faulthandler._sigabrt()\n \"\"\"\n )\n result = pytester.run(\n sys.executable,\n \"-X\",\n \"faulthandler\",\n \"-mpytest\",\n pytester.path,\n )\n result.stderr.fnmatch_lines([\"*Fatal Python error*\"])\n assert result.ret != 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_get_stderr_fileno_invalid_fd_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_faulthandler.py_test_get_stderr_fileno_invalid_fd_", "embedding": null, "metadata": {"file_path": "testing/test_faulthandler.py", "file_name": "test_faulthandler.py", "file_type": "text/x-python", "category": "test", "start_line": 151, "end_line": 173, "span_ids": ["test_get_stderr_fileno_invalid_fd.StdErrWrapper", "test_get_stderr_fileno_invalid_fd", "test_get_stderr_fileno_invalid_fd.StdErrWrapper:2"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_get_stderr_fileno_invalid_fd() -> None:\n \"\"\"Test for faulthandler being able to handle invalid file descriptors for stderr (#8249).\"\"\"\n from _pytest.faulthandler import get_stderr_fileno\n\n class StdErrWrapper(io.StringIO):\n \"\"\"\n Mimic ``twisted.logger.LoggingFile`` to simulate returning an invalid file descriptor.\n\n https://github.com/twisted/twisted/blob/twisted-20.3.0/src/twisted/logger/_io.py#L132-L139\n \"\"\"\n\n def fileno(self):\n return -1\n\n wrapper = StdErrWrapper()\n\n with pytest.MonkeyPatch.context() as mp:\n mp.setattr(\"sys.stderr\", wrapper)\n\n # Even when the stderr wrapper signals an invalid file descriptor,\n # ``_get_stderr_fileno()`` should return the real one.\n assert get_stderr_fileno() == 2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, 
"__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_from_pathlib_import_Path_TestLoadConfigDictFromFile.test_invalid_toml_file.assert_load_config_dict_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_from_pathlib_import_Path_TestLoadConfigDictFromFile.test_invalid_toml_file.assert_load_config_dict_f", "embedding": null, "metadata": {"file_path": "testing/test_findpaths.py", "file_name": "test_findpaths.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 66, "span_ids": ["TestLoadConfigDictFromFile.test_unsupported_pytest_section_in_cfg_file", "TestLoadConfigDictFromFile.test_custom_ini", "TestLoadConfigDictFromFile.test_custom_ini_without_section", "TestLoadConfigDictFromFile.test_valid_cfg_file", "TestLoadConfigDictFromFile.test_empty_pytest_ini", "TestLoadConfigDictFromFile", "TestLoadConfigDictFromFile.test_invalid_toml_file", "TestLoadConfigDictFromFile.test_pytest_ini", "imports", "TestLoadConfigDictFromFile.test_custom_cfg_file"], "tokens": 662}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from pathlib import Path\nfrom textwrap import dedent\n\nimport pytest\nfrom _pytest.config.findpaths import get_common_ancestor\nfrom _pytest.config.findpaths import get_dirs_from_args\nfrom _pytest.config.findpaths import load_config_dict_from_file\n\n\nclass TestLoadConfigDictFromFile:\n def test_empty_pytest_ini(self, tmp_path: Path) -> None:\n \"\"\"pytest.ini files are always considered for configuration, even if empty\"\"\"\n fn = tmp_path / \"pytest.ini\"\n fn.write_text(\"\", encoding=\"utf-8\")\n assert load_config_dict_from_file(fn) == {}\n\n def test_pytest_ini(self, tmp_path: Path) -> None:\n \"\"\"[pytest] section in pytest.ini files is read correctly\"\"\"\n fn = tmp_path / \"pytest.ini\"\n fn.write_text(\"[pytest]\\nx=1\", encoding=\"utf-8\")\n assert load_config_dict_from_file(fn) == {\"x\": \"1\"}\n\n def test_custom_ini(self, tmp_path: Path) -> None:\n \"\"\"[pytest] section in any .ini file is read correctly\"\"\"\n fn = tmp_path / \"custom.ini\"\n fn.write_text(\"[pytest]\\nx=1\", encoding=\"utf-8\")\n assert load_config_dict_from_file(fn) == {\"x\": \"1\"}\n\n def test_custom_ini_without_section(self, tmp_path: Path) -> None:\n \"\"\"Custom .ini files without [pytest] section are not considered for configuration\"\"\"\n fn = tmp_path / \"custom.ini\"\n fn.write_text(\"[custom]\", encoding=\"utf-8\")\n assert load_config_dict_from_file(fn) is None\n\n def test_custom_cfg_file(self, tmp_path: Path) -> None:\n \"\"\"Custom .cfg files without [tool:pytest] section are not considered for configuration\"\"\"\n fn = tmp_path / \"custom.cfg\"\n fn.write_text(\"[custom]\", encoding=\"utf-8\")\n assert load_config_dict_from_file(fn) is None\n\n def test_valid_cfg_file(self, tmp_path: Path) -> None:\n \"\"\"Custom .cfg files with [tool:pytest] section are read correctly\"\"\"\n fn = tmp_path / \"custom.cfg\"\n fn.write_text(\"[tool:pytest]\\nx=1\", encoding=\"utf-8\")\n assert load_config_dict_from_file(fn) == {\"x\": \"1\"}\n\n def test_unsupported_pytest_section_in_cfg_file(self, tmp_path: Path) -> None:\n \"\"\".cfg files with [pytest] section are no longer supported and should fail to alert 
users\"\"\"\n fn = tmp_path / \"custom.cfg\"\n fn.write_text(\"[pytest]\", encoding=\"utf-8\")\n with pytest.raises(pytest.fail.Exception):\n load_config_dict_from_file(fn)\n\n def test_invalid_toml_file(self, tmp_path: Path) -> None:\n \"\"\".toml files without [tool.pytest.ini_options] are not considered for configuration.\"\"\"\n fn = tmp_path / \"myconfig.toml\"\n fn.write_text(\n dedent(\n \"\"\"\n [build_system]\n x = 1\n \"\"\"\n ),\n encoding=\"utf-8\",\n )\n assert load_config_dict_from_file(fn) is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_TestCommonAncestor_TestCommonAncestor.test_single_file.assert_get_common_ancesto": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_TestCommonAncestor_TestCommonAncestor.test_single_file.assert_get_common_ancesto", "embedding": null, "metadata": {"file_path": "testing/test_findpaths.py", "file_name": "test_findpaths.py", "file_type": "text/x-python", "category": "test", "start_line": 92, "end_line": 111, "span_ids": ["TestCommonAncestor", "TestCommonAncestor.test_single_file", "TestCommonAncestor.test_single_dir", "TestCommonAncestor.test_has_ancestor"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCommonAncestor:\n def test_has_ancestor(self, tmp_path: Path) -> None:\n fn1 = tmp_path / \"foo\" / \"bar\" / \"test_1.py\"\n fn1.parent.mkdir(parents=True)\n fn1.touch()\n fn2 = tmp_path / \"foo\" / \"zaz\" / \"test_2.py\"\n fn2.parent.mkdir(parents=True)\n fn2.touch()\n assert get_common_ancestor([fn1, fn2]) == tmp_path / \"foo\"\n assert get_common_ancestor([fn1.parent, fn2]) == tmp_path / \"foo\"\n assert get_common_ancestor([fn1.parent, fn2.parent]) == tmp_path / \"foo\"\n assert get_common_ancestor([fn1, fn2.parent]) == tmp_path / \"foo\"\n\n def test_single_dir(self, tmp_path: Path) -> None:\n assert get_common_ancestor([tmp_path]) == tmp_path\n\n def test_single_file(self, tmp_path: Path) -> None:\n fn = tmp_path / \"foo.py\"\n fn.touch()\n assert get_common_ancestor([fn]) == tmp_path", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_test_get_dirs_from_args_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_findpaths.py_test_get_dirs_from_args_", "embedding": null, "metadata": {"file_path": "testing/test_findpaths.py", "file_name": "test_findpaths.py", "file_type": "text/x-python", "category": "test", "start_line": 114, "end_line": 126, "span_ids": ["test_get_dirs_from_args"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"def test_get_dirs_from_args(tmp_path):\n \"\"\"get_dirs_from_args() skips over non-existing directories and files\"\"\"\n fn = tmp_path / \"foo.py\"\n fn.touch()\n d = tmp_path / \"tests\"\n d.mkdir()\n option = \"--foobar=/foo.txt\"\n # xdist uses options in this format for its rsync feature (#7638)\n xdist_rsync_option = \"popen=c:/dest\"\n assert get_dirs_from_args(\n [str(fn), str(tmp_path / \"does_not_exist\"), str(d), option, xdist_rsync_option]\n ) == [fn.parent, d]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_none_help_param_raises_exception_test_empty_help_param.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_helpconfig.py_test_none_help_param_raises_exception_test_empty_help_param.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_helpconfig.py", "file_name": "test_helpconfig.py", "file_type": "text/x-python", "category": "test", "start_line": 42, "end_line": 72, "span_ids": ["test_none_help_param_raises_exception", "test_empty_help_param"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_none_help_param_raises_exception(pytester: Pytester) -> None:\n \"\"\"Test that a None help param raises a TypeError.\"\"\"\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"test_ini\", None, default=True, type=\"bool\")\n \"\"\"\n )\n result = pytester.runpytest(\"--help\")\n result.stderr.fnmatch_lines(\n [\"*TypeError: help argument cannot be None for test_ini*\"]\n )\n\n\ndef test_empty_help_param(pytester: Pytester) -> None:\n \"\"\"Test that an empty help param is displayed correctly.\"\"\"\n pytester.makeconftest(\n \"\"\"\n def pytest_addoption(parser):\n parser.addini(\"test_ini\", \"\", default=True, type=\"bool\")\n \"\"\"\n )\n result = pytester.runpytest(\"--help\")\n assert result.ret == 0\n lines = [\n \" required_plugins (args):\",\n \" plugins that must be present for pytest to run*\",\n \" test_ini (bool):*\",\n \"environment variables:\",\n ]\n result.stdout.fnmatch_lines(lines, consecutive=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_RunAndParse_RunAndParse.__call__.return.result_DomNode_xmldoc_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_RunAndParse_RunAndParse.__call__.return.result_DomNode_xmldoc_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 35, "end_line": 51, "span_ids": ["RunAndParse.__call__", "RunAndParse"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RunAndParse:\n def __init__(self, pytester: Pytester, schema: xmlschema.XMLSchema) -> None:\n self.pytester = pytester\n self.schema = schema\n\n def __call__(\n self, *args: Union[str, \"os.PathLike[str]\"], family: Optional[str] = \"xunit1\"\n ) -> Tuple[RunResult, \"DomNode\"]:\n if family:\n args = (\"-o\", \"junit_family=\" + family) + args\n xml_path = self.pytester.path.joinpath(\"junit.xml\")\n result = self.pytester.runpytest(\"--junitxml=%s\" % xml_path, *args)\n if family == \"xunit2\":\n with xml_path.open() as f:\n self.schema.validate(f)\n xmldoc = minidom.parse(str(xml_path))\n return result, DomNode(xmldoc)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_unicode_TestPython.test_unicode.assert_hx_in_fnode_toxm": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_unicode_TestPython.test_unicode.assert_hx_in_fnode_toxm", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 765, "end_line": 780, "span_ids": ["TestPython.test_unicode"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n def test_unicode(self, pytester: Pytester, run_and_parse: RunAndParse) -> None:\n value = \"hx\\xc4\\x85\\xc4\\x87\\n\"\n pytester.makepyfile(\n \"\"\"\\\n # coding: latin1\n def test_hello():\n print(%r)\n assert 0\n \"\"\"\n % value\n )\n result, dom = run_and_parse()\n assert result.ret == 1\n tnode = dom.find_first_by_tag(\"testcase\")\n fnode = tnode.find_first_by_tag(\"failure\")\n assert \"hx\" in fnode.toxml()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_assertion_binchars_TestPython.test_assertion_binchars.print_dom_toxml_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_TestPython.test_assertion_binchars_TestPython.test_assertion_binchars.print_dom_toxml_", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 782, "end_line": 797, "span_ids": ["TestPython.test_assertion_binchars"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPython:\n\n def test_assertion_binchars(\n self, pytester: Pytester, 
run_and_parse: RunAndParse\n ) -> None:\n \"\"\"This test did fail when the escaping wasn't strict.\"\"\"\n pytester.makepyfile(\n \"\"\"\n\n M1 = '\\x01\\x02\\x03\\x04'\n M2 = '\\x01\\x02\\x03\\x05'\n\n def test_str_compare():\n assert M1 == M2\n \"\"\"\n )\n result, dom = run_and_parse()\n print(dom.toxml())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logxml_changingdir_test_logxml_check_isdir.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_logxml_changingdir_test_logxml_check_isdir.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1094, "end_line": 1124, "span_ids": ["test_logxml_check_isdir", "test_logxml_makedir", "test_logxml_changingdir"], "tokens": 246}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_logxml_changingdir(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_func():\n import os\n os.chdir(\"a\")\n \"\"\"\n )\n pytester.mkdir(\"a\")\n result = pytester.runpytest(\"--junitxml=a/x.xml\")\n assert result.ret == 0\n assert pytester.path.joinpath(\"a/x.xml\").exists()\n\n\ndef test_logxml_makedir(pytester: Pytester) -> None:\n \"\"\"--junitxml should automatically create directories for the xml file\"\"\"\n pytester.makepyfile(\n \"\"\"\n def test_pass():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--junitxml=path/to/results.xml\")\n assert result.ret == 0\n assert pytester.path.joinpath(\"path/to/results.xml\").exists()\n\n\ndef test_logxml_check_isdir(pytester: Pytester) -> None:\n \"\"\"Give an error if --junit-xml is a directory (#2089)\"\"\"\n result = pytester.runpytest(\"--junit-xml=.\")\n result.stderr.fnmatch_lines([\"*--junitxml must be a filename*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_escaped_parametrized_names_xml_test_escaped_parametrized_names_xml.node_assert_attr_name_te": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_junitxml.py_test_escaped_parametrized_names_xml_test_escaped_parametrized_names_xml.node_assert_attr_name_te", "embedding": null, "metadata": {"file_path": "testing/test_junitxml.py", "file_name": "test_junitxml.py", "file_type": "text/x-python", "category": "test", "start_line": 1127, "end_line": 1141, "span_ids": ["test_escaped_parametrized_names_xml"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def 
test_escaped_parametrized_names_xml(\n pytester: Pytester, run_and_parse: RunAndParse\n) -> None:\n pytester.makepyfile(\n \"\"\"\\\n import pytest\n @pytest.mark.parametrize('char', [\"\\\\x00\"])\n def test_func(char):\n assert char\n \"\"\"\n )\n result, dom = run_and_parse()\n assert result.ret == 0\n node = dom.find_first_by_tag(\"testcase\")\n node.assert_attr(name=\"test_func[\\\\x00]\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_validate_basetemp_ok_test_validate_basetemp_integration.result_stderr_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_validate_basetemp_ok_test_validate_basetemp_integration.result_stderr_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 87, "end_line": 105, "span_ids": ["test_validate_basetemp_integration", "test_validate_basetemp_ok", "test_validate_basetemp_fails"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"basetemp\", [\"foo\", \"foo/bar\"])\ndef test_validate_basetemp_ok(tmp_path, basetemp, monkeypatch):\n monkeypatch.chdir(str(tmp_path))\n validate_basetemp(tmp_path / basetemp)\n\n\n@pytest.mark.parametrize(\"basetemp\", [\"\", \".\", \"..\"])\ndef test_validate_basetemp_fails(tmp_path, basetemp, monkeypatch):\n monkeypatch.chdir(str(tmp_path))\n msg = \"basetemp must not be empty, the current working directory or any parent directory of it\"\n with pytest.raises(argparse.ArgumentTypeError, match=msg):\n if basetemp:\n basetemp = tmp_path / basetemp\n validate_basetemp(basetemp)\n\n\ndef test_validate_basetemp_integration(pytester: Pytester) -> None:\n result = pytester.runpytest(\"--basetemp=.\")\n result.stderr.fnmatch_lines(\"*basetemp must not be*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument_TestResolveCollectionArgument.test_file.None_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument_TestResolveCollectionArgument.test_file.None_3", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 108, "end_line": 135, "span_ids": ["TestResolveCollectionArgument.invocation_path", "TestResolveCollectionArgument.test_file", "TestResolveCollectionArgument"], "tokens": 258}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestResolveCollectionArgument:\n @pytest.fixture\n def invocation_path(self, pytester: Pytester) -> Path:\n pytester.syspathinsert(pytester.path / \"src\")\n pytester.chdir()\n\n pkg = pytester.path.joinpath(\"src/pkg\")\n pkg.mkdir(parents=True)\n pkg.joinpath(\"__init__.py\").touch()\n pkg.joinpath(\"test.py\").touch()\n return pytester.path\n\n def test_file(self, invocation_path: Path) -> None:\n \"\"\"File and parts.\"\"\"\n assert resolve_collection_argument(invocation_path, \"src/pkg/test.py\") == (\n invocation_path / \"src/pkg/test.py\",\n [],\n )\n assert resolve_collection_argument(invocation_path, \"src/pkg/test.py::\") == (\n invocation_path / \"src/pkg/test.py\",\n [\"\"],\n )\n assert resolve_collection_argument(\n invocation_path, \"src/pkg/test.py::foo::bar\"\n ) == (invocation_path / \"src/pkg/test.py\", [\"foo\", \"bar\"])\n assert resolve_collection_argument(\n invocation_path, \"src/pkg/test.py::foo::bar::\"\n ) == (invocation_path / \"src/pkg/test.py\", [\"foo\", \"bar\", \"\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_dir_TestResolveCollectionArgument.test_dir.None_1.resolve_collection_argume": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_dir_TestResolveCollectionArgument.test_dir.None_1.resolve_collection_argume", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 137, "end_line": 152, "span_ids": ["TestResolveCollectionArgument.test_dir"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestResolveCollectionArgument:\n\n def test_dir(self, invocation_path: Path) -> None:\n \"\"\"Directory and parts.\"\"\"\n assert resolve_collection_argument(invocation_path, \"src/pkg\") == (\n invocation_path / \"src/pkg\",\n [],\n )\n\n with pytest.raises(\n UsageError, match=r\"directory argument cannot contain :: selection parts\"\n ):\n resolve_collection_argument(invocation_path, \"src/pkg::\")\n\n with pytest.raises(\n UsageError, match=r\"directory argument cannot contain :: selection parts\"\n ):\n resolve_collection_argument(invocation_path, \"src/pkg::foo::bar\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_pypath_TestResolveCollectionArgument.test_pypath.with_pytest_raises_.resolve_collection_argume": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_pypath_TestResolveCollectionArgument.test_pypath.with_pytest_raises_.resolve_collection_argume", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 154, 
"end_line": 172, "span_ids": ["TestResolveCollectionArgument.test_pypath"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestResolveCollectionArgument:\n\n def test_pypath(self, invocation_path: Path) -> None:\n \"\"\"Dotted name and parts.\"\"\"\n assert resolve_collection_argument(\n invocation_path, \"pkg.test\", as_pypath=True\n ) == (invocation_path / \"src/pkg/test.py\", [])\n assert resolve_collection_argument(\n invocation_path, \"pkg.test::foo::bar\", as_pypath=True\n ) == (invocation_path / \"src/pkg/test.py\", [\"foo\", \"bar\"])\n assert resolve_collection_argument(invocation_path, \"pkg\", as_pypath=True) == (\n invocation_path / \"src/pkg\",\n [],\n )\n\n with pytest.raises(\n UsageError, match=r\"package argument cannot contain :: selection parts\"\n ):\n resolve_collection_argument(\n invocation_path, \"pkg::foo::bar\", as_pypath=True\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_does_not_exist_TestResolveCollectionArgument.test_does_not_exist.None_1.resolve_collection_argume": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_does_not_exist_TestResolveCollectionArgument.test_does_not_exist.None_1.resolve_collection_argume", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 174, "end_line": 187, "span_ids": ["TestResolveCollectionArgument.test_does_not_exist"], "tokens": 124}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestResolveCollectionArgument:\n\n def test_does_not_exist(self, invocation_path: Path) -> None:\n \"\"\"Given a file/module that does not exist raises UsageError.\"\"\"\n with pytest.raises(\n UsageError, match=re.escape(\"file or directory not found: foobar\")\n ):\n resolve_collection_argument(invocation_path, \"foobar\")\n\n with pytest.raises(\n UsageError,\n match=re.escape(\n \"module or package not found: foobar (missing __init__.py?)\"\n ),\n ):\n resolve_collection_argument(invocation_path, \"foobar\", as_pypath=True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_absolute_paths_are_resolved_correctly_TestResolveCollectionArgument.test_absolute_paths_are_resolved_correctly.None_2": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_TestResolveCollectionArgument.test_absolute_paths_are_resolved_correctly_TestResolveCollectionArgument.test_absolute_paths_are_resolved_correctly.None_2", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 189, "end_line": 202, "span_ids": ["TestResolveCollectionArgument.test_absolute_paths_are_resolved_correctly"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestResolveCollectionArgument:\n\n def test_absolute_paths_are_resolved_correctly(self, invocation_path: Path) -> None:\n \"\"\"Absolute paths resolve back to absolute paths.\"\"\"\n full_path = str(invocation_path / \"src\")\n assert resolve_collection_argument(invocation_path, full_path) == (\n Path(os.path.abspath(\"src\")),\n [],\n )\n\n # ensure full paths given in the command-line without the drive letter resolve\n # to the full path correctly (#7628)\n drive, full_path_without_drive = os.path.splitdrive(full_path)\n assert resolve_collection_argument(\n invocation_path, full_path_without_drive\n ) == (Path(os.path.abspath(\"src\")), [])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_module_full_path_without_drive_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_main.py_test_module_full_path_without_drive_", "embedding": null, "metadata": {"file_path": "testing/test_main.py", "file_name": "test_main.py", "file_type": "text/x-python", "category": "test", "start_line": 205, "end_line": 242, "span_ids": ["test_module_full_path_without_drive"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_module_full_path_without_drive(pytester: Pytester) -> None:\n \"\"\"Collect and run test using full path except for the drive letter (#7628).\n\n Passing a full path without a drive letter would trigger a bug in py.path.local\n where it would keep the full path without the drive letter around, instead of resolving\n to the full path, resulting in fixtures node ids not matching against test node ids correctly.\n \"\"\"\n pytester.makepyfile(\n **{\n \"project/conftest.py\": \"\"\"\n import pytest\n @pytest.fixture\n def fix(): return 1\n \"\"\",\n }\n )\n\n pytester.makepyfile(\n **{\n \"project/tests/dummy_test.py\": \"\"\"\n def test(fix):\n assert fix == 1\n \"\"\"\n }\n )\n fn = pytester.path.joinpath(\"project/tests/dummy_test.py\")\n assert fn.is_file()\n\n drive, path = os.path.splitdrive(str(fn))\n\n result = pytester.runpytest(path, \"-v\")\n result.stdout.fnmatch_lines(\n [\n os.path.join(\"project\", \"tests\", \"dummy_test.py\") + \"::test PASSED *\",\n \"* 1 passed in *\",\n ]\n )", "start_char_idx": 
null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_option_test_mark_option.assert_passed_str_expe": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_option_test_mark_option.assert_passed_str_expe", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 205, "end_line": 233, "span_ids": ["test_mark_option"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"expr\", \"expected_passed\"),\n [\n (\"xyz\", [\"test_one\"]),\n (\"((( xyz)) )\", [\"test_one\"]),\n (\"not not xyz\", [\"test_one\"]),\n (\"xyz and xyz2\", []),\n (\"xyz2\", [\"test_two\"]),\n (\"xyz or xyz2\", [\"test_one\", \"test_two\"]),\n ],\n)\ndef test_mark_option(\n expr: str, expected_passed: List[Optional[str]], pytester: Pytester\n) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xyz\n def test_one():\n pass\n @pytest.mark.xyz2\n def test_two():\n pass\n \"\"\"\n )\n rec = pytester.inline_run(\"-m\", expr)\n passed, skipped, fail = rec.listoutcomes()\n passed_str = [x.nodeid.split(\"::\")[-1] for x in passed]\n assert passed_str == expected_passed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_option_custom_test_mark_option_custom.assert_passed_str_expe": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_mark_option_custom_test_mark_option_custom.assert_passed_str_expe", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 236, "end_line": 263, "span_ids": ["test_mark_option_custom"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"expr\", \"expected_passed\"),\n [(\"interface\", [\"test_interface\"]), (\"not interface\", [\"test_nointer\"])],\n)\ndef test_mark_option_custom(\n expr: str, expected_passed: List[str], pytester: Pytester\n) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n def pytest_collection_modifyitems(items):\n for item in items:\n if \"interface\" in item.nodeid:\n item.add_marker(pytest.mark.interface)\n \"\"\"\n )\n pytester.makepyfile(\n \"\"\"\n def test_interface():\n pass\n def test_nointer():\n pass\n \"\"\"\n )\n rec = pytester.inline_run(\"-m\", expr)\n passed, skipped, fail = rec.listoutcomes()\n passed_str = [x.nodeid.split(\"::\")[-1] for x in passed]\n assert passed_str == 
expected_passed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_custom_test_keyword_option_custom.assert_passed_str_expe": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_custom_test_keyword_option_custom.assert_passed_str_expe", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 266, "end_line": 298, "span_ids": ["test_keyword_option_custom"], "tokens": 286}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\n (\"expr\", \"expected_passed\"),\n [\n (\"interface\", [\"test_interface\"]),\n (\"not interface\", [\"test_nointer\", \"test_pass\", \"test_1\", \"test_2\"]),\n (\"pass\", [\"test_pass\"]),\n (\"not pass\", [\"test_interface\", \"test_nointer\", \"test_1\", \"test_2\"]),\n (\"not not not (pass)\", [\"test_interface\", \"test_nointer\", \"test_1\", \"test_2\"]),\n (\"1 or 2\", [\"test_1\", \"test_2\"]),\n (\"not (1 or 2)\", [\"test_interface\", \"test_nointer\", \"test_pass\"]),\n ],\n)\ndef test_keyword_option_custom(\n expr: str, expected_passed: List[str], pytester: Pytester\n) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_interface():\n pass\n def test_nointer():\n pass\n def test_pass():\n pass\n def test_1():\n pass\n def test_2():\n pass\n \"\"\"\n )\n rec = pytester.inline_run(\"-k\", expr)\n passed, skipped, fail = rec.listoutcomes()\n passed_str = [x.nodeid.split(\"::\")[-1] for x in passed]\n assert passed_str == expected_passed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_considers_mark_test_keyword_option_parametrize.assert_passed_str_expe": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_keyword_option_considers_mark_test_keyword_option_parametrize.assert_passed_str_expe", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 301, "end_line": 330, "span_ids": ["test_keyword_option_parametrize", "test_keyword_option_considers_mark"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_keyword_option_considers_mark(pytester: Pytester) -> None:\n pytester.copy_example(\"marks/marks_considered_keywords\")\n rec = pytester.inline_run(\"-k\", \"foo\")\n passed = rec.listoutcomes()[0]\n assert len(passed) == 1\n\n\n@pytest.mark.parametrize(\n (\"expr\", \"expected_passed\"),\n [\n 
(\"None\", [\"test_func[None]\"]),\n (\"[1.3]\", [\"test_func[1.3]\"]),\n (\"2-3\", [\"test_func[2-3]\"]),\n ],\n)\ndef test_keyword_option_parametrize(\n expr: str, expected_passed: List[str], pytester: Pytester\n) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize(\"arg\", [None, 1.3, \"2-3\"])\n def test_func(arg):\n pass\n \"\"\"\n )\n rec = pytester.inline_run(\"-k\", expr)\n passed, skipped, fail = rec.listoutcomes()\n passed_str = [x.nodeid.split(\"::\")[-1] for x in passed]\n assert passed_str == expected_passed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrize_with_module_test_parametrize_with_module.assert_passed_0_nodeid_s": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_test_parametrize_with_module_test_parametrize_with_module.assert_passed_0_nodeid_s", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 333, "end_line": 345, "span_ids": ["test_parametrize_with_module"], "tokens": 107}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parametrize_with_module(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize(\"arg\", [pytest,])\n def test_func(arg):\n pass\n \"\"\"\n )\n rec = pytester.inline_run()\n passed, skipped, fail = rec.listoutcomes()\n expected_id = \"test_func[\" + pytest.__name__ + \"]\"\n assert passed[0].nodeid.split(\"::\")[-1] == expected_id", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_closest_TestFunctional.test_mark_closest.assert_has_own_get_closes": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_mark.py_TestFunctional.test_mark_closest_TestFunctional.test_mark_closest.assert_has_own_get_closes", "embedding": null, "metadata": {"file_path": "testing/test_mark.py", "file_name": "test_mark.py", "file_type": "text/x-python", "category": "test", "start_line": 565, "end_line": 589, "span_ids": ["TestFunctional.test_mark_closest"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFunctional:\n\n def test_mark_closest(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.c(location=\"class\")\n class Test:\n @pytest.mark.c(location=\"function\")\n def test_has_own(self):\n pass\n\n def test_has_inherited(self):\n pass\n\n \"\"\"\n )\n items, rec = pytester.inline_genitems(p)\n has_own, 
has_inherited = items\n has_own_marker = has_own.get_closest_marker(\"c\")\n has_inherited_marker = has_inherited.get_closest_marker(\"c\")\n assert has_own_marker is not None\n assert has_inherited_marker is not None\n assert has_own_marker.kwargs == {\"location\": \"function\"}\n assert has_inherited_marker.kwargs == {\"location\": \"class\"}\n assert has_own.get_closest_marker(\"missing\") is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_meta.py__Test_importing_of_all__": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_meta.py__Test_importing_of_all__", "embedding": null, "metadata": {"file_path": "testing/test_meta.py", "file_name": "test_meta.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 33, "span_ids": ["_modules", "docstring", "imports", "test_no_warnings"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Test importing of all internal packages and modules.\n\nThis ensures all internal packages can be imported without needing the pytest\nnamespace being set, which is critical for the initialization of xdist.\n\"\"\"\nimport pkgutil\nimport subprocess\nimport sys\nfrom typing import List\n\nimport _pytest\nimport pytest\n\n\ndef _modules() -> List[str]:\n pytest_pkg: str = _pytest.__path__ # type: ignore\n return sorted(\n n\n for _, n, _ in pkgutil.walk_packages(pytest_pkg, prefix=_pytest.__name__ + \".\")\n )\n\n\n@pytest.mark.slow\n@pytest.mark.parametrize(\"module\", _modules())\ndef test_no_warnings(module: str) -> None:\n # fmt: off\n subprocess.check_call((\n sys.executable,\n \"-W\", \"error\",\n \"-c\", f\"__import__({module!r})\",\n ))\n # fmt: on", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_Sample_test_issue156_undo_staticmethod.assert_Sample_hello_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_Sample_test_issue156_undo_staticmethod.assert_Sample_hello_", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 340, "end_line": 362, "span_ids": ["Sample", "SampleInherit", "Sample.hello", "test_issue156_undo_staticmethod"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Sample:\n @staticmethod\n def hello() -> bool:\n return True\n\n\nclass SampleInherit(Sample):\n pass\n\n\n@pytest.mark.parametrize(\n \"Sample\",\n [Sample, SampleInherit],\n ids=[\"new\", \"new-inherit\"],\n)\ndef 
test_issue156_undo_staticmethod(Sample: Type[Sample]) -> None:\n monkeypatch = MonkeyPatch()\n\n monkeypatch.setattr(Sample, \"hello\", None)\n assert Sample.hello is None\n\n monkeypatch.undo() # type: ignore[unreachable]\n assert Sample.hello()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_undo_class_descriptors_delattr_test_undo_class_descriptors_delattr.assert_original_world_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_undo_class_descriptors_delattr_test_undo_class_descriptors_delattr.assert_original_world_", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 365, "end_line": 389, "span_ids": ["test_undo_class_descriptors_delattr.SampleParent.hello", "test_undo_class_descriptors_delattr", "test_undo_class_descriptors_delattr.SampleChild", "test_undo_class_descriptors_delattr.SampleParent", "test_undo_class_descriptors_delattr.SampleChild:2"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_undo_class_descriptors_delattr() -> None:\n class SampleParent:\n @classmethod\n def hello(_cls):\n pass\n\n @staticmethod\n def world():\n pass\n\n class SampleChild(SampleParent):\n pass\n\n monkeypatch = MonkeyPatch()\n\n original_hello = SampleChild.hello\n original_world = SampleChild.world\n monkeypatch.delattr(SampleParent, \"hello\")\n monkeypatch.delattr(SampleParent, \"world\")\n assert getattr(SampleParent, \"hello\", None) is None\n assert getattr(SampleParent, \"world\", None) is None\n\n monkeypatch.undo()\n assert original_hello == SampleChild.hello\n assert original_world == SampleChild.world", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_issue1338_name_resolving_test_context_classmethod.assert_A_x_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_monkeypatch.py_test_issue1338_name_resolving_test_context_classmethod.assert_A_x_1", "embedding": null, "metadata": {"file_path": "testing/test_monkeypatch.py", "file_name": "test_monkeypatch.py", "file_type": "text/x-python", "category": "test", "start_line": 392, "end_line": 420, "span_ids": ["test_context_classmethod.A", "test_context_classmethod", "test_context", "test_issue1338_name_resolving", "test_context_classmethod.A:2"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_issue1338_name_resolving() -> None:\n pytest.importorskip(\"requests\")\n monkeypatch = 
MonkeyPatch()\n try:\n monkeypatch.delattr(\"requests.sessions.Session.request\")\n finally:\n monkeypatch.undo()\n\n\ndef test_context() -> None:\n monkeypatch = MonkeyPatch()\n\n import functools\n import inspect\n\n with monkeypatch.context() as m:\n m.setattr(functools, \"partial\", 3)\n assert not inspect.isclass(functools.partial)\n assert inspect.isclass(functools.partial)\n\n\ndef test_context_classmethod() -> None:\n class A:\n x = 1\n\n with MonkeyPatch.context() as m:\n m.setattr(A, \"x\", 2)\n assert A.x == 2\n assert A.x == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_from_pathlib_import_Path_test_iterparentnodeids.assert_result_expected": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_from_pathlib_import_Path_test_iterparentnodeids.assert_result_expected", "embedding": null, "metadata": {"file_path": "testing/test_nodes.py", "file_name": "test_nodes.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 32, "span_ids": ["test_iterparentnodeids", "imports"], "tokens": 337}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from pathlib import Path\nfrom typing import cast\nfrom typing import List\nfrom typing import Type\n\nimport py\n\nimport pytest\nfrom _pytest import nodes\nfrom _pytest.pytester import Pytester\nfrom _pytest.warning_types import PytestWarning\n\n\n@pytest.mark.parametrize(\n (\"nodeid\", \"expected\"),\n (\n (\"\", [\"\"]),\n (\"a\", [\"\", \"a\"]),\n (\"aa/b\", [\"\", \"aa\", \"aa/b\"]),\n (\"a/b/c\", [\"\", \"a\", \"a/b\", \"a/b/c\"]),\n (\"a/bbb/c::D\", [\"\", \"a\", \"a/bbb\", \"a/bbb/c\", \"a/bbb/c::D\"]),\n (\"a/b/c::D::eee\", [\"\", \"a\", \"a/b\", \"a/b/c\", \"a/b/c::D\", \"a/b/c::D::eee\"]),\n # :: considered only at the last component.\n (\"::xx\", [\"\", \"::xx\"]),\n (\"a/b/c::D/d::e\", [\"\", \"a\", \"a/b\", \"a/b/c::D\", \"a/b/c::D/d\", \"a/b/c::D/d::e\"]),\n # : alone is not a separator.\n (\"a/b::D:e:f::g\", [\"\", \"a\", \"a/b\", \"a/b::D:e:f\", \"a/b::D:e:f::g\"]),\n ),\n)\ndef test_iterparentnodeids(nodeid: str, expected: List[str]) -> None:\n result = list(nodes.iterparentnodeids(nodeid))\n assert result == expected", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_node_from_parent_disallowed_arguments_test_node_warn_is_no_longer_only_pytest_warnings.with_pytest_warns_warn_ty.items_0_warn_warn_type_m": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_node_from_parent_disallowed_arguments_test_node_warn_is_no_longer_only_pytest_warnings.with_pytest_warns_warn_ty.items_0_warn_warn_type_m", "embedding": null, "metadata": {"file_path": "testing/test_nodes.py", "file_name": "test_nodes.py", "file_type": "text/x-python", "category": "test", "start_line": 35, "end_line": 55, "span_ids": 
["test_node_warn_is_no_longer_only_pytest_warnings", "test_node_from_parent_disallowed_arguments"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_node_from_parent_disallowed_arguments() -> None:\n with pytest.raises(TypeError, match=\"session is\"):\n nodes.Node.from_parent(None, session=None) # type: ignore[arg-type]\n with pytest.raises(TypeError, match=\"config is\"):\n nodes.Node.from_parent(None, config=None) # type: ignore[arg-type]\n\n\n@pytest.mark.parametrize(\n \"warn_type, msg\", [(DeprecationWarning, \"deprecated\"), (PytestWarning, \"pytest\")]\n)\ndef test_node_warn_is_no_longer_only_pytest_warnings(\n pytester: Pytester, warn_type: Type[Warning], msg: str\n) -> None:\n items = pytester.getitems(\n \"\"\"\n def test():\n pass\n \"\"\"\n )\n with pytest.warns(warn_type, match=msg):\n items[0].warn(warn_type(msg))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_node_warning_enforces_warning_types_test__check_initialpaths_for_relpath.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_nodes.py_test_node_warning_enforces_warning_types_test__check_initialpaths_for_relpath.None_2", "embedding": null, "metadata": {"file_path": "testing/test_nodes.py", "file_name": "test_nodes.py", "file_type": "text/x-python", "category": "test", "start_line": 58, "end_line": 92, "span_ids": ["test__check_initialpaths_for_relpath", "test_node_warning_enforces_warning_types", "test__check_initialpaths_for_relpath.FakeSession1:2", "test__check_initialpaths_for_relpath.FakeSession2", "test__check_initialpaths_for_relpath.FakeSession2:2", "test__check_initialpaths_for_relpath.FakeSession1"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_node_warning_enforces_warning_types(pytester: Pytester) -> None:\n items = pytester.getitems(\n \"\"\"\n def test():\n pass\n \"\"\"\n )\n with pytest.raises(\n ValueError, match=\"warning must be an instance of Warning or subclass\"\n ):\n items[0].warn(Exception(\"ok\")) # type: ignore[arg-type]\n\n\ndef test__check_initialpaths_for_relpath() -> None:\n \"\"\"Ensure that it handles dirs, and does not always use dirname.\"\"\"\n cwd = Path.cwd()\n\n class FakeSession1:\n _initialpaths = frozenset({cwd})\n\n session = cast(pytest.Session, FakeSession1)\n\n assert nodes._check_initialpaths_for_relpath(session, py.path.local(cwd)) == \"\"\n\n sub = cwd / \"file\"\n\n class FakeSession2:\n _initialpaths = frozenset({cwd})\n\n session = cast(pytest.Session, FakeSession2)\n\n assert nodes._check_initialpaths_for_relpath(session, py.path.local(sub)) == \"file\"\n\n outside = py.path.local(\"/outside\")\n assert nodes._check_initialpaths_for_relpath(session, outside) is None", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_io_TestPasteCapture.test_failed.assert_reprec_countoutcom": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pastebin.py_io_TestPasteCapture.test_failed.assert_reprec_countoutcom", "embedding": null, "metadata": {"file_path": "testing/test_pastebin.py", "file_name": "test_pastebin.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 34, "span_ids": ["TestPasteCapture.pastebinlist", "TestPasteCapture.test_failed", "imports", "TestPasteCapture"], "tokens": 254}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import io\nfrom typing import List\nfrom typing import Union\n\nimport pytest\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\n\nclass TestPasteCapture:\n @pytest.fixture\n def pastebinlist(self, monkeypatch, request) -> List[Union[str, bytes]]:\n pastebinlist: List[Union[str, bytes]] = []\n plugin = request.config.pluginmanager.getplugin(\"pastebin\")\n monkeypatch.setattr(plugin, \"create_new_paste\", pastebinlist.append)\n return pastebinlist\n\n def test_failed(self, pytester: Pytester, pastebinlist) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_pass() -> None:\n pass\n def test_fail():\n assert 0\n def test_skip():\n pytest.skip(\"\")\n \"\"\"\n )\n reprec = pytester.inline_run(testpath, \"--pastebin=failed\")\n assert len(pastebinlist) == 1\n s = pastebinlist[0]\n assert s.find(\"def test_fail\") != -1\n assert reprec.countoutcomes() == [1, 1, 1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_os.path_from__pytest_tmpdir_impor": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_os.path_from__pytest_tmpdir_impor", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 26, "span_ids": ["imports"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os.path\nimport pickle\nimport sys\nimport unittest.mock\nfrom pathlib import Path\nfrom textwrap import dedent\nfrom types import ModuleType\nfrom typing import Generator\n\nimport py\n\nimport pytest\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pathlib import bestrelpath\nfrom _pytest.pathlib import commonpath\nfrom _pytest.pathlib import ensure_deletable\nfrom _pytest.pathlib import fnmatch_ex\nfrom _pytest.pathlib import get_extended_length_path_str\nfrom _pytest.pathlib import 
get_lock_path\nfrom _pytest.pathlib import import_path\nfrom _pytest.pathlib import ImportPathMismatchError\nfrom _pytest.pathlib import maybe_delete_a_numbered_dir\nfrom _pytest.pathlib import resolve_package_path\nfrom _pytest.pathlib import symlink_or_skip\nfrom _pytest.pathlib import visit\nfrom _pytest.tmpdir import TempPathFactory", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath_TestImportPath.path1.assert_path_joinpath_sam": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath_TestImportPath.path1.assert_path_joinpath_sam", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 94, "end_line": 107, "span_ids": ["TestImportPath.path1", "TestImportPath"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n \"\"\"\n\n Most of the tests here were copied from py lib's tests for \"py.local.path.pyimport\".\n\n Having our own pyimport-like function is inline with removing py.path dependency in the future.\n \"\"\"\n\n @pytest.fixture(scope=\"session\")\n def path1(self, tmp_path_factory: TempPathFactory) -> Generator[Path, None, None]:\n path = tmp_path_factory.mktemp(\"path\")\n self.setuptestfs(path)\n yield path\n assert path.joinpath(\"samplefile\").exists()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.setuptestfs_TestImportPath.setuptestfs.module_d_write_text_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.setuptestfs_TestImportPath.setuptestfs.module_d_write_text_", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 109, "end_line": 154, "span_ids": ["TestImportPath.setuptestfs"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n\n def setuptestfs(self, path: Path) -> None:\n # print \"setting up test fs for\", repr(path)\n samplefile = path / \"samplefile\"\n samplefile.write_text(\"samplefile\\n\")\n\n execfile = path / \"execfile\"\n execfile.write_text(\"x=42\")\n\n execfilepy = path / \"execfile.py\"\n execfilepy.write_text(\"x=42\")\n\n d = {1: 2, \"hello\": \"world\", \"answer\": 42}\n path.joinpath(\"samplepickle\").write_bytes(pickle.dumps(d, 1))\n\n sampledir = path / \"sampledir\"\n sampledir.mkdir()\n 
sampledir.joinpath(\"otherfile\").touch()\n\n otherdir = path / \"otherdir\"\n otherdir.mkdir()\n otherdir.joinpath(\"__init__.py\").touch()\n\n module_a = otherdir / \"a.py\"\n module_a.write_text(\"from .b import stuff as result\\n\")\n module_b = otherdir / \"b.py\"\n module_b.write_text('stuff=\"got it\"\\n')\n module_c = otherdir / \"c.py\"\n module_c.write_text(\n dedent(\n \"\"\"\n import py;\n import otherdir.a\n value = otherdir.a.result\n \"\"\"\n )\n )\n module_d = otherdir / \"d.py\"\n module_d.write_text(\n dedent(\n \"\"\"\n import py;\n from otherdir import a\n value2 = a.result\n \"\"\"\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_smoke_test_TestImportPath.test_renamed_dir_creates_mismatch.None_1.import_path_tmp_path_join": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_smoke_test_TestImportPath.test_renamed_dir_creates_mismatch.None_1.import_path_tmp_path_join", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 156, "end_line": 179, "span_ids": ["TestImportPath.test_renamed_dir_creates_mismatch", "TestImportPath.test_smoke_test"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n\n def test_smoke_test(self, path1: Path) -> None:\n obj = import_path(path1 / \"execfile.py\")\n assert obj.x == 42 # type: ignore[attr-defined]\n assert obj.__name__ == \"execfile\"\n\n def test_renamed_dir_creates_mismatch(\n self, tmp_path: Path, monkeypatch: MonkeyPatch\n ) -> None:\n tmp_path.joinpath(\"a\").mkdir()\n p = tmp_path.joinpath(\"a\", \"test_x123.py\")\n p.touch()\n import_path(p)\n tmp_path.joinpath(\"a\").rename(tmp_path.joinpath(\"b\"))\n with pytest.raises(ImportPathMismatchError):\n import_path(tmp_path.joinpath(\"b\", \"test_x123.py\"))\n\n # Errors can be ignored.\n monkeypatch.setenv(\"PY_IGNORE_IMPORTMISMATCH\", \"1\")\n import_path(tmp_path.joinpath(\"b\", \"test_x123.py\"))\n\n # PY_IGNORE_IMPORTMISMATCH=0 does not ignore error.\n monkeypatch.setenv(\"PY_IGNORE_IMPORTMISMATCH\", \"0\")\n with pytest.raises(ImportPathMismatchError):\n import_path(tmp_path.joinpath(\"b\", \"test_x123.py\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_issue131_on__init___TestImportPath.test_issue131_on__init__.assert_m1_m2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_issue131_on__init___TestImportPath.test_issue131_on__init__.assert_m1_m2", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 259, "end_line": 271, "span_ids": 
["TestImportPath.test_issue131_on__init__"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n\n def test_issue131_on__init__(self, tmp_path: Path) -> None:\n # __init__.py files may be namespace packages, and thus the\n # __file__ of an imported module may not be ourselves\n # see issue\n tmp_path.joinpath(\"proja\").mkdir()\n p1 = tmp_path.joinpath(\"proja\", \"__init__.py\")\n p1.touch()\n tmp_path.joinpath(\"sub\", \"proja\").mkdir(parents=True)\n p2 = tmp_path.joinpath(\"sub\", \"proja\", \"__init__.py\")\n p2.touch()\n m1 = import_path(p1)\n m2 = import_path(p2)\n assert m1 == m2", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_ensuresyspath_append_TestImportPath.test_importmode_twice_is_different_module.assert_module1_is_not_mod": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_TestImportPath.test_ensuresyspath_append_TestImportPath.test_importmode_twice_is_different_module.assert_module1_is_not_mod", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 273, "end_line": 309, "span_ids": ["TestImportPath.test_importmode_importlib", "TestImportPath.test_ensuresyspath_append", "TestImportPath.test_invalid_path", "TestImportPath.simple_module", "TestImportPath.test_importmode_twice_is_different_module"], "tokens": 345}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestImportPath:\n\n def test_ensuresyspath_append(self, tmp_path: Path) -> None:\n root1 = tmp_path / \"root1\"\n root1.mkdir()\n file1 = root1 / \"x123.py\"\n file1.touch()\n assert str(root1) not in sys.path\n import_path(file1, mode=\"append\")\n assert str(root1) == sys.path[-1]\n assert str(root1) not in sys.path[:-1]\n\n def test_invalid_path(self, tmp_path: Path) -> None:\n with pytest.raises(ImportError):\n import_path(tmp_path / \"invalid.py\")\n\n @pytest.fixture\n def simple_module(self, tmp_path: Path) -> Path:\n fn = tmp_path / \"mymod.py\"\n fn.write_text(\n dedent(\n \"\"\"\n def foo(x): return 40 + x\n \"\"\"\n )\n )\n return fn\n\n def test_importmode_importlib(self, simple_module: Path) -> None:\n \"\"\"`importlib` mode does not change sys.path.\"\"\"\n module = import_path(simple_module, mode=\"importlib\")\n assert module.foo(2) == 42 # type: ignore[attr-defined]\n assert str(simple_module.parent) not in sys.path\n\n def test_importmode_twice_is_different_module(self, simple_module: Path) -> None:\n \"\"\"`importlib` mode always returns a new module.\"\"\"\n module1 = import_path(simple_module, mode=\"importlib\")\n module2 = import_path(simple_module, mode=\"importlib\")\n assert module1 is not module2", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_resolve_package_path_test_package_unimportable.assert_not_resolve_packag": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_resolve_package_path_test_package_unimportable.assert_not_resolve_packag", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 329, "end_line": 350, "span_ids": ["test_package_unimportable", "test_resolve_package_path"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_resolve_package_path(tmp_path: Path) -> None:\n pkg = tmp_path / \"pkg1\"\n pkg.mkdir()\n (pkg / \"__init__.py\").touch()\n (pkg / \"subdir\").mkdir()\n (pkg / \"subdir/__init__.py\").touch()\n assert resolve_package_path(pkg) == pkg\n assert resolve_package_path(pkg.joinpath(\"subdir\", \"__init__.py\")) == pkg\n\n\ndef test_package_unimportable(tmp_path: Path) -> None:\n pkg = tmp_path / \"pkg1-1\"\n pkg.mkdir()\n pkg.joinpath(\"__init__.py\").touch()\n subdir = pkg.joinpath(\"subdir\")\n subdir.mkdir()\n pkg.joinpath(\"subdir/__init__.py\").touch()\n assert resolve_package_path(subdir) == subdir\n xyz = subdir.joinpath(\"xyz.py\")\n xyz.touch()\n assert resolve_package_path(xyz) == subdir\n assert not resolve_package_path(pkg)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_access_denied_during_cleanup_test_access_denied_during_cleanup.assert_not_lock_path_is_f": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_access_denied_during_cleanup_test_access_denied_during_cleanup.assert_not_lock_path_is_f", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 353, "end_line": 365, "span_ids": ["test_access_denied_during_cleanup"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_access_denied_during_cleanup(tmp_path: Path, monkeypatch: MonkeyPatch) -> None:\n \"\"\"Ensure that deleting a numbered dir does not fail because of OSErrors (#4262).\"\"\"\n path = tmp_path / \"temp-1\"\n path.mkdir()\n\n def renamed_failed(*args):\n raise OSError(\"access denied\")\n\n monkeypatch.setattr(Path, \"rename\", renamed_failed)\n\n lock_path = get_lock_path(path)\n maybe_delete_a_numbered_dir(path)\n assert not lock_path.is_file()", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_suppress_error_removing_lock_test_suppress_error_removing_lock.assert_not_lock_is_file_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_suppress_error_removing_lock_test_suppress_error_removing_lock.assert_not_lock_is_file_", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 391, "end_line": 415, "span_ids": ["test_suppress_error_removing_lock"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_suppress_error_removing_lock(tmp_path: Path) -> None:\n \"\"\"ensure_deletable should be resilient if lock file cannot be removed (#5456, #7491)\"\"\"\n path = tmp_path / \"dir\"\n path.mkdir()\n lock = get_lock_path(path)\n lock.touch()\n mtime = lock.stat().st_mtime\n\n with unittest.mock.patch.object(Path, \"unlink\", side_effect=OSError) as m:\n assert not ensure_deletable(\n path, consider_lock_dead_if_created_before=mtime + 30\n )\n assert m.call_count == 1\n assert lock.is_file()\n\n with unittest.mock.patch.object(Path, \"is_file\", side_effect=OSError) as m:\n assert not ensure_deletable(\n path, consider_lock_dead_if_created_before=mtime + 30\n )\n assert m.call_count == 1\n assert lock.is_file()\n\n # check now that we can remove the lock file in normal circumstances\n assert ensure_deletable(path, consider_lock_dead_if_created_before=mtime + 30)\n assert not lock.is_file()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_bestrelpath_test_bestrelpath.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_bestrelpath_test_bestrelpath.None_4", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 418, "end_line": 424, "span_ids": ["test_bestrelpath"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_bestrelpath() -> None:\n curdir = Path(\"/foo/bar/baz/path\")\n assert bestrelpath(curdir, curdir) == \".\"\n assert bestrelpath(curdir, curdir / \"hello\" / \"world\") == \"hello\" + os.sep + \"world\"\n assert bestrelpath(curdir, curdir.parent / \"sister\") == \"..\" + os.sep + \"sister\"\n assert bestrelpath(curdir, curdir.parent) == \"..\"\n assert bestrelpath(curdir, Path(\"hello\")) == \"hello\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_commonpath_test_visit_ignores_errors.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_commonpath_test_visit_ignores_errors.assert_", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 427, "end_line": 443, "span_ids": ["test_commonpath", "test_visit_ignores_errors"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_commonpath() -> None:\n path = Path(\"/foo/bar/baz/path\")\n subpath = path / \"sampledir\"\n assert commonpath(path, subpath) == path\n assert commonpath(subpath, path) == path\n assert commonpath(Path(str(path) + \"suffix\"), path) == path.parent\n assert commonpath(path, path.parent.parent) == path.parent.parent\n\n\ndef test_visit_ignores_errors(tmp_path: Path) -> None:\n symlink_or_skip(\"recursive\", tmp_path / \"recursive\")\n tmp_path.joinpath(\"foo\").write_bytes(b\"\")\n tmp_path.joinpath(\"bar\").write_bytes(b\"\")\n\n assert [\n entry.name for entry in visit(str(tmp_path), recurse=lambda entry: False)\n ] == [\"bar\", \"foo\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_samefile_false_negatives_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pathlib.py_test_samefile_false_negatives_", "embedding": null, "metadata": {"file_path": "testing/test_pathlib.py", "file_name": "test_pathlib.py", "file_type": "text/x-python", "category": "test", "start_line": 446, "end_line": 464, "span_ids": ["test_samefile_false_negatives"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(not sys.platform.startswith(\"win\"), reason=\"Windows only\")\ndef test_samefile_false_negatives(tmp_path: Path, monkeypatch: MonkeyPatch) -> None:\n \"\"\"\n import_file() should not raise ImportPathMismatchError if the paths are exactly\n equal on Windows. It seems directories mounted as UNC paths make os.path.samefile\n return False, even when they are clearly equal.\n \"\"\"\n module_path = tmp_path.joinpath(\"my_module.py\")\n module_path.write_text(\"def foo(): return 42\")\n monkeypatch.syspath_prepend(tmp_path)\n\n with monkeypatch.context() as mp:\n # Forcibly make os.path.samefile() return False here to ensure we are comparing\n # the paths too. 
Using a context to narrow the patch as much as possible given\n # this is an important system function.\n mp.setattr(os.path, \"samefile\", lambda x, y: False)\n module = import_path(module_path)\n assert getattr(module, \"foo\")() == 42", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_os_None_18": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_os_None_18", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 20, "span_ids": ["imports"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport subprocess\nimport sys\nimport time\nfrom pathlib import Path\nfrom types import ModuleType\nfrom typing import List\n\nimport _pytest.pytester as pytester_mod\nimport pytest\nfrom _pytest.config import ExitCode\nfrom _pytest.config import PytestPluginManager\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import CwdSnapshot\nfrom _pytest.pytester import HookRecorder\nfrom _pytest.pytester import LineMatcher\nfrom _pytest.pytester import Pytester\nfrom _pytest.pytester import SysModulesSnapshot\nfrom _pytest.pytester import SysPathsSnapshot\nfrom _pytest.pytester import Testdir", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_make_hook_recorder_test_make_hook_recorder.pytest_raises_ValueError_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_make_hook_recorder_test_make_hook_recorder.pytest_raises_ValueError_", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 23, "end_line": 75, "span_ids": ["test_make_hook_recorder.rep2", "test_make_hook_recorder", "test_make_hook_recorder.rep2:2", "test_make_hook_recorder.rep:2", "test_make_hook_recorder.rep"], "tokens": 469}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_make_hook_recorder(pytester: Pytester) -> None:\n item = pytester.getitem(\"def test_func(): pass\")\n recorder = pytester.make_hook_recorder(item.config.pluginmanager)\n assert not recorder.getfailures()\n\n # (The silly condition is to fool mypy that the code below this is reachable)\n if 1 + 1 == 2:\n pytest.xfail(\"internal reportrecorder tests need refactoring\")\n\n class rep:\n excinfo = None\n passed = False\n failed = True\n skipped = False\n when = \"call\"\n\n 
recorder.hook.pytest_runtest_logreport(report=rep) # type: ignore[attr-defined]\n failures = recorder.getfailures()\n assert failures == [rep] # type: ignore[comparison-overlap]\n failures = recorder.getfailures()\n assert failures == [rep] # type: ignore[comparison-overlap]\n\n class rep2:\n excinfo = None\n passed = False\n failed = False\n skipped = True\n when = \"call\"\n\n rep2.passed = False\n rep2.skipped = True\n recorder.hook.pytest_runtest_logreport(report=rep2) # type: ignore[attr-defined]\n\n modcol = pytester.getmodulecol(\"\")\n rep3 = modcol.config.hook.pytest_make_collect_report(collector=modcol)\n rep3.passed = False\n rep3.failed = True\n rep3.skipped = False\n recorder.hook.pytest_collectreport(report=rep3) # type: ignore[attr-defined]\n\n passed, skipped, failed = recorder.listoutcomes()\n assert not passed and skipped and failed\n\n numpassed, numskipped, numfailed = recorder.countoutcomes()\n assert numpassed == 0\n assert numskipped == 1\n assert numfailed == 1\n assert len(recorder.getfailedcollections()) == 1\n\n recorder.unregister() # type: ignore[attr-defined]\n recorder.clear()\n recorder.hook.pytest_runtest_logreport(report=rep3) # type: ignore[attr-defined]\n pytest.raises(ValueError, recorder.getfailures)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_parseconfig_test_pytester_runs_with_plugin.result_assert_outcomes_pa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_parseconfig_test_pytester_runs_with_plugin.result_assert_outcomes_pa", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 78, "end_line": 93, "span_ids": ["test_pytester_runs_with_plugin", "test_parseconfig"], "tokens": 107}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parseconfig(pytester: Pytester) -> None:\n config1 = pytester.parseconfig()\n config2 = pytester.parseconfig()\n assert config2 is not config1\n\n\ndef test_pytester_runs_with_plugin(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n pytest_plugins = \"pytester\"\n def test_hello(pytester):\n assert 1\n \"\"\"\n )\n result = pytester.runpytest()\n result.assert_outcomes(passed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_with_doctest_test_pytester_with_doctest.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_with_doctest_test_pytester_with_doctest.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 96, "end_line": 116, "span_ids": ["test_pytester_with_doctest"], "tokens": 172}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytester_with_doctest(pytester: Pytester) -> None:\n \"\"\"Check that pytester can be used within doctests.\n\n It used to use `request.function`, which is `None` with doctests.\"\"\"\n pytester.makepyfile(\n **{\n \"sub/t-doctest.py\": \"\"\"\n '''\n >>> import os\n >>> pytester = getfixture(\"pytester\")\n >>> str(pytester.makepyfile(\"content\")).replace(os.sep, '/')\n '.../basetemp/sub.t-doctest0/sub.py'\n '''\n \"\"\",\n \"sub/__init__.py\": \"\",\n }\n )\n result = pytester.runpytest(\n \"-p\", \"pytester\", \"--doctest-modules\", \"sub/t-doctest.py\"\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysPathsSnapshot.test_preserve_container_test_pytester_subprocess.assert_pytester_runpytest": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_TestSysPathsSnapshot.test_preserve_container_test_pytester_subprocess.assert_pytester_runpytest", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 410, "end_line": 427, "span_ids": ["TestSysPathsSnapshot.test_preserve_container", "test_pytester_subprocess"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"path_type\", (\"path\", \"meta_path\"))\nclass TestSysPathsSnapshot:\n\n def test_preserve_container(self, monkeypatch: MonkeyPatch, path_type) -> None:\n other_path_type = self.other_path[path_type]\n original_data = list(getattr(sys, path_type))\n original_other = getattr(sys, other_path_type)\n original_other_data = list(original_other)\n new: List[object] = []\n snapshot = SysPathsSnapshot()\n monkeypatch.setattr(sys, path_type, new)\n snapshot.restore()\n assert getattr(sys, path_type) is new\n assert getattr(sys, path_type) == original_data\n assert getattr(sys, other_path_type) is original_other\n assert getattr(sys, other_path_type) == original_other_data\n\n\ndef test_pytester_subprocess(pytester: Pytester) -> None:\n testfile = pytester.makepyfile(\"def test_one(): pass\")\n assert pytester.runpytest_subprocess(testfile).ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_subprocess_via_runpytest_arg_test_pytester_subprocess_via_runpytest_arg.assert_result_ret_0": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_subprocess_via_runpytest_arg_test_pytester_subprocess_via_runpytest_arg.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 430, "end_line": 448, "span_ids": ["test_pytester_subprocess_via_runpytest_arg"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytester_subprocess_via_runpytest_arg(pytester: Pytester) -> None:\n testfile = pytester.makepyfile(\n \"\"\"\n def test_pytester_subprocess(pytester):\n import os\n testfile = pytester.makepyfile(\n \\\"\"\"\n import os\n def test_one():\n assert {} != os.getpid()\n \\\"\"\".format(os.getpid())\n )\n assert pytester.runpytest(testfile).ret == 0\n \"\"\"\n )\n result = pytester.runpytest_inprocess(\n \"-p\", \"pytester\", \"--runpytest\", \"subprocess\", testfile\n )\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_unicode_args_test_pytester_run_timeout_expires.with_pytest_raises_pytest.pytester_runpytest_subpro": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_unicode_args_test_pytester_run_timeout_expires.with_pytest_raises_pytest.pytester_runpytest_subpro", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 451, "end_line": 484, "span_ids": ["test_pytester_run_with_timeout", "test_unicode_args", "test_pytester_run_no_timeout", "test_pytester_run_timeout_expires"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_unicode_args(pytester: Pytester) -> None:\n result = pytester.runpytest(\"-k\", \"\u05d0\u05d1\u05d2\")\n assert result.ret == ExitCode.NO_TESTS_COLLECTED\n\n\ndef test_pytester_run_no_timeout(pytester: Pytester) -> None:\n testfile = pytester.makepyfile(\"def test_no_timeout(): pass\")\n assert pytester.runpytest_subprocess(testfile).ret == ExitCode.OK\n\n\ndef test_pytester_run_with_timeout(pytester: Pytester) -> None:\n testfile = pytester.makepyfile(\"def test_no_timeout(): pass\")\n\n timeout = 120\n\n start = time.time()\n result = pytester.runpytest_subprocess(testfile, timeout=timeout)\n end = time.time()\n duration = end - start\n\n assert result.ret == ExitCode.OK\n assert duration < timeout\n\n\ndef test_pytester_run_timeout_expires(pytester: Pytester) -> None:\n testfile = pytester.makepyfile(\n \"\"\"\n import time\n\n def test_timeout():\n time.sleep(10)\"\"\"\n )\n with pytest.raises(pytester.TimeoutExpired):\n pytester.runpytest_subprocess(testfile, timeout=1)", "start_char_idx": 
null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_no_matching_after_match_test_linematcher_string_api.assert_str_lm_foo_nb": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_linematcher_no_matching_after_match_test_linematcher_string_api.assert_str_lm_foo_nb", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 608, "end_line": 618, "span_ids": ["test_linematcher_no_matching_after_match", "test_linematcher_string_api"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_linematcher_no_matching_after_match() -> None:\n lm = LineMatcher([\"1\", \"2\", \"3\"])\n lm.fnmatch_lines([\"1\", \"3\"])\n with pytest.raises(pytest.fail.Exception) as e:\n lm.no_fnmatch_line(\"*\")\n assert str(e.value).splitlines() == [\"fnmatch: '*'\", \" with: '1'\"]\n\n\ndef test_linematcher_string_api() -> None:\n lm = LineMatcher([\"foo\", \"bar\"])\n assert str(lm) == \"foo\\nbar\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_addopts_before_testdir_test_pytester_addopts_before_testdir.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_addopts_before_testdir_test_pytester_addopts_before_testdir.None_2", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 621, "end_line": 629, "span_ids": ["test_pytester_addopts_before_testdir"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytester_addopts_before_testdir(request, monkeypatch: MonkeyPatch) -> None:\n orig = os.environ.get(\"PYTEST_ADDOPTS\", None)\n monkeypatch.setenv(\"PYTEST_ADDOPTS\", \"--orig-unused\")\n testdir = request.getfixturevalue(\"testdir\")\n assert \"PYTEST_ADDOPTS\" not in os.environ\n testdir.finalize()\n assert os.environ.get(\"PYTEST_ADDOPTS\") == \"--orig-unused\"\n monkeypatch.undo()\n assert os.environ.get(\"PYTEST_ADDOPTS\") == orig", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_outcomes_with_multiple_errors_test_pytester_outcomes_with_multiple_errors.assert_result_parseoutcom": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_pytester_outcomes_with_multiple_errors_test_pytester_outcomes_with_multiple_errors.assert_result_parseoutcom", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 760, "end_line": 779, "span_ids": ["test_pytester_outcomes_with_multiple_errors"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytester_outcomes_with_multiple_errors(pytester: Pytester) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def bad_fixture():\n raise Exception(\"bad\")\n\n def test_error1(bad_fixture):\n pass\n\n def test_error2(bad_fixture):\n pass\n \"\"\"\n )\n result = pytester.runpytest(str(p1))\n result.assert_outcomes(errors=2)\n\n assert result.parseoutcomes() == {\"errors\": 2}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_parse_summary_line_always_plural_test_parse_summary_line_always_plural.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_parse_summary_line_always_plural_test_parse_summary_line_always_plural.None_1", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", "file_type": "text/x-python", "category": "test", "start_line": 782, "end_line": 808, "span_ids": ["test_parse_summary_line_always_plural"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_parse_summary_line_always_plural() -> None:\n \"\"\"Parsing summaries always returns plural nouns (#6505)\"\"\"\n lines = [\n \"some output 1\",\n \"some output 2\",\n \"======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ====\",\n \"done.\",\n ]\n assert pytester_mod.RunResult.parse_summary_nouns(lines) == {\n \"errors\": 1,\n \"failed\": 1,\n \"passed\": 1,\n \"warnings\": 1,\n }\n\n lines = [\n \"some output 1\",\n \"some output 2\",\n \"======= 1 failed, 1 passed, 2 warnings, 2 errors in 0.13s ====\",\n \"done.\",\n ]\n assert pytester_mod.RunResult.parse_summary_nouns(lines) == {\n \"errors\": 2,\n \"failed\": 1,\n \"passed\": 1,\n \"warnings\": 2,\n }", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_makefile_joins_absolute_path_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_pytester.py_test_makefile_joins_absolute_path_", "embedding": null, "metadata": {"file_path": "testing/test_pytester.py", "file_name": "test_pytester.py", 
"file_type": "text/x-python", "category": "test", "start_line": 811, "end_line": 850, "span_ids": ["test_pytester_makefile_dot_prefixes_extension_with_warning", "test_testdir_makefile_dot_prefixes_extension_silently", "test_makefile_joins_absolute_path", "test_testdir_makefile_ext_none_raises_type_error", "test_testtmproot", "test_testdir_makefile_ext_empty_string_makes_file"], "tokens": 320}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_makefile_joins_absolute_path(pytester: Pytester) -> None:\n absfile = pytester.path / \"absfile\"\n p1 = pytester.makepyfile(**{str(absfile): \"\"})\n assert str(p1) == str(pytester.path / \"absfile.py\")\n\n\ndef test_testtmproot(testdir) -> None:\n \"\"\"Check test_tmproot is a py.path attribute for backward compatibility.\"\"\"\n assert testdir.test_tmproot.check(dir=1)\n\n\ndef test_testdir_makefile_dot_prefixes_extension_silently(\n testdir: Testdir,\n) -> None:\n \"\"\"For backwards compat #8192\"\"\"\n p1 = testdir.makefile(\"foo.bar\", \"\")\n assert \".foo.bar\" in str(p1)\n\n\ndef test_pytester_makefile_dot_prefixes_extension_with_warning(\n pytester: Pytester,\n) -> None:\n with pytest.raises(\n ValueError,\n match=\"pytester.makefile expects a file extension, try .foo.bar instead of foo.bar\",\n ):\n pytester.makefile(\"foo.bar\", \"\")\n\n\ndef test_testdir_makefile_ext_none_raises_type_error(testdir) -> None:\n \"\"\"For backwards compat #8192\"\"\"\n with pytest.raises(TypeError):\n testdir.makefile(None, \"\")\n\n\ndef test_testdir_makefile_ext_empty_string_makes_file(testdir) -> None:\n \"\"\"For backwards compat #8192\"\"\"\n p1 = testdir.makefile(\"\", \"\")\n assert \"test_testdir_makefile\" in str(p1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarningsRecorderChecker.test_warn_stacklevel_TestWarningsRecorderChecker.test_invalid_enter_exit.with_WarningsRecorder__is.None_1.with_rec_.with_rec_._can_t_enter_twice": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_recwarn.py_TestWarningsRecorderChecker.test_warn_stacklevel_TestWarningsRecorderChecker.test_invalid_enter_exit.with_WarningsRecorder__is.None_1.with_rec_.with_rec_._can_t_enter_twice", "embedding": null, "metadata": {"file_path": "testing/test_recwarn.py", "file_name": "test_recwarn.py", "file_type": "text/x-python", "category": "test", "start_line": 47, "end_line": 74, "span_ids": ["TestWarningsRecorderChecker.test_warn_stacklevel", "TestWarningsRecorderChecker.test_typechecking", "TestWarningsRecorderChecker.test_invalid_enter_exit"], "tokens": 268}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestWarningsRecorderChecker:\n\n def test_warn_stacklevel(self) -> None:\n \"\"\"#4243\"\"\"\n rec = WarningsRecorder(_ispytest=True)\n with rec:\n 
warnings.warn(\"test\", DeprecationWarning, 2)\n\n def test_typechecking(self) -> None:\n from _pytest.recwarn import WarningsChecker\n\n with pytest.raises(TypeError):\n WarningsChecker(5, _ispytest=True) # type: ignore[arg-type]\n with pytest.raises(TypeError):\n WarningsChecker((\"hi\", RuntimeWarning), _ispytest=True) # type: ignore[arg-type]\n with pytest.raises(TypeError):\n WarningsChecker([DeprecationWarning, RuntimeWarning], _ispytest=True) # type: ignore[arg-type]\n\n def test_invalid_enter_exit(self) -> None:\n # wrap this test in WarningsRecorder to ensure warning state gets reset\n with WarningsRecorder(_ispytest=True):\n with pytest.raises(RuntimeError):\n rec = WarningsRecorder(_ispytest=True)\n rec.__exit__(None, None, None) # can't exit before entering\n\n with pytest.raises(RuntimeError):\n rec = WarningsRecorder(_ispytest=True)\n with rec:\n with rec:\n pass # can't enter twice", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_from_typing_import_Sequen_TestReportSerialization.test_xdist_longrepr_to_str_issue_241.assert_test_b_call__to_js": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_from_typing_import_Sequen_TestReportSerialization.test_xdist_longrepr_to_str_issue_241.assert_test_b_call__to_js", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 37, "span_ids": ["TestReportSerialization", "imports", "TestReportSerialization.test_xdist_longrepr_to_str_issue_241"], "tokens": 288}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Sequence\nfrom typing import Union\n\nimport py.path\n\nimport pytest\nfrom _pytest._code.code import ExceptionChainRepr\nfrom _pytest._code.code import ExceptionRepr\nfrom _pytest.config import Config\nfrom _pytest.pytester import Pytester\nfrom _pytest.reports import CollectReport\nfrom _pytest.reports import TestReport\n\n\nclass TestReportSerialization:\n def test_xdist_longrepr_to_str_issue_241(self, pytester: Pytester) -> None:\n \"\"\"Regarding issue pytest-xdist#241.\n\n This test came originally from test_remote.py in xdist (ca03269).\n \"\"\"\n pytester.makepyfile(\n \"\"\"\n def test_a(): assert False\n def test_b(): pass\n \"\"\"\n )\n reprec = pytester.inline_run()\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 6\n test_a_call = reports[1]\n assert test_a_call.when == \"call\"\n assert test_a_call.outcome == \"failed\"\n assert test_a_call._to_json()[\"longrepr\"][\"reprtraceback\"][\"style\"] == \"long\"\n test_b_call = reports[4]\n assert test_b_call.when == \"call\"\n assert test_b_call.outcome == \"passed\"\n assert test_b_call._to_json()[\"longrepr\"] is None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_reprentries_serialization_170_TestReportSerialization.test_reprentries_serialization_170.for_i_in_range_len_a_entr.assert_rep_entry_style_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_reprentries_serialization_170_TestReportSerialization.test_reprentries_serialization_170.for_i_in_range_len_a_entr.assert_rep_entry_style_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 78, "end_line": 122, "span_ids": ["TestReportSerialization.test_reprentries_serialization_170"], "tokens": 430}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_reprentries_serialization_170(self, pytester: Pytester) -> None:\n \"\"\"Regarding issue pytest-xdist#170\n\n This test came originally from test_remote.py in xdist (ca03269).\n \"\"\"\n from _pytest._code.code import ReprEntry\n\n reprec = pytester.inline_runsource(\n \"\"\"\n def test_repr_entry():\n x = 0\n assert x\n \"\"\",\n \"--showlocals\",\n )\n reports = reprec.getreports(\"pytest_runtest_logreport\")\n assert len(reports) == 3\n rep = reports[1]\n assert isinstance(rep.longrepr, ExceptionRepr)\n d = rep._to_json()\n a = TestReport._from_json(d)\n assert isinstance(a.longrepr, ExceptionRepr)\n\n rep_entries = rep.longrepr.reprtraceback.reprentries\n a_entries = a.longrepr.reprtraceback.reprentries\n for i in range(len(a_entries)):\n rep_entry = rep_entries[i]\n assert isinstance(rep_entry, ReprEntry)\n assert rep_entry.reprfileloc is not None\n assert rep_entry.reprfuncargs is not None\n assert rep_entry.reprlocals is not None\n\n a_entry = a_entries[i]\n assert isinstance(a_entry, ReprEntry)\n assert a_entry.reprfileloc is not None\n assert a_entry.reprfuncargs is not None\n assert a_entry.reprlocals is not None\n\n assert rep_entry.lines == a_entry.lines\n assert rep_entry.reprfileloc.lineno == a_entry.reprfileloc.lineno\n assert rep_entry.reprfileloc.message == a_entry.reprfileloc.message\n assert rep_entry.reprfileloc.path == a_entry.reprfileloc.path\n assert rep_entry.reprfuncargs.args == a_entry.reprfuncargs.args\n assert rep_entry.reprlocals.lines == a_entry.reprlocals.lines\n assert rep_entry.style == a_entry.style", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash._3_reports_setup_call_t_TestReportSerialization.test_chained_exceptions_no_reprcrash.loaded_report_longrepr_to": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_chained_exceptions_no_reprcrash._3_reports_setup_call_t_TestReportSerialization.test_chained_exceptions_no_reprcrash.loaded_report_longrepr_to", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", 
"category": "test", "start_line": 383, "end_line": 399, "span_ids": ["TestReportSerialization.test_chained_exceptions_no_reprcrash"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_chained_exceptions_no_reprcrash(self, pytester: Pytester, tw_mock) -> None:\n\n # 3 reports: setup/call/teardown: get the call report\n assert len(reports) == 3\n report = reports[1]\n\n assert report.failed\n check_longrepr(report.longrepr)\n\n data = report._to_json()\n loaded_report = TestReport._from_json(data)\n\n assert loaded_report.failed\n check_longrepr(loaded_report.longrepr)\n\n # for same reasons as previous test, ensure we don't blow up here\n assert loaded_report.longrepr is not None\n assert isinstance(loaded_report.longrepr, ExceptionChainRepr)\n loaded_report.longrepr.toterminal(tw_mock)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_report_prevent_ConftestImportFailure_hiding_exception_TestReportSerialization.test_report_prevent_ConftestImportFailure_hiding_exception.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_reports.py_TestReportSerialization.test_report_prevent_ConftestImportFailure_hiding_exception_TestReportSerialization.test_report_prevent_ConftestImportFailure_hiding_exception.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_reports.py", "file_name": "test_reports.py", "file_type": "text/x-python", "category": "test", "start_line": 401, "end_line": 410, "span_ids": ["TestReportSerialization.test_report_prevent_ConftestImportFailure_hiding_exception"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportSerialization:\n\n def test_report_prevent_ConftestImportFailure_hiding_exception(\n self, pytester: Pytester\n ) -> None:\n sub_dir = pytester.path.joinpath(\"ns\")\n sub_dir.mkdir()\n sub_dir.joinpath(\"conftest.py\").write_text(\"import unknown\")\n\n result = pytester.runpytest_subprocess(\".\")\n result.stdout.fnmatch_lines([\"E *Error: No module named 'unknown'\"])\n result.stdout.no_fnmatch_line(\"ERROR - *ConftestImportFailure*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_inspect_TestSetupState.test_setup_fails_and_failure_is_cached.None_1.ss_setup_item_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_inspect_TestSetupState.test_setup_fails_and_failure_is_cached.None_1.ss_setup_item_", "embedding": null, "metadata": 
{"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 54, "span_ids": ["TestSetupState.test_teardown_exact_stack_empty", "imports", "TestSetupState.test_setup", "TestSetupState", "TestSetupState.test_setup_fails_and_failure_is_cached"], "tokens": 348}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import inspect\nimport os\nimport sys\nimport types\nfrom pathlib import Path\nfrom typing import Dict\nfrom typing import List\nfrom typing import Tuple\nfrom typing import Type\n\nimport pytest\nfrom _pytest import outcomes\nfrom _pytest import reports\nfrom _pytest import runner\nfrom _pytest._code import ExceptionInfo\nfrom _pytest._code.code import ExceptionChainRepr\nfrom _pytest.config import ExitCode\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.outcomes import OutcomeException\nfrom _pytest.pytester import Pytester\n\n\nclass TestSetupState:\n def test_setup(self, pytester: Pytester) -> None:\n item = pytester.getitem(\"def test_func(): pass\")\n ss = item.session._setupstate\n values = [1]\n ss.setup(item)\n ss.addfinalizer(values.pop, item)\n assert values\n ss.teardown_exact(None)\n assert not values\n\n def test_teardown_exact_stack_empty(self, pytester: Pytester) -> None:\n item = pytester.getitem(\"def test_func(): pass\")\n ss = item.session._setupstate\n ss.setup(item)\n ss.teardown_exact(None)\n ss.teardown_exact(None)\n ss.teardown_exact(None)\n\n def test_setup_fails_and_failure_is_cached(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n def setup_module(mod):\n raise ValueError(42)\n def test_func(): pass\n \"\"\"\n )\n ss = item.session._setupstate\n with pytest.raises(ValueError):\n ss.setup(item)\n with pytest.raises(ValueError):\n ss.setup(item)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_scopes_one_fails_TestSetupState.test_teardown_multiple_scopes_one_fails.assert_module_teardown_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestSetupState.test_teardown_multiple_scopes_one_fails_TestSetupState.test_teardown_multiple_scopes_one_fails.assert_module_teardown_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 97, "end_line": 114, "span_ids": ["TestSetupState.test_teardown_multiple_scopes_one_fails"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSetupState:\n\n def test_teardown_multiple_scopes_one_fails(self, pytester: Pytester) -> None:\n module_teardown = []\n\n def fin_func():\n raise Exception(\"oops1\")\n\n def fin_module():\n 
module_teardown.append(\"fin_module\")\n\n item = pytester.getitem(\"def test_func(): pass\")\n mod = item.listchain()[-2]\n ss = item.session._setupstate\n ss.setup(item)\n ss.addfinalizer(fin_module, mod)\n ss.addfinalizer(fin_func, item)\n with pytest.raises(Exception, match=\"oops1\"):\n ss.teardown_exact(None)\n assert module_teardown == [\"fin_module\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_exact_teardown_issue1206_BaseFunctionalTests.test_exact_teardown_issue1206.None_12": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_BaseFunctionalTests.test_exact_teardown_issue1206_BaseFunctionalTests.test_exact_teardown_issue1206.None_12", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 317, "end_line": 353, "span_ids": ["BaseFunctionalTests.test_exact_teardown_issue1206"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BaseFunctionalTests:\n\n def test_exact_teardown_issue1206(self, pytester: Pytester) -> None:\n \"\"\"Issue shadowing error with wrong number of arguments on teardown_method.\"\"\"\n rec = pytester.inline_runsource(\n \"\"\"\n import pytest\n\n class TestClass(object):\n def teardown_method(self, x, y, z):\n pass\n\n def test_method(self):\n assert True\n \"\"\"\n )\n reps = rec.getreports(\"pytest_runtest_logreport\")\n print(reps)\n assert len(reps) == 3\n #\n assert reps[0].nodeid.endswith(\"test_method\")\n assert reps[0].passed\n assert reps[0].when == \"setup\"\n #\n assert reps[1].nodeid.endswith(\"test_method\")\n assert reps[1].passed\n assert reps[1].when == \"call\"\n #\n assert reps[2].nodeid.endswith(\"test_method\")\n assert reps[2].failed\n assert reps[2].when == \"teardown\"\n longrepr = reps[2].longrepr\n assert isinstance(longrepr, ExceptionChainRepr)\n assert longrepr.reprcrash\n assert longrepr.reprcrash.message in (\n \"TypeError: teardown_method() missing 2 required positional arguments: 'y' and 'z'\",\n # Python >= 3.10\n \"TypeError: TestClass.teardown_method() missing 2 required positional arguments: 'y' and 'z'\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_runtest_test_pytest_fail_notrace_runtest.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_runtest_test_pytest_fail_notrace_runtest.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 617, "end_line": 630, "span_ids": ["test_pytest_fail_notrace_runtest"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_fail_notrace_runtest(pytester: Pytester) -> None:\n \"\"\"Test pytest.fail(..., pytrace=False) does not show tracebacks during test run.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n def test_hello():\n pytest.fail(\"hello\", pytrace=False)\n def teardown_function(function):\n pytest.fail(\"world\", pytrace=False)\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"world\", \"hello\"])\n result.stdout.no_fnmatch_line(\"*def teardown_function*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_collection_test_pytest_fail_notrace_collection.result_stdout_no_fnmatch_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_pytest_fail_notrace_collection_test_pytest_fail_notrace_collection.result_stdout_no_fnmatch_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 633, "end_line": 645, "span_ids": ["test_pytest_fail_notrace_collection"], "tokens": 105}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_pytest_fail_notrace_collection(pytester: Pytester) -> None:\n \"\"\"Test pytest.fail(..., pytrace=False) does not show tracebacks during collection.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n def some_internal_function():\n pytest.fail(\"hello\", pytrace=False)\n some_internal_function()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines([\"hello\"])\n result.stdout.no_fnmatch_line(\"*def some_internal_function()*\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_test_importorskip.try_.except_pytest_skip_Except.assert_False_f_spurious_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_test_importorskip.try_.except_pytest_skip_Except.assert_False_f_spurious_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 698, "end_line": 728, "span_ids": ["test_importorskip"], "tokens": 328}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_importorskip(monkeypatch) -> 
None:\n importorskip = pytest.importorskip\n\n def f():\n importorskip(\"asdlkj\")\n\n try:\n sysmod = importorskip(\"sys\")\n assert sysmod is sys\n # path = pytest.importorskip(\"os.path\")\n # assert path == os.path\n excinfo = pytest.raises(pytest.skip.Exception, f)\n assert excinfo is not None\n excrepr = excinfo.getrepr()\n assert excrepr is not None\n assert excrepr.reprcrash is not None\n path = Path(excrepr.reprcrash.path)\n # check that importorskip reports the actual call\n # in this test the test_runner.py file\n assert path.stem == \"test_runner\"\n pytest.raises(SyntaxError, pytest.importorskip, \"x y z\")\n pytest.raises(SyntaxError, pytest.importorskip, \"x=y\")\n mod = types.ModuleType(\"hello123\")\n mod.__version__ = \"1.3\" # type: ignore\n monkeypatch.setitem(sys.modules, \"hello123\", mod)\n with pytest.raises(pytest.skip.Exception):\n pytest.importorskip(\"hello123\", minversion=\"1.3.1\")\n mod2 = pytest.importorskip(\"hello123\", minversion=\"1.3\")\n assert mod2 == mod\n except pytest.skip.Exception: # pragma: no cover\n assert False, f\"spurious skip: {ExceptionInfo.from_current()}\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_imports_last_module_part_test_importorskip_dev_module.try_.except_pytest_skip_Except.assert_False_f_spurious_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_test_importorskip_imports_last_module_part_test_importorskip_dev_module.try_.except_pytest_skip_Except.assert_False_f_spurious_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 731, "end_line": 746, "span_ids": ["test_importorskip_imports_last_module_part", "test_importorskip_dev_module"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_importorskip_imports_last_module_part() -> None:\n ospath = pytest.importorskip(\"os.path\")\n assert os.path == ospath\n\n\ndef test_importorskip_dev_module(monkeypatch) -> None:\n try:\n mod = types.ModuleType(\"mockmodule\")\n mod.__version__ = \"0.13.0.dev-43290\" # type: ignore\n monkeypatch.setitem(sys.modules, \"mockmodule\", mod)\n mod2 = pytest.importorskip(\"mockmodule\", minversion=\"0.12.0\")\n assert mod2 == mod\n with pytest.raises(pytest.skip.Exception):\n pytest.importorskip(\"mockmodule1\", minversion=\"0.14.0\")\n except pytest.skip.Exception: # pragma: no cover\n assert False, f\"spurious skip: {ExceptionInfo.from_current()}\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents_TestReportContents.test_longreprtext_skip.assert_Skipped_in_call_": {"__data__": {"id_": 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents_TestReportContents.test_longreprtext_skip.assert_Skipped_in_call_", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 942, "end_line": 969, "span_ids": ["TestReportContents", "TestReportContents.test_longreprtext_skip", "TestReportContents.getrunner", "TestReportContents.test_longreprtext_pass"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportContents:\n \"\"\"Test user-level API of ``TestReport`` objects.\"\"\"\n\n def getrunner(self):\n return lambda item: runner.runtestprotocol(item, log=False)\n\n def test_longreprtext_pass(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n def test_func():\n pass\n \"\"\"\n )\n rep = reports[1]\n assert rep.longreprtext == \"\"\n\n def test_longreprtext_skip(self, pytester: Pytester) -> None:\n \"\"\"TestReport.longreprtext can handle non-str ``longrepr`` attributes (#7559)\"\"\"\n reports = pytester.runitem(\n \"\"\"\n import pytest\n def test_func():\n pytest.skip()\n \"\"\"\n )\n _, call_rep, _ = reports\n assert isinstance(call_rep.longrepr, tuple)\n assert \"Skipped\" in call_rep.longreprtext", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_longreprtext_collect_skip_TestReportContents.test_longreprtext_failure.assert_assert_1_4_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner.py_TestReportContents.test_longreprtext_collect_skip_TestReportContents.test_longreprtext_failure.assert_assert_1_4_in", "embedding": null, "metadata": {"file_path": "testing/test_runner.py", "file_name": "test_runner.py", "file_type": "text/x-python", "category": "test", "start_line": 971, "end_line": 994, "span_ids": ["TestReportContents.test_longreprtext_failure", "TestReportContents.test_longreprtext_collect_skip"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestReportContents:\n\n def test_longreprtext_collect_skip(self, pytester: Pytester) -> None:\n \"\"\"CollectReport.longreprtext can handle non-str ``longrepr`` attributes (#7559)\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n pytest.skip(allow_module_level=True)\n \"\"\"\n )\n rec = pytester.inline_run()\n calls = rec.getcalls(\"pytest_collectreport\")\n _, call = calls\n assert isinstance(call.report.longrepr, tuple)\n assert \"Skipped\" in call.report.longreprtext\n\n def test_longreprtext_failure(self, pytester: Pytester) -> None:\n reports = pytester.runitem(\n \"\"\"\n def test_func():\n x = 1\n assert x == 4\n \"\"\"\n )\n rep = reports[1]\n assert \"assert 1 == 4\" in 
rep.longreprtext", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py__Test_correct_setup_tea_test_module_and_function_setup.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py__Test_correct_setup_tea_test_module_and_function_setup.None_1", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 38, "span_ids": ["test_module_and_function_setup", "docstring", "imports"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Test correct setup/teardowns at module, class, and instance level.\"\"\"\nfrom typing import List\n\nimport pytest\nfrom _pytest.pytester import Pytester\n\n\ndef test_module_and_function_setup(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n modlevel = []\n def setup_module(module):\n assert not modlevel\n module.modlevel.append(42)\n\n def teardown_module(module):\n modlevel.pop()\n\n def setup_function(function):\n function.answer = 17\n\n def teardown_function(function):\n del function.answer\n\n def test_modlevel():\n assert modlevel[0] == 42\n assert test_modlevel.answer == 17\n\n class TestFromClass(object):\n def test_module(self):\n assert modlevel[0] == 42\n assert not hasattr(test_modlevel, 'answer')\n \"\"\"\n )\n rep = reprec.matchreport(\"test_modlevel\")\n assert rep.passed\n rep = reprec.matchreport(\"test_module\")\n assert rep.passed", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_module_setup_failure_no_teardown_test_module_setup_failure_no_teardown.assert_calls_0_item_modu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_module_setup_failure_no_teardown_test_module_setup_failure_no_teardown.assert_calls_0_item_modu", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 58, "span_ids": ["test_module_setup_failure_no_teardown"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_module_setup_failure_no_teardown(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n values = []\n def setup_module(module):\n values.append(1)\n 0/0\n\n def test_nothing():\n pass\n\n def teardown_module(module):\n values.append(2)\n \"\"\"\n )\n reprec.assertoutcome(failed=1)\n calls = 
reprec.getcalls(\"pytest_runtest_setup\")\n assert calls[0].item.module.values == [1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_function_failure_no_teardown_test_setup_function_failure_no_teardown.assert_calls_0_item_modu": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_runner_xunit.py_test_setup_function_failure_no_teardown_test_setup_function_failure_no_teardown.assert_calls_0_item_modu", "embedding": null, "metadata": {"file_path": "testing/test_runner_xunit.py", "file_name": "test_runner_xunit.py", "file_type": "text/x-python", "category": "test", "start_line": 61, "end_line": 77, "span_ids": ["test_setup_function_failure_no_teardown"], "tokens": 105}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_setup_function_failure_no_teardown(pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n modlevel = []\n def setup_function(function):\n modlevel.append(1)\n 0/0\n\n def teardown_function(module):\n modlevel.append(2)\n\n def test_func():\n pass\n \"\"\"\n )\n calls = reprec.getcalls(\"pytest_runtest_setup\")\n assert calls[0].item.module.modlevel == [1]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_raises_output_SessionTests.test_raises_output.assert_DID_NOT_RAISE_in": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_raises_output_SessionTests.test_raises_output.assert_DID_NOT_RAISE_in", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 58, "end_line": 69, "span_ids": ["SessionTests.test_raises_output"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SessionTests:\n\n def test_raises_output(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n import pytest\n def test_raises_doesnt():\n pytest.raises(ValueError, int, \"3\")\n \"\"\"\n )\n passed, skipped, failed = reprec.listoutcomes()\n assert len(failed) == 1\n out = failed[0].longrepr.reprcrash.message # type: ignore[union-attr]\n assert \"DID NOT RAISE\" in out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_syntax_error_module_SessionTests.test_maxfail.assert_passed_skipped_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_session.py_SessionTests.test_syntax_error_module_SessionTests.test_maxfail.assert_passed_skipped_", "embedding": null, "metadata": {"file_path": "testing/test_session.py", "file_name": "test_session.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 101, "span_ids": ["SessionTests.test_maxfail", "SessionTests.test_exit_first_problem", "SessionTests.test_syntax_error_module"], "tokens": 259}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SessionTests:\n\n def test_syntax_error_module(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\"this is really not python\")\n values = reprec.getfailedcollections()\n assert len(values) == 1\n out = str(values[0].longrepr)\n assert out.find(\"not python\") != -1\n\n def test_exit_first_problem(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n def test_one(): assert 0\n def test_two(): assert 0\n \"\"\",\n \"--exitfirst\",\n )\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 1\n assert passed == skipped == 0\n\n def test_maxfail(self, pytester: Pytester) -> None:\n reprec = pytester.inline_runsource(\n \"\"\"\n def test_one(): assert 0\n def test_two(): assert 0\n def test_three(): assert 0\n \"\"\",\n \"--maxfail=2\",\n )\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 2\n assert passed == skipped == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_from__pytest_pytester_imp_test_show_fixtures_and_test.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_setupplan.py_from__pytest_pytester_imp_test_show_fixtures_and_test.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_setupplan.py", "file_name": "test_setupplan.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 24, "span_ids": ["test_show_fixtures_and_test", "imports"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from _pytest.pytester import Pytester\n\n\ndef test_show_fixtures_and_test(\n pytester: Pytester, dummy_yaml_custom_test: None\n) -> None:\n \"\"\"Verify that fixtures are not executed.\"\"\"\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg():\n assert False\n def test_arg(arg):\n assert False\n \"\"\"\n )\n\n result = pytester.runpytest(\"--setup-plan\")\n assert result.ret == 0\n\n result.stdout.fnmatch_lines(\n [\"*SETUP F arg*\", \"*test_arg (fixtures used: arg)\", \"*TEARDOWN F 
arg*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_one_arg_twice2_TestEvaluation.test_marked_one_arg_twice2.assert_skipped_reason_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_one_arg_twice2_TestEvaluation.test_marked_one_arg_twice2.assert_skipped_reason_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 91, "end_line": 103, "span_ids": ["TestEvaluation.test_marked_one_arg_twice2"], "tokens": 109}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEvaluation:\n\n def test_marked_one_arg_twice2(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.skipif(\"hasattr(os, 'murks')\")\n @pytest.mark.skipif(\"not hasattr(os, 'murks')\")\n def test_func():\n pass\n \"\"\"\n )\n skipped = evaluate_skip_marks(item)\n assert skipped\n assert skipped.reason == \"condition: not hasattr(os, 'murks')\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_skipif_with_boolean_without_reason_TestEvaluation.test_marked_skipif_with_boolean_without_reason.assert_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_marked_skipif_with_boolean_without_reason_TestEvaluation.test_marked_skipif_with_boolean_without_reason.assert_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 105, "end_line": 122, "span_ids": ["TestEvaluation.test_marked_skipif_with_boolean_without_reason"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEvaluation:\n\n def test_marked_skipif_with_boolean_without_reason(\n self, pytester: Pytester\n ) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.skipif(False)\n def test_func():\n pass\n \"\"\"\n )\n with pytest.raises(pytest.fail.Exception) as excinfo:\n evaluate_skip_marks(item)\n assert excinfo.value.msg is not None\n assert (\n \"\"\"Error evaluating 'skipif': you need to specify reason=STRING when using booleans as conditions.\"\"\"\n in excinfo.value.msg\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": 
"1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_TestEvaluation.test_skipif_markeval_namespace.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_TestEvaluation.test_skipif_markeval_namespace.None_2", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 159, "end_line": 184, "span_ids": ["TestEvaluation.test_skipif_markeval_namespace"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEvaluation:\n\n def test_skipif_markeval_namespace(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_markeval_namespace():\n return {\"color\": \"green\"}\n \"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.skipif(\"color == 'green'\")\n def test_1():\n assert True\n\n @pytest.mark.skipif(\"color == 'red'\")\n def test_2():\n assert True\n \"\"\"\n )\n res = pytester.runpytest(p)\n assert res.ret == 0\n res.stdout.fnmatch_lines([\"*1 skipped*\"])\n res.stdout.fnmatch_lines([\"*1 passed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_multiple_TestEvaluation.test_skipif_markeval_namespace_multiple.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_multiple_TestEvaluation.test_skipif_markeval_namespace_multiple.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 186, "end_line": 261, "span_ids": ["TestEvaluation.test_skipif_markeval_namespace_multiple"], "tokens": 447}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEvaluation:\n\n def test_skipif_markeval_namespace_multiple(self, pytester: Pytester) -> None:\n \"\"\"Keys defined by ``pytest_markeval_namespace()`` in nested plugins override top-level ones.\"\"\"\n root = pytester.mkdir(\"root\")\n root.joinpath(\"__init__.py\").touch()\n root.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n def pytest_markeval_namespace():\n return {\"arg\": \"root\"}\n \"\"\"\n )\n )\n root.joinpath(\"test_root.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.mark.skipif(\"arg == 'root'\")\n def test_root():\n assert False\n \"\"\"\n )\n )\n foo = root.joinpath(\"foo\")\n foo.mkdir()\n foo.joinpath(\"__init__.py\").touch()\n 
foo.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n def pytest_markeval_namespace():\n return {\"arg\": \"foo\"}\n \"\"\"\n )\n )\n foo.joinpath(\"test_foo.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.mark.skipif(\"arg == 'foo'\")\n def test_foo():\n assert False\n \"\"\"\n )\n )\n bar = root.joinpath(\"bar\")\n bar.mkdir()\n bar.joinpath(\"__init__.py\").touch()\n bar.joinpath(\"conftest.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n def pytest_markeval_namespace():\n return {\"arg\": \"bar\"}\n \"\"\"\n )\n )\n bar.joinpath(\"test_bar.py\").write_text(\n textwrap.dedent(\n \"\"\"\\\n import pytest\n\n @pytest.mark.skipif(\"arg == 'bar'\")\n def test_bar():\n assert False\n \"\"\"\n )\n )\n\n reprec = pytester.inline_run(\"-vs\", \"--capture=no\")\n reprec.assertoutcome(skipped=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_ValueError_TestEvaluation.test_skipif_markeval_namespace_ValueError.res_stdout_fnmatch_lines_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestEvaluation.test_skipif_markeval_namespace_ValueError_TestEvaluation.test_skipif_markeval_namespace_ValueError.res_stdout_fnmatch_lines_", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 263, "end_line": 287, "span_ids": ["TestEvaluation.test_skipif_markeval_namespace_ValueError"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestEvaluation:\n\n def test_skipif_markeval_namespace_ValueError(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_markeval_namespace():\n return True\n \"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.skipif(\"color == 'green'\")\n def test_1():\n assert True\n \"\"\"\n )\n res = pytester.runpytest(p)\n assert res.ret == 1\n res.stdout.fnmatch_lines(\n [\n \"*ValueError: pytest_markeval_namespace() needs to return a dict, got True*\"\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_xpassed_TestXFail.test_xfail_xpassed.assert_callreport_wasxfai": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_xpassed_TestXFail.test_xfail_xpassed.assert_callreport_wasxfai", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 308, "end_line": 321, "span_ids": ["TestXFail.test_xfail_xpassed"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_xpassed(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.xfail(reason=\"this is an xfail\")\n def test_func():\n assert 1\n \"\"\"\n )\n reports = runtestprotocol(item, log=False)\n assert len(reports) == 3\n callreport = reports[1]\n assert callreport.passed\n assert callreport.wasxfail == \"this is an xfail\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_using_platform_TestXFail.test_xfail_using_platform.assert_callreport_wasxfai": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_using_platform_TestXFail.test_xfail_using_platform.assert_callreport_wasxfai", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 323, "end_line": 336, "span_ids": ["TestXFail.test_xfail_using_platform"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_using_platform(self, pytester: Pytester) -> None:\n \"\"\"Verify that platform can be used with xfail statements.\"\"\"\n item = pytester.getitem(\n \"\"\"\n import pytest\n @pytest.mark.xfail(\"platform.platform() == platform.platform()\")\n def test_func():\n assert 0\n \"\"\"\n )\n reports = runtestprotocol(item, log=False)\n assert len(reports) == 3\n callreport = reports[1]\n assert callreport.wasxfail", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_xpassed_strict_TestXFail.test_xfail_xpassed_strict.assert_not_hasattr_callre": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_xpassed_strict_TestXFail.test_xfail_xpassed_strict.assert_not_hasattr_callre", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 338, "end_line": 352, "span_ids": ["TestXFail.test_xfail_xpassed_strict"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_xpassed_strict(self, pytester: Pytester) -> None:\n item = pytester.getitem(\n \"\"\"\n import pytest\n 
@pytest.mark.xfail(strict=True, reason=\"nope\")\n def test_func():\n assert 1\n \"\"\"\n )\n reports = runtestprotocol(item, log=False)\n assert len(reports) == 3\n callreport = reports[1]\n assert callreport.failed\n assert str(callreport.longrepr) == \"[XPASS(strict)] nope\"\n assert not hasattr(callreport, \"wasxfail\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_run_anyway_TestXFail.test_xfail_run_anyway.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_run_anyway_TestXFail.test_xfail_run_anyway.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 354, "end_line": 368, "span_ids": ["TestXFail.test_xfail_run_anyway"], "tokens": 113}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_run_anyway(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail\n def test_func():\n assert 0\n def test_func2():\n pytest.xfail(\"hello\")\n \"\"\"\n )\n result = pytester.runpytest(\"--runxfail\")\n result.stdout.fnmatch_lines(\n [\"*def test_func():*\", \"*assert 0*\", \"*1 failed*1 pass*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.xtest_dynamic_xfail_set_during_setup_TestXFail.xtest_dynamic_xfail_set_during_setup.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.xtest_dynamic_xfail_set_during_setup_TestXFail.xtest_dynamic_xfail_set_during_setup.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 519, "end_line": 532, "span_ids": ["TestXFail.xtest_dynamic_xfail_set_during_setup"], "tokens": 114}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def xtest_dynamic_xfail_set_during_setup(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def setup_function(function):\n pytest.mark.xfail(function)\n def test_this():\n assert 0\n def test_that():\n assert 1\n \"\"\"\n )\n result = pytester.runpytest(p, \"-rxX\")\n result.stdout.fnmatch_lines([\"*XFAIL*test_this*\", \"*XPASS*test_that*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_dynamic_xfail_no_run_TestXFail.test_dynamic_xfail_set_during_runtest_passed_strict.result_assert_outcomes_fa": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_dynamic_xfail_no_run_TestXFail.test_dynamic_xfail_set_during_runtest_passed_strict.result_assert_outcomes_fa", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 534, "end_line": 587, "span_ids": ["TestXFail.test_dynamic_xfail_set_during_funcarg_setup", "TestXFail.test_dynamic_xfail_set_during_runtest_passed_strict", "TestXFail.test_dynamic_xfail_no_run", "TestXFail.test_dynamic_xfail_set_during_runtest_failed"], "tokens": 396}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_dynamic_xfail_no_run(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg(request):\n request.applymarker(pytest.mark.xfail(run=False))\n def test_this(arg):\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p, \"-rxX\")\n result.stdout.fnmatch_lines([\"*XFAIL*test_this*\", \"*NOTRUN*\"])\n\n def test_dynamic_xfail_set_during_funcarg_setup(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def arg(request):\n request.applymarker(pytest.mark.xfail)\n def test_this2(arg):\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.stdout.fnmatch_lines([\"*1 xfailed*\"])\n\n def test_dynamic_xfail_set_during_runtest_failed(self, pytester: Pytester) -> None:\n # Issue #7486.\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_this(request):\n request.node.add_marker(pytest.mark.xfail(reason=\"xfail\"))\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.assert_outcomes(xfailed=1)\n\n def test_dynamic_xfail_set_during_runtest_passed_strict(\n self, pytester: Pytester\n ) -> None:\n # Issue #7486.\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n def test_this(request):\n request.node.add_marker(pytest.mark.xfail(reason=\"xfail\", strict=True))\n \"\"\"\n )\n result = pytester.runpytest(p)\n result.assert_outcomes(failed=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_TestXFail.test_strict_xfail.assert_pytester_path_join": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_strict_xfail_TestXFail.test_strict_xfail.assert_pytester_path_join", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 628, "end_line": 653, "span_ids": ["TestXFail.test_strict_xfail"], "tokens": 214}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n @pytest.mark.parametrize(\"strict\", [True, False])\n def test_strict_xfail(self, pytester: Pytester, strict: bool) -> None:\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.xfail(reason='unsupported feature', strict=%s)\n def test_foo():\n with open('foo_executed', 'w'): pass # make sure test executes\n \"\"\"\n % strict\n )\n result = pytester.runpytest(p, \"-rxX\")\n if strict:\n result.stdout.fnmatch_lines(\n [\"*test_foo*\", \"*XPASS(strict)*unsupported feature*\"]\n )\n else:\n result.stdout.fnmatch_lines(\n [\n \"*test_strict_xfail*\",\n \"XPASS test_strict_xfail.py::test_foo unsupported feature\",\n ]\n )\n assert result.ret == (1 if strict else 0)\n assert pytester.path.joinpath(\"foo_executed\").exists()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_markeval_namespace_TestXFail.test_xfail_markeval_namespace.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestXFail.test_xfail_markeval_namespace_TestXFail.test_xfail_markeval_namespace.None_2", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 711, "end_line": 736, "span_ids": ["TestXFail.test_xfail_markeval_namespace"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestXFail:\n\n def test_xfail_markeval_namespace(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n def pytest_markeval_namespace():\n return {\"color\": \"green\"}\n \"\"\"\n )\n p = pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.xfail(\"color == 'green'\")\n def test_1():\n assert False\n\n @pytest.mark.xfail(\"color == 'red'\")\n def test_2():\n assert False\n \"\"\"\n )\n res = pytester.runpytest(p)\n assert res.ret == 1\n res.stdout.fnmatch_lines([\"*1 failed*\"])\n res.stdout.fnmatch_lines([\"*1 xfailed*\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestBooleanCondition_TestBooleanCondition.test_skipif.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestBooleanCondition_TestBooleanCondition.test_skipif.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1224, "end_line": 1242, "span_ids": 
["TestBooleanCondition.test_skipif", "TestBooleanCondition"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestBooleanCondition:\n def test_skipif(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skipif(True, reason=\"True123\")\n def test_func1():\n pass\n @pytest.mark.skipif(False, reason=\"True123\")\n def test_func2():\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *1 passed*1 skipped*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestBooleanCondition.test_skipif_noreason_TestBooleanCondition.test_xfail.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_skipping.py_TestBooleanCondition.test_skipif_noreason_TestBooleanCondition.test_xfail.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_skipping.py", "file_name": "test_skipping.py", "file_type": "text/x-python", "category": "test", "start_line": 1244, "end_line": 1276, "span_ids": ["TestBooleanCondition.test_xfail", "TestBooleanCondition.test_skipif_noreason"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestBooleanCondition:\n\n def test_skipif_noreason(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.skipif(True)\n def test_func():\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"-rs\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *1 error*\n \"\"\"\n )\n\n def test_xfail(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.xfail(True, reason=\"True123\")\n def test_func():\n assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"-rxs\")\n result.stdout.fnmatch_lines(\n \"\"\"\n *XFAIL*\n *True123*\n *1 xfail*\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_pytest_stepwise_pytester.return.pytester": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_pytest_stepwise_pytester.return.pytester", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 54, "span_ids": ["stepwise_pytester", "imports"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import pytest\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\n\n\n@pytest.fixture\ndef stepwise_pytester(pytester: Pytester) -> Pytester:\n # Rather than having to modify our testfile between tests, we introduce\n # a flag for whether or not the second test should fail.\n pytester.makeconftest(\n \"\"\"\ndef pytest_addoption(parser):\n group = parser.getgroup('general')\n group.addoption('--fail', action='store_true', dest='fail')\n group.addoption('--fail-last', action='store_true', dest='fail_last')\n\"\"\"\n )\n\n # Create a simple test suite.\n pytester.makepyfile(\n test_a=\"\"\"\ndef test_success_before_fail():\n assert 1\n\ndef test_fail_on_flag(request):\n assert not request.config.getvalue('fail')\n\ndef test_success_after_fail():\n assert 1\n\ndef test_fail_last_on_flag(request):\n assert not request.config.getvalue('fail_last')\n\ndef test_success_after_last_fail():\n assert 1\n\"\"\"\n )\n\n pytester.makepyfile(\n test_b=\"\"\"\ndef test_success():\n assert 1\n\"\"\"\n )\n\n # customize cache directory so we don't use the tox's cache directory, which makes tests in this module flaky\n pytester.makeini(\n \"\"\"\n [pytest]\n cache_dir = .cache\n \"\"\"\n )\n\n return pytester", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_error_pytester_test_run_without_stepwise.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_error_pytester_test_run_without_stepwise.None_2", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 57, "end_line": 94, "span_ids": ["broken_pytester", "test_run_without_stepwise", "_strip_resource_warnings", "error_pytester"], "tokens": 263}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.fixture\ndef error_pytester(pytester: Pytester) -> Pytester:\n pytester.makepyfile(\n test_a=\"\"\"\ndef test_error(nonexisting_fixture):\n assert 1\n\ndef test_success_after_fail():\n assert 1\n\"\"\"\n )\n\n return pytester\n\n\n@pytest.fixture\ndef broken_pytester(pytester: Pytester) -> Pytester:\n pytester.makepyfile(\n working_testfile=\"def test_proper(): assert 1\", broken_testfile=\"foobar\"\n )\n return pytester\n\n\ndef _strip_resource_warnings(lines):\n # Strip unreliable ResourceWarnings, so no-output assertions on stderr can work.\n # (https://github.com/pytest-dev/pytest/issues/5088)\n return [\n x\n for x in lines\n if not x.startswith((\"Exception ignored in:\", \"ResourceWarning\"))\n ]\n\n\ndef test_run_without_stepwise(stepwise_pytester: Pytester) -> None:\n result = stepwise_pytester.runpytest(\"-v\", \"--strict-markers\", \"--fail\")\n result.stdout.fnmatch_lines([\"*test_success_before_fail PASSED*\"])\n result.stdout.fnmatch_lines([\"*test_fail_on_flag FAILED*\"])\n result.stdout.fnmatch_lines([\"*test_success_after_fail PASSED*\"])", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_stepwise_output_summary_test_stepwise_output_summary.None_2": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_stepwise_output_summary_test_stepwise_output_summary.None_2", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 97, "end_line": 111, "span_ids": ["test_stepwise_output_summary"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_stepwise_output_summary(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.mark.parametrize(\"expected\", [True, True, True, True, False])\n def test_data(expected):\n assert expected\n \"\"\"\n )\n result = pytester.runpytest(\"-v\", \"--stepwise\")\n result.stdout.fnmatch_lines([\"stepwise: no previously failed tests, not skipping.\"])\n result = pytester.runpytest(\"-v\", \"--stepwise\")\n result.stdout.fnmatch_lines(\n [\"stepwise: skipping 4 already passed items.\", \"*1 failed, 4 deselected*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_run_with_skip_option_test_run_with_skip_option.None_4": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_run_with_skip_option_test_run_with_skip_option.None_4", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 138, "end_line": 155, "span_ids": ["test_run_with_skip_option"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.parametrize(\"stepwise_skip\", [\"--stepwise-skip\", \"--sw-skip\"])\ndef test_run_with_skip_option(stepwise_pytester: Pytester, stepwise_skip: str) -> None:\n result = stepwise_pytester.runpytest(\n \"-v\",\n \"--strict-markers\",\n \"--stepwise\",\n stepwise_skip,\n \"--fail\",\n \"--fail-last\",\n )\n assert _strip_resource_warnings(result.stderr.lines) == []\n\n stdout = result.stdout.str()\n # Make sure first fail is ignore and second fail stops the test run.\n assert \"test_fail_on_flag FAILED\" in stdout\n assert \"test_success_after_fail PASSED\" in stdout\n assert \"test_fail_last_on_flag FAILED\" in stdout\n assert \"test_success_after_last_fail\" not in stdout", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", 
"class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_fail_on_errors_test_change_testfile.assert_test_success_PASS": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_stepwise.py_test_fail_on_errors_test_change_testfile.assert_test_success_PASS", "embedding": null, "metadata": {"file_path": "testing/test_stepwise.py", "file_name": "test_stepwise.py", "file_type": "text/x-python", "category": "test", "start_line": 158, "end_line": 185, "span_ids": ["test_fail_on_errors", "test_change_testfile"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_fail_on_errors(error_pytester: Pytester) -> None:\n result = error_pytester.runpytest(\"-v\", \"--strict-markers\", \"--stepwise\")\n\n assert _strip_resource_warnings(result.stderr.lines) == []\n stdout = result.stdout.str()\n\n assert \"test_error ERROR\" in stdout\n assert \"test_success_after_fail\" not in stdout\n\n\ndef test_change_testfile(stepwise_pytester: Pytester) -> None:\n result = stepwise_pytester.runpytest(\n \"-v\", \"--strict-markers\", \"--stepwise\", \"--fail\", \"test_a.py\"\n )\n assert _strip_resource_warnings(result.stderr.lines) == []\n\n stdout = result.stdout.str()\n assert \"test_fail_on_flag FAILED\" in stdout\n\n # Make sure the second test run starts from the beginning, since the\n # test to continue from does not exist in testfile_b.\n result = stepwise_pytester.runpytest(\n \"-v\", \"--strict-markers\", \"--stepwise\", \"test_b.py\"\n )\n assert _strip_resource_warnings(result.stderr.lines) == []\n\n stdout = result.stdout.str()\n assert \"test_success PASSED\" in stdout", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py__Terminal_reporting_of__option.return.request_param": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py__Terminal_reporting_of__option.return.request_param", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 58, "span_ids": ["option", "impl", "Option.args", "docstring", "Option", "imports"], "tokens": 369}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"Terminal reporting of the full testing process.\"\"\"\nimport collections\nimport os\nimport sys\nimport textwrap\nfrom io import StringIO\nfrom pathlib import Path\nfrom types import SimpleNamespace\nfrom typing import cast\nfrom typing import Dict\nfrom typing import List\nfrom typing import Tuple\n\nimport pluggy\nimport py\n\nimport _pytest.config\nimport _pytest.terminal\nimport pytest\nfrom _pytest._io.wcwidth import wcswidth\nfrom _pytest.config import Config\nfrom _pytest.config import 
ExitCode\nfrom _pytest.monkeypatch import MonkeyPatch\nfrom _pytest.pytester import Pytester\nfrom _pytest.reports import BaseReport\nfrom _pytest.reports import CollectReport\nfrom _pytest.reports import TestReport\nfrom _pytest.terminal import _folded_skips\nfrom _pytest.terminal import _format_trimmed\nfrom _pytest.terminal import _get_line_with_reprcrash_message\nfrom _pytest.terminal import _get_raw_skip_reason\nfrom _pytest.terminal import _plugin_nameversions\nfrom _pytest.terminal import getreportopt\nfrom _pytest.terminal import TerminalReporter\n\nDistInfo = collections.namedtuple(\"DistInfo\", [\"project_name\", \"version\"])\n\n\nTRANS_FNMATCH = str.maketrans({\"[\": \"[[]\", \"]\": \"[]]\"})\n\n\nclass Option:\n def __init__(self, verbosity=0):\n self.verbosity = verbosity\n\n @property\n def args(self):\n values = []\n values.append(\"--verbosity=%d\" % self.verbosity)\n return values\n\n\n@pytest.fixture(\n params=[Option(verbosity=0), Option(verbosity=1), Option(verbosity=-1)],\n ids=[\"default\", \"verbose\", \"quiet\"],\n)\ndef option(request):\n return request.param", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_verbose_skip_reason_TestTerminal.test_verbose_skip_reason.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminal.test_verbose_skip_reason_TestTerminal.test_verbose_skip_reason.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 349, "end_line": 405, "span_ids": ["TestTerminal.test_verbose_skip_reason"], "tokens": 407}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminal:\n\n def test_verbose_skip_reason(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n @pytest.mark.skip(reason=\"123\")\n def test_1():\n pass\n\n @pytest.mark.xfail(reason=\"456\")\n def test_2():\n pass\n\n @pytest.mark.xfail(reason=\"789\")\n def test_3():\n assert False\n\n @pytest.mark.xfail(reason=\"\")\n def test_4():\n assert False\n\n @pytest.mark.skip\n def test_5():\n pass\n\n @pytest.mark.xfail\n def test_6():\n pass\n\n def test_7():\n pytest.skip()\n\n def test_8():\n pytest.skip(\"888 is great\")\n\n def test_9():\n pytest.xfail()\n\n def test_10():\n pytest.xfail(\"It's \ud83d\udd59 o'clock\")\n \"\"\"\n )\n result = pytester.runpytest(\"-v\")\n result.stdout.fnmatch_lines(\n [\n \"test_verbose_skip_reason.py::test_1 SKIPPED (123) *\",\n \"test_verbose_skip_reason.py::test_2 XPASS (456) *\",\n \"test_verbose_skip_reason.py::test_3 XFAIL (789) *\",\n \"test_verbose_skip_reason.py::test_4 XFAIL *\",\n \"test_verbose_skip_reason.py::test_5 SKIPPED (unconditional skip) *\",\n \"test_verbose_skip_reason.py::test_6 XPASS *\",\n \"test_verbose_skip_reason.py::test_7 SKIPPED *\",\n \"test_verbose_skip_reason.py::test_8 SKIPPED (888 is great) *\",\n \"test_verbose_skip_reason.py::test_9 XFAIL *\",\n 
\"test_verbose_skip_reason.py::test_10 XFAIL (It's \ud83d\udd59 o'clock) *\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collect_only_summary_status_TestCollectonly.test_collect_only_summary_status.None_10": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCollectonly.test_collect_only_summary_status_TestCollectonly.test_collect_only_summary_status.None_10", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 531, "end_line": 571, "span_ids": ["TestCollectonly.test_collect_only_summary_status"], "tokens": 421}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCollectonly:\n\n def test_collect_only_summary_status(self, pytester: Pytester) -> None:\n \"\"\"Custom status depending on test selection using -k or -m. #7701.\"\"\"\n pytester.makepyfile(\n test_collect_foo=\"\"\"\n def test_foo(): pass\n \"\"\",\n test_collect_bar=\"\"\"\n def test_foobar(): pass\n def test_bar(): pass\n \"\"\",\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines(\"*== 3 tests collected in * ==*\")\n\n result = pytester.runpytest(\"--collect-only\", \"test_collect_foo.py\")\n result.stdout.fnmatch_lines(\"*== 1 test collected in * ==*\")\n\n result = pytester.runpytest(\"--collect-only\", \"-k\", \"foo\")\n result.stdout.fnmatch_lines(\"*== 2/3 tests collected (1 deselected) in * ==*\")\n\n result = pytester.runpytest(\"--collect-only\", \"-k\", \"test_bar\")\n result.stdout.fnmatch_lines(\"*== 1/3 tests collected (2 deselected) in * ==*\")\n\n result = pytester.runpytest(\"--collect-only\", \"-k\", \"invalid\")\n result.stdout.fnmatch_lines(\"*== no tests collected (3 deselected) in * ==*\")\n\n pytester.mkdir(\"no_tests_here\")\n result = pytester.runpytest(\"--collect-only\", \"no_tests_here\")\n result.stdout.fnmatch_lines(\"*== no tests collected in * ==*\")\n\n pytester.makepyfile(\n test_contains_error=\"\"\"\n raise RuntimeError\n \"\"\",\n )\n result = pytester.runpytest(\"--collect-only\")\n result.stdout.fnmatch_lines(\"*== 3 tests collected, 1 error in * ==*\")\n result = pytester.runpytest(\"--collect-only\", \"-k\", \"foo\")\n result.stdout.fnmatch_lines(\n \"*== 2/3 tests collected (1 deselected), 1 error in * ==*\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_absolute_testpath_TestTerminalFunctional.test_header_absolute_testpath.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestTerminalFunctional.test_header_absolute_testpath_TestTerminalFunctional.test_header_absolute_testpath.result_stdout_fnmatch_lin", "embedding": null, "metadata": 
{"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 872, "end_line": 893, "span_ids": ["TestTerminalFunctional.test_header_absolute_testpath"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTerminalFunctional:\n\n def test_header_absolute_testpath(\n self, pytester: Pytester, monkeypatch: MonkeyPatch\n ) -> None:\n \"\"\"Regresstion test for #7814.\"\"\"\n tests = pytester.path.joinpath(\"tests\")\n tests.mkdir()\n pytester.makepyprojecttoml(\n \"\"\"\n [tool.pytest.ini_options]\n testpaths = ['{}']\n \"\"\".format(\n tests\n )\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\n \"rootdir: *absolute_testpath0, configfile: pyproject.toml, testpaths: {}\".format(\n tests\n )\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_color_yes_test_color_yes.None_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_color_yes_test_color_yes.None_1", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1158, "end_line": 1214, "span_ids": ["test_color_yes"], "tokens": 537}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_color_yes(pytester: Pytester, color_mapping) -> None:\n p1 = pytester.makepyfile(\n \"\"\"\n def fail():\n assert 0\n\n def test_this():\n fail()\n \"\"\"\n )\n result = pytester.runpytest(\"--color=yes\", str(p1))\n result.stdout.fnmatch_lines(\n color_mapping.format_for_fnmatch(\n [\n \"{bold}=*= test session starts =*={reset}\",\n \"collected 1 item\",\n \"\",\n \"test_color_yes.py {red}F{reset}{red} * [100%]{reset}\",\n \"\",\n \"=*= FAILURES =*=\",\n \"{red}{bold}_*_ test_this _*_{reset}\",\n \"\",\n \" {kw}def{hl-reset} {function}test_this{hl-reset}():\",\n \"> fail()\",\n \"\",\n \"{bold}{red}test_color_yes.py{reset}:5: \",\n \"_ _ * _ _*\",\n \"\",\n \" {kw}def{hl-reset} {function}fail{hl-reset}():\",\n \"> {kw}assert{hl-reset} {number}0{hl-reset}\",\n \"{bold}{red}E assert 0{reset}\",\n \"\",\n \"{bold}{red}test_color_yes.py{reset}:2: AssertionError\",\n \"{red}=*= {red}{bold}1 failed{reset}{red} in *s{reset}{red} =*={reset}\",\n ]\n )\n )\n result = pytester.runpytest(\"--color=yes\", \"--tb=short\", str(p1))\n result.stdout.fnmatch_lines(\n color_mapping.format_for_fnmatch(\n [\n \"{bold}=*= test session starts =*={reset}\",\n \"collected 1 item\",\n \"\",\n \"test_color_yes.py {red}F{reset}{red} * [100%]{reset}\",\n \"\",\n \"=*= FAILURES =*=\",\n \"{red}{bold}_*_ test_this _*_{reset}\",\n \"{bold}{red}test_color_yes.py{reset}:5: in test_this\",\n \" fail()\",\n 
\"{bold}{red}test_color_yes.py{reset}:2: in fail\",\n \" {kw}assert{hl-reset} {number}0{hl-reset}\",\n \"{bold}{red}E assert 0{reset}\",\n \"{red}=*= {red}{bold}1 failed{reset}{red} in *s{reset}{red} =*={reset}\",\n ]\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_tbstyle_native_setup_error_test_tbstyle_native_setup_error.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_tbstyle_native_setup_error_test_tbstyle_native_setup_error.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1583, "end_line": 1598, "span_ids": ["test_tbstyle_native_setup_error"], "tokens": 107}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_tbstyle_native_setup_error(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest\n @pytest.fixture\n def setup_error_fixture():\n raise Exception(\"error in exception\")\n\n def test_error_fixture(setup_error_fixture):\n pass\n \"\"\"\n )\n result = pytester.runpytest(\"--tb=native\")\n result.stdout.fnmatch_lines(\n ['*File *test_tbstyle_native_setup_error.py\", line *, in setup_error_fixture*']\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_test_terminal_summary.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_terminal_summary_test_terminal_summary.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1601, "end_line": 1618, "span_ids": ["test_terminal_summary"], "tokens": 110}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_terminal_summary(pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n def pytest_terminal_summary(terminalreporter, exitstatus):\n w = terminalreporter\n w.section(\"hello\")\n w.line(\"world\")\n w.line(\"exitstatus: {0}\".format(exitstatus))\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n \"\"\"\n *==== hello ====*\n world\n exitstatus: 5\n \"\"\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle_TestProgressOutputStyle.many_tests_files.pytester_makepyfile_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressOutputStyle_TestProgressOutputStyle.many_tests_files.pytester_makepyfile_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 1924, "end_line": 1943, "span_ids": ["TestProgressOutputStyle", "TestProgressOutputStyle.many_tests_files"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressOutputStyle:\n @pytest.fixture\n def many_tests_files(self, pytester: Pytester) -> None:\n pytester.makepyfile(\n test_bar=\"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(10))\n def test_bar(i): pass\n \"\"\",\n test_foo=\"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(5))\n def test_foo(i): pass\n \"\"\",\n test_foobar=\"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(5))\n def test_foobar(i): pass\n \"\"\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown_TestProgressWithTeardown.many_files.pytester_makepyfile_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestProgressWithTeardown_TestProgressWithTeardown.many_files.pytester_makepyfile_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2129, "end_line": 2160, "span_ids": ["TestProgressWithTeardown.contest_with_teardown_fixture", "TestProgressWithTeardown.many_files", "TestProgressWithTeardown"], "tokens": 198}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestProgressWithTeardown:\n \"\"\"Ensure we show the correct percentages for tests that fail during teardown (#3088)\"\"\"\n\n @pytest.fixture\n def contest_with_teardown_fixture(self, pytester: Pytester) -> None:\n pytester.makeconftest(\n \"\"\"\n import pytest\n\n @pytest.fixture\n def fail_teardown():\n yield\n assert False\n \"\"\"\n )\n\n @pytest.fixture\n def many_files(self, pytester: Pytester, contest_with_teardown_fixture) -> None:\n pytester.makepyfile(\n test_bar=\"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(5))\n def test_bar(fail_teardown, i):\n pass\n \"\"\",\n test_foo=\"\"\"\n import pytest\n @pytest.mark.parametrize('i', range(15))\n def test_foo(fail_teardown, i):\n pass\n \"\"\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCodeHighlight.test_code_highlight_continuation_TestCodeHighlight.test_code_highlight_continuation.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_TestCodeHighlight.test_code_highlight_continuation_TestCodeHighlight.test_code_highlight_continuation.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2388, "end_line": 2409, "span_ids": ["TestCodeHighlight.test_code_highlight_continuation"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCodeHighlight:\n\n def test_code_highlight_continuation(\n self, pytester: Pytester, color_mapping\n ) -> None:\n pytester.makepyfile(\n \"\"\"\n def test_foo():\n print('''\n '''); assert 0\n \"\"\"\n )\n result = pytester.runpytest(\"--color=yes\")\n\n result.stdout.fnmatch_lines(\n color_mapping.format_for_fnmatch(\n [\n \" {kw}def{hl-reset} {function}test_foo{hl-reset}():\",\n \" {print}print{hl-reset}({str}'''{hl-reset}{str}{hl-reset}\",\n \"> {str} {hl-reset}{str}'''{hl-reset}); {kw}assert{hl-reset} {number}0{hl-reset}\",\n \"{bold}{red}E assert 0{reset}\",\n ]\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_raw_skip_reason_skipped_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_terminal.py_test_raw_skip_reason_skipped_", "embedding": null, "metadata": {"file_path": "testing/test_terminal.py", "file_name": "test_terminal.py", "file_type": "text/x-python", "category": "test", "start_line": 2412, "end_line": 2434, "span_ids": ["test_raw_skip_reason_skipped", "test_raw_skip_reason_xfail", "test_format_trimmed"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_raw_skip_reason_skipped() -> None:\n report = SimpleNamespace()\n report.skipped = True\n report.longrepr = (\"xyz\", 3, \"Skipped: Just so\")\n\n reason = _get_raw_skip_reason(cast(TestReport, report))\n assert reason == \"Just so\"\n\n\ndef test_raw_skip_reason_xfail() -> None:\n report = SimpleNamespace()\n report.wasxfail = \"reason: To everything there is a season\"\n\n reason = _get_raw_skip_reason(cast(TestReport, report))\n assert reason == \"To everything there is a season\"\n\n\ndef test_format_trimmed() -> None:\n msg = \"unconditional skip\"\n\n assert _format_trimmed(\" ({}) \", msg, len(msg) + 4) == \" (unconditional skip) \"\n assert _format_trimmed(\" ({}) \", msg, len(msg) + 3) == \" (unconditional ...) 
\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_sys_test_unhandled_thread_exception.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_sys_test_unhandled_thread_exception.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_threadexception.py", "file_name": "test_threadexception.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 42, "span_ids": ["test_unhandled_thread_exception", "impl", "imports"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\n\nimport pytest\nfrom _pytest.pytester import Pytester\n\n\nif sys.version_info < (3, 8):\n pytest.skip(\"threadexception plugin needs Python>=3.8\", allow_module_level=True)\n\n\n@pytest.mark.filterwarnings(\"default\")\ndef test_unhandled_thread_exception(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n import threading\n\n def test_it():\n def oops():\n raise ValueError(\"Oops\")\n\n t = threading.Thread(target=oops, name=\"MyThread\")\n t.start()\n t.join()\n\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n assert result.parseoutcomes() == {\"passed\": 2, \"warnings\": 1}\n result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"test_it.py::test_it\",\n \" * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread\",\n \" \",\n \" Traceback (most recent call last):\",\n \" ValueError: Oops\",\n \" \",\n \" warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_in_setup_test_unhandled_thread_exception_in_setup.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_in_setup_test_unhandled_thread_exception_in_setup.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_threadexception.py", "file_name": "test_threadexception.py", "file_type": "text/x-python", "category": "test", "start_line": 45, "end_line": 78, "span_ids": ["test_unhandled_thread_exception_in_setup"], "tokens": 235}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_unhandled_thread_exception_in_setup(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n import threading\n import pytest\n\n @pytest.fixture\n def 
threadexc():\n def oops():\n raise ValueError(\"Oops\")\n t = threading.Thread(target=oops, name=\"MyThread\")\n t.start()\n t.join()\n\n def test_it(threadexc): pass\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n assert result.parseoutcomes() == {\"passed\": 2, \"warnings\": 1}\n result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"test_it.py::test_it\",\n \" * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread\",\n \" \",\n \" Traceback (most recent call last):\",\n \" ValueError: Oops\",\n \" \",\n \" warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_in_teardown_test_unhandled_thread_exception_in_teardown.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_in_teardown_test_unhandled_thread_exception_in_teardown.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_threadexception.py", "file_name": "test_threadexception.py", "file_type": "text/x-python", "category": "test", "start_line": 81, "end_line": 115, "span_ids": ["test_unhandled_thread_exception_in_teardown"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_unhandled_thread_exception_in_teardown(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n import threading\n import pytest\n\n @pytest.fixture\n def threadexc():\n def oops():\n raise ValueError(\"Oops\")\n yield\n t = threading.Thread(target=oops, name=\"MyThread\")\n t.start()\n t.join()\n\n def test_it(threadexc): pass\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n assert result.parseoutcomes() == {\"passed\": 2, \"warnings\": 1}\n result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"test_it.py::test_it\",\n \" * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread\",\n \" \",\n \" Traceback (most recent call last):\",\n \" ValueError: Oops\",\n \" \",\n \" warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_warning_error_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_threadexception.py_test_unhandled_thread_exception_warning_error_", "embedding": null, "metadata": {"file_path": "testing/test_threadexception.py", "file_name": "test_threadexception.py", "file_type": "text/x-python", "category": "test", "start_line": 118, "end_line": 138, "span_ids": ["test_unhandled_thread_exception_warning_error"], "tokens": 143}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"error::pytest.PytestUnhandledThreadExceptionWarning\")\ndef test_unhandled_thread_exception_warning_error(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n import threading\n import pytest\n\n def test_it():\n def oops():\n raise ValueError(\"Oops\")\n t = threading.Thread(target=oops, name=\"MyThread\")\n t.start()\n t.join()\n\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == pytest.ExitCode.TESTS_FAILED\n assert result.parseoutcomes() == {\"passed\": 1, \"failed\": 1}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_skip_issue148_test_unittest_skip_issue148.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_skip_issue148_test_unittest_skip_issue148.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 216, "end_line": 234, "span_ids": ["test_unittest_skip_issue148"], "tokens": 106}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_unittest_skip_issue148(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n\n @unittest.skip(\"hello\")\n class MyTestCase(unittest.TestCase):\n @classmethod\n def setUpClass(self):\n xxx\n def test_one(self):\n pass\n @classmethod\n def tearDownClass(self):\n xxx\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n reprec.assertoutcome(skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_method_and_teardown_failing_reporting_test_method_and_teardown_failing_reporting.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_method_and_teardown_failing_reporting_test_method_and_teardown_failing_reporting.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 237, "end_line": 258, "span_ids": ["test_method_and_teardown_failing_reporting"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "def test_method_and_teardown_failing_reporting(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n class TC(unittest.TestCase):\n def tearDown(self):\n assert 0, \"down1\"\n def test_method(self):\n assert False, \"down2\"\n \"\"\"\n )\n result = pytester.runpytest(\"-s\")\n assert result.ret == 1\n result.stdout.fnmatch_lines(\n [\n \"*tearDown*\",\n \"*assert 0*\",\n \"*test_method*\",\n \"*assert False*\",\n \"*1 failed*1 error*\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_pdb_TestTrialUnittest.test_trial_testcase_todo_property.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_pdb_TestTrialUnittest.test_trial_testcase_todo_property.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 565, "end_line": 616, "span_ids": ["TestTrialUnittest.test_trial_testfunction_skip_property", "TestTrialUnittest.test_trial_testcase_skip_property", "TestTrialUnittest.test_trial_testcase_todo_property", "TestTrialUnittest.test_trial_pdb"], "tokens": 368}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTrialUnittest:\n\n def test_trial_pdb(self, pytester: Pytester) -> None:\n p = pytester.makepyfile(\n \"\"\"\n from twisted.trial import unittest\n import pytest\n class TC(unittest.TestCase):\n def test_hello(self):\n assert 0, \"hellopdb\"\n \"\"\"\n )\n child = pytester.spawn_pytest(str(p))\n child.expect(\"hellopdb\")\n child.sendeof()\n\n def test_trial_testcase_skip_property(self, pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n from twisted.trial import unittest\n class MyTestCase(unittest.TestCase):\n skip = 'dont run'\n def test_func(self):\n pass\n \"\"\"\n )\n reprec = pytester.inline_run(testpath, \"-s\")\n reprec.assertoutcome(skipped=1)\n\n def test_trial_testfunction_skip_property(self, pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n from twisted.trial import unittest\n class MyTestCase(unittest.TestCase):\n def test_func(self):\n pass\n test_func.skip = 'dont run'\n \"\"\"\n )\n reprec = pytester.inline_run(testpath, \"-s\")\n reprec.assertoutcome(skipped=1)\n\n def test_trial_testcase_todo_property(self, pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n from twisted.trial import unittest\n class MyTestCase(unittest.TestCase):\n todo = 'dont run'\n def test_func(self):\n assert 0\n \"\"\"\n )\n reprec = pytester.inline_run(testpath, \"-s\")\n reprec.assertoutcome(skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_testfunction_todo_property_TestTrialUnittest.test_trial_testfunction_todo_property.reprec_assertoutcome_skip": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_TestTrialUnittest.test_trial_testfunction_todo_property_TestTrialUnittest.test_trial_testfunction_todo_property.reprec_assertoutcome_skip", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 618, "end_line": 631, "span_ids": ["TestTrialUnittest.test_trial_testfunction_todo_property"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTrialUnittest:\n\n def test_trial_testfunction_todo_property(self, pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n from twisted.trial import unittest\n class MyTestCase(unittest.TestCase):\n def test_func(self):\n assert 0\n test_func.todo = 'dont run'\n \"\"\"\n )\n reprec = pytester.inline_run(\n testpath, \"-s\", *self.ignore_unclosed_socket_warning\n )\n reprec.assertoutcome(skipped=1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_not_shown_in_traceback_test_unorderable_types.assert_result_ret_Exit": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_not_shown_in_traceback_test_unorderable_types.assert_result_ret_Exit", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 692, "end_line": 723, "span_ids": ["test_unorderable_types", "test_unittest_not_shown_in_traceback"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_unittest_not_shown_in_traceback(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n class t(unittest.TestCase):\n def test_hello(self):\n x = 3\n self.assertEqual(x, 4)\n \"\"\"\n )\n res = pytester.runpytest()\n res.stdout.no_fnmatch_line(\"*failUnlessEqual*\")\n\n\ndef test_unorderable_types(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n class TestJoinEmpty(unittest.TestCase):\n pass\n\n def make_test():\n class Test(unittest.TestCase):\n pass\n Test.__name__ = \"TestFoo\"\n return Test\n TestFoo = make_test()\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.no_fnmatch_line(\"*TypeError*\")\n assert result.ret == ExitCode.NO_TESTS_COLLECTED", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, 
"__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_typerror_traceback_test_unittest_expected_failure_for_failing_test_is_xfail.assert_result_ret_0": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_unittest_typerror_traceback_test_unittest_expected_failure_for_failing_test_is_xfail.assert_result_ret_0", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 726, "end_line": 763, "span_ids": ["test_unittest_expected_failure_for_failing_test_is_xfail", "test_unittest_typerror_traceback"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_unittest_typerror_traceback(pytester: Pytester) -> None:\n pytester.makepyfile(\n \"\"\"\n import unittest\n class TestJoinEmpty(unittest.TestCase):\n def test_hello(self, arg1):\n pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert \"TypeError\" in result.stdout.str()\n assert result.ret == 1\n\n\n@pytest.mark.parametrize(\"runner\", [\"pytest\", \"unittest\"])\ndef test_unittest_expected_failure_for_failing_test_is_xfail(\n pytester: Pytester, runner\n) -> None:\n script = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n @unittest.expectedFailure\n def test_failing_test_is_xfail(self):\n assert False\n if __name__ == '__main__':\n unittest.main()\n \"\"\"\n )\n if runner == \"pytest\":\n result = pytester.runpytest(\"-rxX\")\n result.stdout.fnmatch_lines(\n [\"*XFAIL*MyTestCase*test_failing_test_is_xfail*\", \"*1 xfailed*\"]\n )\n else:\n result = pytester.runpython(script)\n result.stderr.fnmatch_lines([\"*1 test in*\", \"*OK*(expected failures=1)*\"])\n assert result.ret == 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_async_support_test_asynctest_support.reprec_assertoutcome_fail": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_async_support_test_asynctest_support.reprec_assertoutcome_fail", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1243, "end_line": 1257, "span_ids": ["test_asynctest_support", "test_async_support"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_async_support(pytester: Pytester) -> None:\n pytest.importorskip(\"unittest.async_case\")\n\n pytester.copy_example(\"unittest/test_unittest_asyncio.py\")\n reprec = pytester.inline_run()\n reprec.assertoutcome(failed=1, passed=2)\n\n\ndef test_asynctest_support(pytester: Pytester) -> None:\n \"\"\"Check asynctest support (#7110)\"\"\"\n 
pytest.importorskip(\"asynctest\")\n\n pytester.copy_example(\"unittest/test_unittest_asynctest.py\")\n reprec = pytester.inline_run()\n reprec.assertoutcome(failed=1, passed=2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_plain_unittest_does_not_support_async_test_plain_unittest_does_not_support_async.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_plain_unittest_does_not_support_async_test_plain_unittest_does_not_support_async.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1260, "end_line": 1280, "span_ids": ["test_plain_unittest_does_not_support_async"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_plain_unittest_does_not_support_async(pytester: Pytester) -> None:\n \"\"\"Async functions in plain unittest.TestCase subclasses are not supported without plugins.\n\n This test exists here to avoid introducing this support by accident, leading users\n to expect that it works, rather than doing so intentionally as a feature.\n\n See https://github.com/pytest-dev/pytest-asyncio/issues/180 for more context.\n \"\"\"\n pytester.copy_example(\"unittest/test_unittest_plain_async.py\")\n result = pytester.runpytest_subprocess()\n if hasattr(sys, \"pypy_version_info\"):\n # in PyPy we can't reliable get the warning about the coroutine not being awaited,\n # because it depends on the coroutine being garbage collected; given that\n # we are running in a subprocess, that's difficult to enforce\n expected_lines = [\"*1 passed*\"]\n else:\n expected_lines = [\n \"*RuntimeWarning: coroutine * was never awaited\",\n \"*1 passed*\",\n ]\n result.stdout.fnmatch_lines(expected_lines)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_success_test_do_class_cleanups_on_success.assert_passed_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_success_test_do_class_cleanups_on_success.assert_passed_3", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1283, "end_line": 1308, "span_ids": ["test_do_class_cleanups_on_success"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(\n sys.version_info < (3, 8), reason=\"Feature 
introduced in Python 3.8\"\n)\ndef test_do_class_cleanups_on_success(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n values = []\n @classmethod\n def setUpClass(cls):\n def cleanup():\n cls.values.append(1)\n cls.addClassCleanup(cleanup)\n def test_one(self):\n pass\n def test_two(self):\n pass\n def test_cleanup_called_exactly_once():\n assert MyTestCase.values == [1]\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 0\n assert passed == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_setupclass_failure_test_do_class_cleanups_on_setupclass_failure.assert_passed_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_setupclass_failure_test_do_class_cleanups_on_setupclass_failure.assert_passed_1", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1311, "end_line": 1335, "span_ids": ["test_do_class_cleanups_on_setupclass_failure"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(\n sys.version_info < (3, 8), reason=\"Feature introduced in Python 3.8\"\n)\ndef test_do_class_cleanups_on_setupclass_failure(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n values = []\n @classmethod\n def setUpClass(cls):\n def cleanup():\n cls.values.append(1)\n cls.addClassCleanup(cleanup)\n assert False\n def test_one(self):\n pass\n def test_cleanup_called_exactly_once():\n assert MyTestCase.values == [1]\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 1\n assert passed == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_teardownclass_failure_test_do_class_cleanups_on_teardownclass_failure.assert_passed_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_class_cleanups_on_teardownclass_failure_test_do_class_cleanups_on_teardownclass_failure.assert_passed_3", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1338, "end_line": 1365, "span_ids": ["test_do_class_cleanups_on_teardownclass_failure"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.skipif(\n sys.version_info < (3, 8), reason=\"Feature introduced in Python 3.8\"\n)\ndef test_do_class_cleanups_on_teardownclass_failure(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n values = []\n @classmethod\n def setUpClass(cls):\n def cleanup():\n cls.values.append(1)\n cls.addClassCleanup(cleanup)\n @classmethod\n def tearDownClass(cls):\n assert False\n def test_one(self):\n pass\n def test_two(self):\n pass\n def test_cleanup_called_exactly_once():\n assert MyTestCase.values == [1]\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n passed, skipped, failed = reprec.countoutcomes()\n assert passed == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_success_test_do_cleanups_on_success.assert_passed_3": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_success_test_do_cleanups_on_success.assert_passed_3", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1368, "end_line": 1389, "span_ids": ["test_do_cleanups_on_success"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_do_cleanups_on_success(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n values = []\n def setUp(self):\n def cleanup():\n self.values.append(1)\n self.addCleanup(cleanup)\n def test_one(self):\n pass\n def test_two(self):\n pass\n def test_cleanup_called_the_right_number_of_times():\n assert MyTestCase.values == [1, 1]\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 0\n assert passed == 3", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_setup_failure_test_do_cleanups_on_setup_failure.assert_passed_1": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_setup_failure_test_do_cleanups_on_setup_failure.assert_passed_1", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1392, "end_line": 1414, "span_ids": ["test_do_cleanups_on_setup_failure"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "def test_do_cleanups_on_setup_failure(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n values = []\n def setUp(self):\n def cleanup():\n self.values.append(1)\n self.addCleanup(cleanup)\n assert False\n def test_one(self):\n pass\n def test_two(self):\n pass\n def test_cleanup_called_the_right_number_of_times():\n assert MyTestCase.values == [1, 1]\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 2\n assert passed == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_teardown_failure_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unittest.py_test_do_cleanups_on_teardown_failure_", "embedding": null, "metadata": {"file_path": "testing/test_unittest.py", "file_name": "test_unittest.py", "file_type": "text/x-python", "category": "test", "start_line": 1417, "end_line": 1441, "span_ids": ["test_do_cleanups_on_teardown_failure"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def test_do_cleanups_on_teardown_failure(pytester: Pytester) -> None:\n testpath = pytester.makepyfile(\n \"\"\"\n import unittest\n class MyTestCase(unittest.TestCase):\n values = []\n def setUp(self):\n def cleanup():\n self.values.append(1)\n self.addCleanup(cleanup)\n def tearDown(self):\n assert False\n def test_one(self):\n pass\n def test_two(self):\n pass\n def test_cleanup_called_the_right_number_of_times():\n assert MyTestCase.values == [1, 1]\n \"\"\"\n )\n reprec = pytester.inline_run(testpath)\n passed, skipped, failed = reprec.countoutcomes()\n assert failed == 2\n assert passed == 1", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_sys_test_unraisable.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_sys_test_unraisable.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unraisableexception.py", "file_name": "test_unraisableexception.py", "file_type": "text/x-python", "category": "test", "start_line": 1, "end_line": 40, "span_ids": ["test_unraisable", "impl", "imports"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\n\nimport pytest\nfrom _pytest.pytester import Pytester\n\n\nif sys.version_info < (3, 8):\n pytest.skip(\"unraisableexception plugin needs Python>=3.8\", 
allow_module_level=True)\n\n\n@pytest.mark.filterwarnings(\"default\")\ndef test_unraisable(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n class BrokenDel:\n def __del__(self):\n raise ValueError(\"del is broken\")\n\n def test_it():\n obj = BrokenDel()\n del obj\n\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n assert result.parseoutcomes() == {\"passed\": 2, \"warnings\": 1}\n result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"test_it.py::test_it\",\n \" * PytestUnraisableExceptionWarning: Exception ignored in: \",\n \" \",\n \" Traceback (most recent call last):\",\n \" ValueError: del is broken\",\n \" \",\n \" warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_in_setup_test_unraisable_in_setup.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_in_setup_test_unraisable_in_setup.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unraisableexception.py", "file_name": "test_unraisableexception.py", "file_type": "text/x-python", "category": "test", "start_line": 43, "end_line": 76, "span_ids": ["test_unraisable_in_setup"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_unraisable_in_setup(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n import pytest\n\n class BrokenDel:\n def __del__(self):\n raise ValueError(\"del is broken\")\n\n @pytest.fixture\n def broken_del():\n obj = BrokenDel()\n del obj\n\n def test_it(broken_del): pass\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n assert result.parseoutcomes() == {\"passed\": 2, \"warnings\": 1}\n result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"test_it.py::test_it\",\n \" * PytestUnraisableExceptionWarning: Exception ignored in: \",\n \" \",\n \" Traceback (most recent call last):\",\n \" ValueError: del is broken\",\n \" \",\n \" warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_in_teardown_test_unraisable_in_teardown.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_in_teardown_test_unraisable_in_teardown.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_unraisableexception.py", "file_name": "test_unraisableexception.py", "file_type": "text/x-python", "category": "test", "start_line": 79, "end_line": 113, "span_ids": 
["test_unraisable_in_teardown"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"default\")\ndef test_unraisable_in_teardown(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n import pytest\n\n class BrokenDel:\n def __del__(self):\n raise ValueError(\"del is broken\")\n\n @pytest.fixture\n def broken_del():\n yield\n obj = BrokenDel()\n del obj\n\n def test_it(broken_del): pass\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == 0\n assert result.parseoutcomes() == {\"passed\": 2, \"warnings\": 1}\n result.stdout.fnmatch_lines(\n [\n \"*= warnings summary =*\",\n \"test_it.py::test_it\",\n \" * PytestUnraisableExceptionWarning: Exception ignored in: \",\n \" \",\n \" Traceback (most recent call last):\",\n \" ValueError: del is broken\",\n \" \",\n \" warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))\",\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_warning_error_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_unraisableexception.py_test_unraisable_warning_error_", "embedding": null, "metadata": {"file_path": "testing/test_unraisableexception.py", "file_name": "test_unraisableexception.py", "file_type": "text/x-python", "category": "test", "start_line": 116, "end_line": 134, "span_ids": ["test_unraisable_warning_error"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@pytest.mark.filterwarnings(\"error::pytest.PytestUnraisableExceptionWarning\")\ndef test_unraisable_warning_error(pytester: Pytester) -> None:\n pytester.makepyfile(\n test_it=\"\"\"\n class BrokenDel:\n def __del__(self) -> None:\n raise ValueError(\"del is broken\")\n\n def test_it() -> None:\n obj = BrokenDel()\n del obj\n\n def test_2(): pass\n \"\"\"\n )\n result = pytester.runpytest()\n assert result.ret == pytest.ExitCode.TESTS_FAILED\n assert result.parseoutcomes() == {\"passed\": 1, \"failed\": 1}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault_TestDeprecationWarningsByDefault.create_file.pytester_makepyfile_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestDeprecationWarningsByDefault_TestDeprecationWarningsByDefault.create_file.pytester_makepyfile_", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 428, "end_line": 447, 
"span_ids": ["TestDeprecationWarningsByDefault", "TestDeprecationWarningsByDefault.create_file"], "tokens": 120}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeprecationWarningsByDefault:\n \"\"\"\n Note: all pytest runs are executed in a subprocess so we don't inherit warning filters\n from pytest's own test suite\n \"\"\"\n\n def create_file(self, pytester: Pytester, mark=\"\") -> None:\n pytester.makepyfile(\n \"\"\"\n import pytest, warnings\n\n warnings.warn(DeprecationWarning(\"collection\"))\n\n {mark}\n def test_foo():\n warnings.warn(PendingDeprecationWarning(\"test run\"))\n \"\"\".format(\n mark=mark\n )\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_conftest_warning_captured_TestStackLevel.test_conftest_warning_captured.result_stdout_fnmatch_lin": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_conftest_warning_captured_TestStackLevel.test_conftest_warning_captured.result_stdout_fnmatch_lin", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 722, "end_line": 734, "span_ids": ["TestStackLevel.test_conftest_warning_captured"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStackLevel:\n\n @pytest.mark.filterwarnings(\"default\")\n def test_conftest_warning_captured(self, pytester: Pytester) -> None:\n \"\"\"Warnings raised during importing of conftest.py files is captured (#2891).\"\"\"\n pytester.makeconftest(\n \"\"\"\n import warnings\n warnings.warn(UserWarning(\"my custom warning\"))\n \"\"\"\n )\n result = pytester.runpytest()\n result.stdout.fnmatch_lines(\n [\"conftest.py:2\", \"*UserWarning: my custom warning*\"]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_import_plugin_TestStackLevel.test_issue4445_import_plugin.assert_func__warn_abo": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/test_warnings.py_TestStackLevel.test_issue4445_import_plugin_TestStackLevel.test_issue4445_import_plugin.assert_func__warn_abo", "embedding": null, "metadata": {"file_path": "testing/test_warnings.py", "file_name": "test_warnings.py", "file_type": "text/x-python", "category": "test", "start_line": 736, "end_line": 756, "span_ids": ["TestStackLevel.test_issue4445_import_plugin"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestStackLevel:\n\n def test_issue4445_import_plugin(self, pytester: Pytester, capwarn) -> None:\n \"\"\"#4445: Make sure the warning points to a reasonable location\"\"\"\n pytester.makepyfile(\n some_plugin=\"\"\"\n import pytest\n pytest.skip(\"thing\", allow_module_level=True)\n \"\"\"\n )\n pytester.syspathinsert()\n pytester.parseconfig(\"-p\", \"some_plugin\")\n\n # with stacklevel=2 the warning should originate from\n # config.PytestPluginManager.import_plugin is thrown by a skipped plugin\n\n assert len(capwarn.captured) == 1\n warning, location = capwarn.captured.pop()\n file, _, func = location\n\n assert \"skipped plugin 'some_plugin': thing\" in str(warning.message)\n assert f\"config{os.sep}__init__.py\" in file\n assert func == \"_warn_about_skipped_plugins\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/tmp/repos/swe-bench_pytest-dev__pytest/testing/typing_checks.py__File_for_checking_typi_": {"__data__": {"id_": "/tmp/repos/swe-bench_pytest-dev__pytest/testing/typing_checks.py__File_for_checking_typi_", "embedding": null, "metadata": {"file_path": "testing/typing_checks.py", "file_name": "typing_checks.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 25, "span_ids": ["check_parametrize_ids_callable", "check_fixture_ids_callable", "docstring", "check_mark_xfail_raises", "docstring:2", "imports"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "\"\"\"File for checking typing issues.\n\nThis file is not executed, it is only checked by mypy to ensure that\nnone of the code triggers any mypy errors.\n\"\"\"\nimport pytest\n\n\n# Issue #7488.\n@pytest.mark.xfail(raises=RuntimeError)\ndef check_mark_xfail_raises() -> None:\n pass\n\n\n# Issue #7494.\n@pytest.fixture(params=[(0, 0), (1, 1)], ids=lambda x: str(x[0]))\ndef check_fixture_ids_callable() -> None:\n pass\n\n\n# Issue #7494.\n@pytest.mark.parametrize(\"func\", [str, int], ids=lambda x: str(x.__name__))\ndef check_parametrize_ids_callable(func) -> None:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}}} \ No newline at end of file