├── refactor-benchmark ├── shell_Command_python │ ├── .docs │ │ └── instructions.md │ ├── shell_test.py │ └── shell.py ├── config_NetworkConfig_parse │ ├── .docs │ │ └── instructions.md │ └── config_test.py ├── special_RunSQL__run_sql │ ├── .docs │ │ └── instructions.md │ ├── special_test.py │ └── special.py ├── inspectdb_Command_get_meta │ ├── .docs │ │ └── instructions.md │ └── inspectdb_test.py ├── figure_FigureBase_colorbar │ ├── .docs │ │ └── instructions.md │ └── figure_test.py ├── finders_FileSystemFinder_check │ ├── .docs │ │ └── instructions.md │ ├── finders_test.py │ └── finders.py ├── baseconv_BaseConverter_convert │ ├── .docs │ │ └── instructions.md │ ├── baseconv_test.py │ └── baseconv.py ├── dataloader_DataLoader__is_role │ ├── .docs │ │ └── instructions.md │ └── dataloader_test.py ├── quiver_Barbs__make_barbs │ ├── .docs │ │ └── instructions.md │ └── quiver_test.py ├── weather_NWSWeather__forecast │ ├── .docs │ │ └── instructions.md │ └── weather_test.py ├── group_batch_fusion_GroupLinearFusion_fuse │ ├── .docs │ │ └── instructions.md │ └── group_batch_fusion_test.py ├── backends_ModelBackend_with_perm │ ├── .docs │ │ └── instructions.md │ ├── backends_test.py │ └── backends.py ├── diffsettings_Command_output_hash │ ├── .docs │ │ └── instructions.md │ ├── diffsettings_test.py │ └── diffsettings.py ├── getitem_BaseGetitemTests_test_get │ ├── .docs │ │ └── instructions.md │ └── getitem_test.py ├── migrate_Command_add_arguments │ ├── .docs │ │ └── instructions.md │ └── migrate_test.py ├── dumpdata_Command_add_arguments │ ├── .docs │ │ └── instructions.md │ ├── dumpdata_test.py │ └── dumpdata.py ├── i18n_JavaScriptCatalog_get_paths │ ├── .docs │ │ └── instructions.md │ └── i18n_test.py ├── makemessages_Command_add_arguments │ ├── .docs │ │ └── instructions.md │ └── makemessages_test.py ├── ogrinspect_Command_add_arguments │ ├── .docs │ │ └── instructions.md │ ├── ogrinspect_test.py │ └── ogrinspect.py ├── options_ModelAdmin_message_user │ ├── .docs │ │ 
└── instructions.md │ └── options_test.py ├── symbolic_shapes_ShapeEnv_bind_symbols │ ├── .docs │ │ └── instructions.md │ └── symbolic_shapes_test.py ├── inspectdb_Command_get_field_type │ ├── .docs │ │ └── instructions.md │ └── inspectdb_test.py ├── makemigrations_Command_add_arguments │ ├── .docs │ │ └── instructions.md │ └── makemigrations_test.py ├── base_BaseHandler_check_response │ ├── .docs │ │ └── instructions.md │ └── base_test.py ├── main_widget_PylintWidget_parse_output │ ├── .docs │ │ └── instructions.md │ └── main_widget_test.py ├── patches__Curve__get_arrow_wedge │ ├── .docs │ │ └── instructions.md │ └── patches_test.py ├── builtin_BuiltinVariable_call_setattr │ ├── .docs │ │ └── instructions.md │ └── builtin_test.py ├── codeeditor_CodeEditor___get_brackets │ ├── .docs │ │ └── instructions.md │ └── codeeditor_test.py ├── common_utils_TestCase_genSparseTensor │ ├── .docs │ │ └── instructions.md │ └── common_utils_test.py ├── doc_DocCLI_get_role_man_text │ ├── .docs │ │ └── instructions.md │ └── doc_test.py ├── graph_MigrationGraph_iterative_dfs │ ├── .docs │ │ └── instructions.md │ └── graph_test.py ├── kernel_SpyderKernel_get_fault_text │ ├── .docs │ │ └── instructions.md │ └── kernel_test.py ├── grad_scaler_GradScaler__unscale_grads_ │ ├── .docs │ │ └── instructions.md │ └── grad_scaler_test.py ├── split_cat_SplitCatSimplifier_replace_cat │ ├── .docs │ │ └── instructions.md │ └── split_cat_test.py ├── config_AppConfig__path_from_module │ ├── .docs │ │ └── instructions.md │ └── config_test.py ├── doc_DocCLI_display_plugin_list │ ├── .docs │ │ └── instructions.md │ └── doc_test.py ├── reshaping_BaseReshapingTests_test_unstack │ ├── .docs │ │ └── instructions.md │ └── reshaping_test.py ├── schema_DatabaseSchemaEditor_quote_value │ ├── .docs │ │ └── instructions.md │ └── schema_test.py ├── triton_TritonScheduling_define_kernel │ ├── .docs │ │ └── instructions.md │ └── triton_test.py ├── base_BaseHandler_adapt_method_mode │ ├── .docs │ │ └── 
instructions.md │ └── base_test.py ├── config_ConfigCLI__get_settings_vars │ ├── .docs │ │ └── instructions.md │ └── config_test.py ├── inspectdb_Command_normalize_col_name │ ├── .docs │ │ └── instructions.md │ └── inspectdb_test.py ├── polar_RadialTick__determine_anchor │ ├── .docs │ │ └── instructions.md │ └── polar_test.py ├── split_cat_SplitCatSimplifier_replace_split │ ├── .docs │ │ └── instructions.md │ └── split_cat_test.py ├── text_CountVectorizer__limit_features │ ├── .docs │ │ └── instructions.md │ └── text_test.py ├── dataframeeditor_DataFrameView_next_index_name │ ├── .docs │ │ └── instructions.md │ └── dataframeeditor_test.py ├── feedgenerator_Atom1Feed_add_item_elements │ ├── .docs │ │ └── instructions.md │ └── feedgenerator_test.py ├── operations_OracleOperations_convert_extent │ ├── .docs │ │ └── instructions.md │ ├── operations_test.py │ └── operations.py ├── autodetector_MigrationAutodetector__trim_to_apps │ ├── .docs │ │ └── instructions.md │ └── autodetector_test.py ├── csrf_CsrfViewMiddleware__set_csrf_cookie │ ├── .docs │ │ └── instructions.md │ └── csrf_test.py ├── operations_DatabaseOperations_bulk_insert_sql │ ├── .docs │ │ └── instructions.md │ └── operations_test.py ├── sharding_policies_MaxShardSizePolicy__add_partition │ ├── .docs │ │ └── instructions.md │ └── sharding_policies_test.py ├── analyzer_cli_DebugAnalyzer__make_source_table │ ├── .docs │ │ └── instructions.md │ └── analyzer_cli_test.py ├── gateway_Gateway_get_and_delete_all_sms │ ├── .docs │ │ └── instructions.md │ ├── gateway_test.py │ └── gateway.py ├── methods_BaseMethodsTests_test_where_series │ ├── .docs │ │ └── instructions.md │ └── methods_test.py ├── concat__Concatenator__clean_keys_and_objs │ ├── .docs │ │ └── instructions.md │ └── concat_test.py ├── autodetector_MigrationAutodetector_check_dependency │ ├── .docs │ │ └── instructions.md │ └── autodetector_test.py ├── checks_ModelAdminChecks__check_inlines_item │ ├── .docs │ │ └── instructions.md │ └── checks_test.py 
├── clustering_ops_KMeans__mini_batch_training_op │ ├── .docs │ │ └── instructions.md │ └── clustering_ops_test.py ├── compile_utils_MetricsContainer__get_metric_object │ ├── .docs │ │ └── instructions.md │ └── compile_utils_test.py ├── galaxy_GalaxyCLI_execute_list_collection │ ├── .docs │ │ └── instructions.md │ └── galaxy_test.py ├── generic_bsd_GenericBsdIfconfigNetwork_parse_inet_line │ ├── .docs │ │ └── instructions.md │ └── generic_bsd_test.py ├── onnxfunction_dispatcher_OnnxFunctionDispatcher__get_aten_name │ ├── .docs │ │ └── instructions.md │ └── onnxfunction_dispatcher_test.py ├── autosave_AutosaveForPlugin_get_files_to_recover │ ├── .docs │ │ └── instructions.md │ └── autosave_test.py ├── operations_DatabaseOperations_last_executed_query │ ├── .docs │ │ └── instructions.md │ └── operations_test.py ├── checks_BaseModelAdminChecks__check_ordering_item │ ├── .docs │ │ └── instructions.md │ └── checks_test.py ├── graph_drawer_FxGraphDrawer__stringify_tensor_meta │ ├── .docs │ │ └── instructions.md │ └── graph_drawer_test.py ├── split_cat_SplitCatSimplifier_get_transform_params │ ├── .docs │ │ └── instructions.md │ └── split_cat_test.py ├── triton_TritonScheduling_generate_node_schedule │ ├── .docs │ │ └── instructions.md │ └── triton_test.py ├── dim2_Dim2CompatTests_test_reductions_2d_axis0 │ ├── .docs │ │ └── instructions.md │ └── dim2_test.py ├── load_v1_in_v2__EagerSavedModelLoader__extract_signatures │ ├── .docs │ │ └── instructions.md │ └── load_v1_in_v2_test.py ├── checks_ModelAdminChecks__check_list_display_item │ ├── .docs │ │ └── instructions.md │ └── checks_test.py ├── checks_ModelAdminChecks__check_list_editable_item │ ├── .docs │ │ └── instructions.md │ └── checks_test.py ├── functional_Functional__conform_to_reference_input │ ├── .docs │ │ └── instructions.md │ └── functional_test.py ├── gradient_checker_GradientChecker__assertInferTensorChecks │ ├── .docs │ │ └── instructions.md │ └── gradient_checker_test.py ├── 
profile_analyzer_cli_ProfileAnalyzer__get_list_profile_lines │ ├── .docs │ │ └── instructions.md │ └── profile_analyzer_cli_test.py ├── cuda_cpp_scheduling_CUDACPPScheduling__can_fuse_epilogue_impl │ ├── .docs │ │ └── instructions.md │ ├── cuda_cpp_scheduling_test.py │ └── cuda_cpp_scheduling.py ├── introspection_DatabaseIntrospection__get_column_collations │ ├── .docs │ │ └── instructions.md │ └── introspection_test.py ├── operations_DatabaseOperations_check_expression_support │ ├── .docs │ │ └── instructions.md │ └── operations_test.py ├── reshaping_BaseReshapingTests_test_concat_mixed_dtypes │ ├── .docs │ │ └── instructions.md │ └── reshaping_test.py ├── checks_BaseModelAdminChecks__check_raw_id_fields_item │ ├── .docs │ │ └── instructions.md │ └── checks_test.py ├── common_methods_invocations_foreach_inputs_sample_func__sample_rightmost_arg │ ├── .docs │ │ └── instructions.md │ └── common_methods_invocations_test.py ├── distribution_DistributionFiles_parse_distribution_file_SUSE │ ├── .docs │ │ └── instructions.md │ └── distribution_test.py ├── checks_ModelAdminChecks__check_action_permission_methods │ ├── .docs │ │ └── instructions.md │ └── checks_test.py ├── coordinator_HERETransitDataUpdateCoordinator__parse_transit_response │ ├── .docs │ │ └── instructions.md │ └── coordinator_test.py ├── checks_BaseModelAdminChecks__check_autocomplete_fields_item │ ├── .docs │ │ └── instructions.md │ └── checks_test.py ├── generator_GenOpTestCase_out_variant_op_test_case_generator │ ├── .docs │ │ └── instructions.md │ └── generator_test.py ├── grpc_debug_server_EventListenerBaseServicer__process_tensor_event_in_chunks │ ├── .docs │ │ └── instructions.md │ └── grpc_debug_server_test.py └── introspection_DatabaseIntrospection__parse_column_or_constraint_definition │ ├── .docs │ │ └── instructions.md │ └── introspection_test.py ├── README.md └── LICENSE /refactor-benchmark/shell_Command_python/.docs/instructions.md: 
-------------------------------------------------------------------------------- 1 | # Refactor Command.python 2 | 3 | Refactor the `python` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `python`, exactly the same name as the existing method. 5 | Update any existing `self.python` calls to work with the new `python` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/config_NetworkConfig_parse/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor NetworkConfig.parse 2 | 3 | Refactor the `parse` method in the `NetworkConfig` class to be a stand alone, top level function. 4 | Name the new function `parse`, exactly the same name as the existing method. 5 | Update any existing `self.parse` calls to work with the new `parse` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/special_RunSQL__run_sql/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor RunSQL._run_sql 2 | 3 | Refactor the `_run_sql` method in the `RunSQL` class to be a stand alone, top level function. 4 | Name the new function `_run_sql`, exactly the same name as the existing method. 5 | Update any existing `self._run_sql` calls to work with the new `_run_sql` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/inspectdb_Command_get_meta/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.get_meta 2 | 3 | Refactor the `get_meta` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `get_meta`, exactly the same name as the existing method. 5 | Update any existing `self.get_meta` calls to work with the new `get_meta` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/figure_FigureBase_colorbar/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor FigureBase.colorbar 2 | 3 | Refactor the `colorbar` method in the `FigureBase` class to be a stand alone, top level function. 4 | Name the new function `colorbar`, exactly the same name as the existing method. 5 | Update any existing `self.colorbar` calls to work with the new `colorbar` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/finders_FileSystemFinder_check/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor FileSystemFinder.check 2 | 3 | Refactor the `check` method in the `FileSystemFinder` class to be a stand alone, top level function. 4 | Name the new function `check`, exactly the same name as the existing method. 5 | Update any existing `self.check` calls to work with the new `check` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/baseconv_BaseConverter_convert/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseConverter.convert 2 | 3 | Refactor the `convert` method in the `BaseConverter` class to be a stand alone, top level function. 4 | Name the new function `convert`, exactly the same name as the existing method. 5 | Update any existing `self.convert` calls to work with the new `convert` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/dataloader_DataLoader__is_role/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DataLoader._is_role 2 | 3 | Refactor the `_is_role` method in the `DataLoader` class to be a stand alone, top level function. 4 | Name the new function `_is_role`, exactly the same name as the existing method. 5 | Update any existing `self._is_role` calls to work with the new `_is_role` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/quiver_Barbs__make_barbs/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Barbs._make_barbs 2 | 3 | Refactor the `_make_barbs` method in the `Barbs` class to be a stand alone, top level function. 4 | Name the new function `_make_barbs`, exactly the same name as the existing method. 5 | Update any existing `self._make_barbs` calls to work with the new `_make_barbs` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/weather_NWSWeather__forecast/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor NWSWeather._forecast 2 | 3 | Refactor the `_forecast` method in the `NWSWeather` class to be a stand alone, top level function. 4 | Name the new function `_forecast`, exactly the same name as the existing method. 5 | Update any existing `self._forecast` calls to work with the new `_forecast` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/group_batch_fusion_GroupLinearFusion_fuse/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor GroupLinearFusion.fuse 2 | 3 | Refactor the `fuse` method in the `GroupLinearFusion` class to be a stand alone, top level function. 4 | Name the new function `fuse`, exactly the same name as the existing method. 5 | Update any existing `self.fuse` calls to work with the new `fuse` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/backends_ModelBackend_with_perm/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ModelBackend.with_perm 2 | 3 | Refactor the `with_perm` method in the `ModelBackend` class to be a stand alone, top level function. 4 | Name the new function `with_perm`, exactly the same name as the existing method. 5 | Update any existing `self.with_perm` calls to work with the new `with_perm` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/diffsettings_Command_output_hash/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.output_hash 2 | 3 | Refactor the `output_hash` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `output_hash`, exactly the same name as the existing method. 5 | Update any existing `self.output_hash` calls to work with the new `output_hash` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/getitem_BaseGetitemTests_test_get/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseGetitemTests.test_get 2 | 3 | Refactor the `test_get` method in the `BaseGetitemTests` class to be a stand alone, top level function. 4 | Name the new function `test_get`, exactly the same name as the existing method. 5 | Update any existing `self.test_get` calls to work with the new `test_get` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/migrate_Command_add_arguments/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.add_arguments 2 | 3 | Refactor the `add_arguments` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `add_arguments`, exactly the same name as the existing method. 5 | Update any existing `self.add_arguments` calls to work with the new `add_arguments` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/dumpdata_Command_add_arguments/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.add_arguments 2 | 3 | Refactor the `add_arguments` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `add_arguments`, exactly the same name as the existing method. 5 | Update any existing `self.add_arguments` calls to work with the new `add_arguments` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/i18n_JavaScriptCatalog_get_paths/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor JavaScriptCatalog.get_paths 2 | 3 | Refactor the `get_paths` method in the `JavaScriptCatalog` class to be a stand alone, top level function. 4 | Name the new function `get_paths`, exactly the same name as the existing method. 5 | Update any existing `self.get_paths` calls to work with the new `get_paths` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/makemessages_Command_add_arguments/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.add_arguments 2 | 3 | Refactor the `add_arguments` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `add_arguments`, exactly the same name as the existing method. 5 | Update any existing `self.add_arguments` calls to work with the new `add_arguments` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/ogrinspect_Command_add_arguments/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.add_arguments 2 | 3 | Refactor the `add_arguments` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `add_arguments`, exactly the same name as the existing method. 5 | Update any existing `self.add_arguments` calls to work with the new `add_arguments` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/options_ModelAdmin_message_user/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ModelAdmin.message_user 2 | 3 | Refactor the `message_user` method in the `ModelAdmin` class to be a stand alone, top level function. 4 | Name the new function `message_user`, exactly the same name as the existing method. 5 | Update any existing `self.message_user` calls to work with the new `message_user` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/symbolic_shapes_ShapeEnv_bind_symbols/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ShapeEnv.bind_symbols 2 | 3 | Refactor the `bind_symbols` method in the `ShapeEnv` class to be a stand alone, top level function. 4 | Name the new function `bind_symbols`, exactly the same name as the existing method. 5 | Update any existing `self.bind_symbols` calls to work with the new `bind_symbols` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/inspectdb_Command_get_field_type/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.get_field_type 2 | 3 | Refactor the `get_field_type` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `get_field_type`, exactly the same name as the existing method. 5 | Update any existing `self.get_field_type` calls to work with the new `get_field_type` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/makemigrations_Command_add_arguments/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.add_arguments 2 | 3 | Refactor the `add_arguments` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `add_arguments`, exactly the same name as the existing method. 5 | Update any existing `self.add_arguments` calls to work with the new `add_arguments` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/base_BaseHandler_check_response/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseHandler.check_response 2 | 3 | Refactor the `check_response` method in the `BaseHandler` class to be a stand alone, top level function. 4 | Name the new function `check_response`, exactly the same name as the existing method. 5 | Update any existing `self.check_response` calls to work with the new `check_response` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/main_widget_PylintWidget_parse_output/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor PylintWidget.parse_output 2 | 3 | Refactor the `parse_output` method in the `PylintWidget` class to be a stand alone, top level function. 4 | Name the new function `parse_output`, exactly the same name as the existing method. 5 | Update any existing `self.parse_output` calls to work with the new `parse_output` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/patches__Curve__get_arrow_wedge/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor _Curve._get_arrow_wedge 2 | 3 | Refactor the `_get_arrow_wedge` method in the `_Curve` class to be a stand alone, top level function. 4 | Name the new function `_get_arrow_wedge`, exactly the same name as the existing method. 5 | Update any existing `self._get_arrow_wedge` calls to work with the new `_get_arrow_wedge` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/builtin_BuiltinVariable_call_setattr/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BuiltinVariable.call_setattr 2 | 3 | Refactor the `call_setattr` method in the `BuiltinVariable` class to be a stand alone, top level function. 4 | Name the new function `call_setattr`, exactly the same name as the existing method. 5 | Update any existing `self.call_setattr` calls to work with the new `call_setattr` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/codeeditor_CodeEditor___get_brackets/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor CodeEditor.__get_brackets 2 | 3 | Refactor the `__get_brackets` method in the `CodeEditor` class to be a stand alone, top level function. 4 | Name the new function `__get_brackets`, exactly the same name as the existing method. 5 | Update any existing `self.__get_brackets` calls to work with the new `__get_brackets` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/common_utils_TestCase_genSparseTensor/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor TestCase.genSparseTensor 2 | 3 | Refactor the `genSparseTensor` method in the `TestCase` class to be a stand alone, top level function. 4 | Name the new function `genSparseTensor`, exactly the same name as the existing method. 5 | Update any existing `self.genSparseTensor` calls to work with the new `genSparseTensor` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/doc_DocCLI_get_role_man_text/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DocCLI.get_role_man_text 2 | 3 | Refactor the `get_role_man_text` method in the `DocCLI` class to be a stand alone, top level function. 4 | Name the new function `get_role_man_text`, exactly the same name as the existing method. 5 | Update any existing `self.get_role_man_text` calls to work with the new `get_role_man_text` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/graph_MigrationGraph_iterative_dfs/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor MigrationGraph.iterative_dfs 2 | 3 | Refactor the `iterative_dfs` method in the `MigrationGraph` class to be a stand alone, top level function. 4 | Name the new function `iterative_dfs`, exactly the same name as the existing method. 5 | Update any existing `self.iterative_dfs` calls to work with the new `iterative_dfs` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/kernel_SpyderKernel_get_fault_text/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor SpyderKernel.get_fault_text 2 | 3 | Refactor the `get_fault_text` method in the `SpyderKernel` class to be a stand alone, top level function. 4 | Name the new function `get_fault_text`, exactly the same name as the existing method. 5 | Update any existing `self.get_fault_text` calls to work with the new `get_fault_text` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/grad_scaler_GradScaler__unscale_grads_/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor GradScaler._unscale_grads_ 2 | 3 | Refactor the `_unscale_grads_` method in the `GradScaler` class to be a stand alone, top level function. 4 | Name the new function `_unscale_grads_`, exactly the same name as the existing method. 5 | Update any existing `self._unscale_grads_` calls to work with the new `_unscale_grads_` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/split_cat_SplitCatSimplifier_replace_cat/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor SplitCatSimplifier.replace_cat 2 | 3 | Refactor the `replace_cat` method in the `SplitCatSimplifier` class to be a stand alone, top level function. 4 | Name the new function `replace_cat`, exactly the same name as the existing method. 5 | Update any existing `self.replace_cat` calls to work with the new `replace_cat` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/config_AppConfig__path_from_module/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor AppConfig._path_from_module 2 | 3 | Refactor the `_path_from_module` method in the `AppConfig` class to be a stand alone, top level function. 4 | Name the new function `_path_from_module`, exactly the same name as the existing method. 5 | Update any existing `self._path_from_module` calls to work with the new `_path_from_module` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/doc_DocCLI_display_plugin_list/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DocCLI.display_plugin_list 2 | 3 | Refactor the `display_plugin_list` method in the `DocCLI` class to be a stand alone, top level function. 4 | Name the new function `display_plugin_list`, exactly the same name as the existing method. 5 | Update any existing `self.display_plugin_list` calls to work with the new `display_plugin_list` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/reshaping_BaseReshapingTests_test_unstack/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseReshapingTests.test_unstack 2 | 3 | Refactor the `test_unstack` method in the `BaseReshapingTests` class to be a stand alone, top level function. 4 | Name the new function `test_unstack`, exactly the same name as the existing method. 5 | Update any existing `self.test_unstack` calls to work with the new `test_unstack` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/schema_DatabaseSchemaEditor_quote_value/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DatabaseSchemaEditor.quote_value 2 | 3 | Refactor the `quote_value` method in the `DatabaseSchemaEditor` class to be a stand alone, top level function. 4 | Name the new function `quote_value`, exactly the same name as the existing method. 5 | Update any existing `self.quote_value` calls to work with the new `quote_value` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/triton_TritonScheduling_define_kernel/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor TritonScheduling.define_kernel 2 | 3 | Refactor the `define_kernel` method in the `TritonScheduling` class to be a stand alone, top level function. 4 | Name the new function `define_kernel`, exactly the same name as the existing method. 5 | Update any existing `self.define_kernel` calls to work with the new `define_kernel` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/base_BaseHandler_adapt_method_mode/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseHandler.adapt_method_mode 2 | 3 | Refactor the `adapt_method_mode` method in the `BaseHandler` class to be a stand alone, top level function. 4 | Name the new function `adapt_method_mode`, exactly the same name as the existing method. 5 | Update any existing `self.adapt_method_mode` calls to work with the new `adapt_method_mode` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/config_ConfigCLI__get_settings_vars/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ConfigCLI._get_settings_vars 2 | 3 | Refactor the `_get_settings_vars` method in the `ConfigCLI` class to be a stand alone, top level function. 4 | Name the new function `_get_settings_vars`, exactly the same name as the existing method. 5 | Update any existing `self._get_settings_vars` calls to work with the new `_get_settings_vars` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/inspectdb_Command_normalize_col_name/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Command.normalize_col_name 2 | 3 | Refactor the `normalize_col_name` method in the `Command` class to be a stand alone, top level function. 4 | Name the new function `normalize_col_name`, exactly the same name as the existing method. 5 | Update any existing `self.normalize_col_name` calls to work with the new `normalize_col_name` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/polar_RadialTick__determine_anchor/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor RadialTick._determine_anchor 2 | 3 | Refactor the `_determine_anchor` method in the `RadialTick` class to be a stand alone, top level function. 4 | Name the new function `_determine_anchor`, exactly the same name as the existing method. 5 | Update any existing `self._determine_anchor` calls to work with the new `_determine_anchor` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/split_cat_SplitCatSimplifier_replace_split/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor SplitCatSimplifier.replace_split 2 | 3 | Refactor the `replace_split` method in the `SplitCatSimplifier` class to be a stand alone, top level function. 4 | Name the new function `replace_split`, exactly the same name as the existing method. 5 | Update any existing `self.replace_split` calls to work with the new `replace_split` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/text_CountVectorizer__limit_features/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor CountVectorizer._limit_features 2 | 3 | Refactor the `_limit_features` method in the `CountVectorizer` class to be a stand alone, top level function. 4 | Name the new function `_limit_features`, exactly the same name as the existing method. 5 | Update any existing `self._limit_features` calls to work with the new `_limit_features` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/dataframeeditor_DataFrameView_next_index_name/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DataFrameView.next_index_name 2 | 3 | Refactor the `next_index_name` method in the `DataFrameView` class to be a stand alone, top level function. 4 | Name the new function `next_index_name`, exactly the same name as the existing method. 5 | Update any existing `self.next_index_name` calls to work with the new `next_index_name` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/feedgenerator_Atom1Feed_add_item_elements/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Atom1Feed.add_item_elements 2 | 3 | Refactor the `add_item_elements` method in the `Atom1Feed` class to be a stand alone, top level function. 4 | Name the new function `add_item_elements`, exactly the same name as the existing method. 5 | Update any existing `self.add_item_elements` calls to work with the new `add_item_elements` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_OracleOperations_convert_extent/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor OracleOperations.convert_extent 2 | 3 | Refactor the `convert_extent` method in the `OracleOperations` class to be a stand alone, top level function. 4 | Name the new function `convert_extent`, exactly the same name as the existing method. 5 | Update any existing `self.convert_extent` calls to work with the new `convert_extent` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/autodetector_MigrationAutodetector__trim_to_apps/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor MigrationAutodetector._trim_to_apps 2 | 3 | Refactor the `_trim_to_apps` method in the `MigrationAutodetector` class to be a stand alone, top level function. 4 | Name the new function `_trim_to_apps`, exactly the same name as the existing method. 5 | Update any existing `self._trim_to_apps` calls to work with the new `_trim_to_apps` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/csrf_CsrfViewMiddleware__set_csrf_cookie/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor CsrfViewMiddleware._set_csrf_cookie 2 | 3 | Refactor the `_set_csrf_cookie` method in the `CsrfViewMiddleware` class to be a stand alone, top level function. 4 | Name the new function `_set_csrf_cookie`, exactly the same name as the existing method. 5 | Update any existing `self._set_csrf_cookie` calls to work with the new `_set_csrf_cookie` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_DatabaseOperations_bulk_insert_sql/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DatabaseOperations.bulk_insert_sql 2 | 3 | Refactor the `bulk_insert_sql` method in the `DatabaseOperations` class to be a stand alone, top level function. 4 | Name the new function `bulk_insert_sql`, exactly the same name as the existing method. 5 | Update any existing `self.bulk_insert_sql` calls to work with the new `bulk_insert_sql` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/sharding_policies_MaxShardSizePolicy__add_partition/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor MaxShardSizePolicy._add_partition 2 | 3 | Refactor the `_add_partition` method in the `MaxShardSizePolicy` class to be a stand alone, top level function. 4 | Name the new function `_add_partition`, exactly the same name as the existing method. 5 | Update any existing `self._add_partition` calls to work with the new `_add_partition` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/analyzer_cli_DebugAnalyzer__make_source_table/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DebugAnalyzer._make_source_table 2 | 3 | Refactor the `_make_source_table` method in the `DebugAnalyzer` class to be a stand alone, top level function. 4 | Name the new function `_make_source_table`, exactly the same name as the existing method. 5 | Update any existing `self._make_source_table` calls to work with the new `_make_source_table` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/gateway_Gateway_get_and_delete_all_sms/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Gateway.get_and_delete_all_sms 2 | 3 | Refactor the `get_and_delete_all_sms` method in the `Gateway` class to be a stand alone, top level function. 4 | Name the new function `get_and_delete_all_sms`, exactly the same name as the existing method. 5 | Update any existing `self.get_and_delete_all_sms` calls to work with the new `get_and_delete_all_sms` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/methods_BaseMethodsTests_test_where_series/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseMethodsTests.test_where_series 2 | 3 | Refactor the `test_where_series` method in the `BaseMethodsTests` class to be a stand alone, top level function. 4 | Name the new function `test_where_series`, exactly the same name as the existing method. 5 | Update any existing `self.test_where_series` calls to work with the new `test_where_series` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/concat__Concatenator__clean_keys_and_objs/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor _Concatenator._clean_keys_and_objs 2 | 3 | Refactor the `_clean_keys_and_objs` method in the `_Concatenator` class to be a stand alone, top level function. 4 | Name the new function `_clean_keys_and_objs`, exactly the same name as the existing method. 5 | Update any existing `self._clean_keys_and_objs` calls to work with the new `_clean_keys_and_objs` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/autodetector_MigrationAutodetector_check_dependency/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor MigrationAutodetector.check_dependency 2 | 3 | Refactor the `check_dependency` method in the `MigrationAutodetector` class to be a stand alone, top level function. 4 | Name the new function `check_dependency`, exactly the same name as the existing method. 5 | Update any existing `self.check_dependency` calls to work with the new `check_dependency` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_inlines_item/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ModelAdminChecks._check_inlines_item 2 | 3 | Refactor the `_check_inlines_item` method in the `ModelAdminChecks` class to be a stand alone, top level function. 4 | Name the new function `_check_inlines_item`, exactly the same name as the existing method. 5 | Update any existing `self._check_inlines_item` calls to work with the new `_check_inlines_item` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/clustering_ops_KMeans__mini_batch_training_op/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor KMeans._mini_batch_training_op 2 | 3 | Refactor the `_mini_batch_training_op` method in the `KMeans` class to be a stand alone, top level function. 4 | Name the new function `_mini_batch_training_op`, exactly the same name as the existing method. 5 | Update any existing `self._mini_batch_training_op` calls to work with the new `_mini_batch_training_op` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/compile_utils_MetricsContainer__get_metric_object/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor MetricsContainer._get_metric_object 2 | 3 | Refactor the `_get_metric_object` method in the `MetricsContainer` class to be a stand alone, top level function. 4 | Name the new function `_get_metric_object`, exactly the same name as the existing method. 5 | Update any existing `self._get_metric_object` calls to work with the new `_get_metric_object` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/galaxy_GalaxyCLI_execute_list_collection/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor GalaxyCLI.execute_list_collection 2 | 3 | Refactor the `execute_list_collection` method in the `GalaxyCLI` class to be a stand alone, top level function. 4 | Name the new function `execute_list_collection`, exactly the same name as the existing method. 5 | Update any existing `self.execute_list_collection` calls to work with the new `execute_list_collection` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/generic_bsd_GenericBsdIfconfigNetwork_parse_inet_line/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor GenericBsdIfconfigNetwork.parse_inet_line 2 | 3 | Refactor the `parse_inet_line` method in the `GenericBsdIfconfigNetwork` class to be a stand alone, top level function. 4 | Name the new function `parse_inet_line`, exactly the same name as the existing method. 5 | Update any existing `self.parse_inet_line` calls to work with the new `parse_inet_line` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/onnxfunction_dispatcher_OnnxFunctionDispatcher__get_aten_name/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor OnnxFunctionDispatcher._get_aten_name 2 | 3 | Refactor the `_get_aten_name` method in the `OnnxFunctionDispatcher` class to be a stand alone, top level function. 4 | Name the new function `_get_aten_name`, exactly the same name as the existing method. 5 | Update any existing `self._get_aten_name` calls to work with the new `_get_aten_name` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/autosave_AutosaveForPlugin_get_files_to_recover/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor AutosaveForPlugin.get_files_to_recover 2 | 3 | Refactor the `get_files_to_recover` method in the `AutosaveForPlugin` class to be a stand alone, top level function. 4 | Name the new function `get_files_to_recover`, exactly the same name as the existing method. 5 | Update any existing `self.get_files_to_recover` calls to work with the new `get_files_to_recover` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_DatabaseOperations_last_executed_query/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DatabaseOperations.last_executed_query 2 | 3 | Refactor the `last_executed_query` method in the `DatabaseOperations` class to be a stand alone, top level function. 4 | Name the new function `last_executed_query`, exactly the same name as the existing method. 5 | Update any existing `self.last_executed_query` calls to work with the new `last_executed_query` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_BaseModelAdminChecks__check_ordering_item/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseModelAdminChecks._check_ordering_item 2 | 3 | Refactor the `_check_ordering_item` method in the `BaseModelAdminChecks` class to be a stand alone, top level function. 4 | Name the new function `_check_ordering_item`, exactly the same name as the existing method. 5 | Update any existing `self._check_ordering_item` calls to work with the new `_check_ordering_item` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/graph_drawer_FxGraphDrawer__stringify_tensor_meta/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor FxGraphDrawer._stringify_tensor_meta 2 | 3 | Refactor the `_stringify_tensor_meta` method in the `FxGraphDrawer` class to be a stand alone, top level function. 4 | Name the new function `_stringify_tensor_meta`, exactly the same name as the existing method. 5 | Update any existing `self._stringify_tensor_meta` calls to work with the new `_stringify_tensor_meta` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/split_cat_SplitCatSimplifier_get_transform_params/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor SplitCatSimplifier.get_transform_params 2 | 3 | Refactor the `get_transform_params` method in the `SplitCatSimplifier` class to be a stand alone, top level function. 4 | Name the new function `get_transform_params`, exactly the same name as the existing method. 5 | Update any existing `self.get_transform_params` calls to work with the new `get_transform_params` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/triton_TritonScheduling_generate_node_schedule/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor TritonScheduling.generate_node_schedule 2 | 3 | Refactor the `generate_node_schedule` method in the `TritonScheduling` class to be a stand alone, top level function. 4 | Name the new function `generate_node_schedule`, exactly the same name as the existing method. 5 | Update any existing `self.generate_node_schedule` calls to work with the new `generate_node_schedule` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/dim2_Dim2CompatTests_test_reductions_2d_axis0/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Dim2CompatTests.test_reductions_2d_axis0 2 | 3 | Refactor the `test_reductions_2d_axis0` method in the `Dim2CompatTests` class to be a stand alone, top level function. 4 | Name the new function `test_reductions_2d_axis0`, exactly the same name as the existing method. 5 | Update any existing `self.test_reductions_2d_axis0` calls to work with the new `test_reductions_2d_axis0` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/load_v1_in_v2__EagerSavedModelLoader__extract_signatures/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor _EagerSavedModelLoader._extract_signatures 2 | 3 | Refactor the `_extract_signatures` method in the `_EagerSavedModelLoader` class to be a stand alone, top level function. 4 | Name the new function `_extract_signatures`, exactly the same name as the existing method. 5 | Update any existing `self._extract_signatures` calls to work with the new `_extract_signatures` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_list_display_item/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ModelAdminChecks._check_list_display_item 2 | 3 | Refactor the `_check_list_display_item` method in the `ModelAdminChecks` class to be a stand alone, top level function. 4 | Name the new function `_check_list_display_item`, exactly the same name as the existing method. 5 | Update any existing `self._check_list_display_item` calls to work with the new `_check_list_display_item` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_list_editable_item/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ModelAdminChecks._check_list_editable_item 2 | 3 | Refactor the `_check_list_editable_item` method in the `ModelAdminChecks` class to be a stand alone, top level function. 4 | Name the new function `_check_list_editable_item`, exactly the same name as the existing method. 5 | Update any existing `self._check_list_editable_item` calls to work with the new `_check_list_editable_item` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/functional_Functional__conform_to_reference_input/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor Functional._conform_to_reference_input 2 | 3 | Refactor the `_conform_to_reference_input` method in the `Functional` class to be a stand alone, top level function. 4 | Name the new function `_conform_to_reference_input`, exactly the same name as the existing method. 5 | Update any existing `self._conform_to_reference_input` calls to work with the new `_conform_to_reference_input` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/gradient_checker_GradientChecker__assertInferTensorChecks/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor GradientChecker._assertInferTensorChecks 2 | 3 | Refactor the `_assertInferTensorChecks` method in the `GradientChecker` class to be a stand alone, top level function. 4 | Name the new function `_assertInferTensorChecks`, exactly the same name as the existing method. 5 | Update any existing `self._assertInferTensorChecks` calls to work with the new `_assertInferTensorChecks` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/profile_analyzer_cli_ProfileAnalyzer__get_list_profile_lines/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ProfileAnalyzer._get_list_profile_lines 2 | 3 | Refactor the `_get_list_profile_lines` method in the `ProfileAnalyzer` class to be a stand alone, top level function. 4 | Name the new function `_get_list_profile_lines`, exactly the same name as the existing method. 
5 | Update any existing `self._get_list_profile_lines` calls to work with the new `_get_list_profile_lines` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/cuda_cpp_scheduling_CUDACPPScheduling__can_fuse_epilogue_impl/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor CUDACPPScheduling._can_fuse_epilogue_impl 2 | 3 | Refactor the `_can_fuse_epilogue_impl` method in the `CUDACPPScheduling` class to be a stand alone, top level function. 4 | Name the new function `_can_fuse_epilogue_impl`, exactly the same name as the existing method. 5 | Update any existing `self._can_fuse_epilogue_impl` calls to work with the new `_can_fuse_epilogue_impl` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/introspection_DatabaseIntrospection__get_column_collations/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DatabaseIntrospection._get_column_collations 2 | 3 | Refactor the `_get_column_collations` method in the `DatabaseIntrospection` class to be a stand alone, top level function. 4 | Name the new function `_get_column_collations`, exactly the same name as the existing method. 5 | Update any existing `self._get_column_collations` calls to work with the new `_get_column_collations` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_DatabaseOperations_check_expression_support/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DatabaseOperations.check_expression_support 2 | 3 | Refactor the `check_expression_support` method in the `DatabaseOperations` class to be a stand alone, top level function. 
4 | Name the new function `check_expression_support`, exactly the same name as the existing method. 5 | Update any existing `self.check_expression_support` calls to work with the new `check_expression_support` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/reshaping_BaseReshapingTests_test_concat_mixed_dtypes/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseReshapingTests.test_concat_mixed_dtypes 2 | 3 | Refactor the `test_concat_mixed_dtypes` method in the `BaseReshapingTests` class to be a stand alone, top level function. 4 | Name the new function `test_concat_mixed_dtypes`, exactly the same name as the existing method. 5 | Update any existing `self.test_concat_mixed_dtypes` calls to work with the new `test_concat_mixed_dtypes` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_BaseModelAdminChecks__check_raw_id_fields_item/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseModelAdminChecks._check_raw_id_fields_item 2 | 3 | Refactor the `_check_raw_id_fields_item` method in the `BaseModelAdminChecks` class to be a stand alone, top level function. 4 | Name the new function `_check_raw_id_fields_item`, exactly the same name as the existing method. 5 | Update any existing `self._check_raw_id_fields_item` calls to work with the new `_check_raw_id_fields_item` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/common_methods_invocations_foreach_inputs_sample_func__sample_rightmost_arg/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor foreach_inputs_sample_func._sample_rightmost_arg 2 | 3 | Refactor the `_sample_rightmost_arg` method in the `foreach_inputs_sample_func` class to be a stand alone, top level function. 4 | Name the new function `_sample_rightmost_arg`, exactly the same name as the existing method. 5 | Update any existing `self._sample_rightmost_arg` calls to work with the new `_sample_rightmost_arg` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/distribution_DistributionFiles_parse_distribution_file_SUSE/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DistributionFiles.parse_distribution_file_SUSE 2 | 3 | Refactor the `parse_distribution_file_SUSE` method in the `DistributionFiles` class to be a stand alone, top level function. 4 | Name the new function `parse_distribution_file_SUSE`, exactly the same name as the existing method. 5 | Update any existing `self.parse_distribution_file_SUSE` calls to work with the new `parse_distribution_file_SUSE` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_action_permission_methods/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor ModelAdminChecks._check_action_permission_methods 2 | 3 | Refactor the `_check_action_permission_methods` method in the `ModelAdminChecks` class to be a stand alone, top level function. 4 | Name the new function `_check_action_permission_methods`, exactly the same name as the existing method. 
5 | Update any existing `self._check_action_permission_methods` calls to work with the new `_check_action_permission_methods` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/coordinator_HERETransitDataUpdateCoordinator__parse_transit_response/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor HERETransitDataUpdateCoordinator._parse_transit_response 2 | 3 | Refactor the `_parse_transit_response` method in the `HERETransitDataUpdateCoordinator` class to be a stand alone, top level function. 4 | Name the new function `_parse_transit_response`, exactly the same name as the existing method. 5 | Update any existing `self._parse_transit_response` calls to work with the new `_parse_transit_response` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_BaseModelAdminChecks__check_autocomplete_fields_item/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor BaseModelAdminChecks._check_autocomplete_fields_item 2 | 3 | Refactor the `_check_autocomplete_fields_item` method in the `BaseModelAdminChecks` class to be a stand alone, top level function. 4 | Name the new function `_check_autocomplete_fields_item`, exactly the same name as the existing method. 5 | Update any existing `self._check_autocomplete_fields_item` calls to work with the new `_check_autocomplete_fields_item` function. 
6 | -------------------------------------------------------------------------------- /refactor-benchmark/generator_GenOpTestCase_out_variant_op_test_case_generator/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor GenOpTestCase.out_variant_op_test_case_generator 2 | 3 | Refactor the `out_variant_op_test_case_generator` method in the `GenOpTestCase` class to be a stand alone, top level function. 4 | Name the new function `out_variant_op_test_case_generator`, exactly the same name as the existing method. 5 | Update any existing `self.out_variant_op_test_case_generator` calls to work with the new `out_variant_op_test_case_generator` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/grpc_debug_server_EventListenerBaseServicer__process_tensor_event_in_chunks/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor EventListenerBaseServicer._process_tensor_event_in_chunks 2 | 3 | Refactor the `_process_tensor_event_in_chunks` method in the `EventListenerBaseServicer` class to be a stand alone, top level function. 4 | Name the new function `_process_tensor_event_in_chunks`, exactly the same name as the existing method. 5 | Update any existing `self._process_tensor_event_in_chunks` calls to work with the new `_process_tensor_event_in_chunks` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/introspection_DatabaseIntrospection__parse_column_or_constraint_definition/.docs/instructions.md: -------------------------------------------------------------------------------- 1 | # Refactor DatabaseIntrospection._parse_column_or_constraint_definition 2 | 3 | Refactor the `_parse_column_or_constraint_definition` method in the `DatabaseIntrospection` class to be a stand alone, top level function. 
4 | Name the new function `_parse_column_or_constraint_definition`, exactly the same name as the existing method. 5 | Update any existing `self._parse_column_or_constraint_definition` calls to work with the new `_parse_column_or_constraint_definition` function. 6 | -------------------------------------------------------------------------------- /refactor-benchmark/shell_Command_python/shell_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_python(self): 8 | fname = Path(__file__).parent / "shell.py" 9 | method = "python" 10 | method_children = 166 11 | 12 | class_name = "Command" 13 | class_children = 378 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/special_RunSQL__run_sql/special_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__run_sql(self): 8 | fname = Path(__file__).parent / "special.py" 9 | method = "_run_sql" 10 | method_children = 119 11 | 12 | class_name = "RunSQL" 13 | class_children = 384 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/config_NetworkConfig_parse/config_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from 
pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_parse(self): 8 | fname = Path(__file__).parent / "config.py" 9 | method = "parse" 10 | method_children = 299 11 | 12 | class_name = "NetworkConfig" 13 | class_children = 1480 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/figure_FigureBase_colorbar/figure_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_colorbar(self): 8 | fname = Path(__file__).parent / "figure.py" 9 | method = "colorbar" 10 | method_children = 261 11 | 12 | class_name = "FigureBase" 13 | class_children = 5312 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/quiver_Barbs__make_barbs/quiver_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__make_barbs(self): 8 | fname = Path(__file__).parent / "quiver.py" 9 | method = "_make_barbs" 10 | method_children = 530 11 | 12 | class_name = "Barbs" 13 | class_children = 1408 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/finders_FileSystemFinder_check/finders_test.py: 
-------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_check(self): 8 | fname = Path(__file__).parent / "finders.py" 9 | method = "check" 10 | method_children = 166 11 | 12 | class_name = "FileSystemFinder" 13 | class_children = 509 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/inspectdb_Command_get_meta/inspectdb_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_meta(self): 8 | fname = Path(__file__).parent / "inspectdb.py" 9 | method = "get_meta" 10 | method_children = 191 11 | 12 | class_name = "Command" 13 | class_children = 1753 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/baseconv_BaseConverter_convert/baseconv_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_convert(self): 8 | fname = Path(__file__).parent / "baseconv.py" 9 | method = "convert" 10 | method_children = 144 11 | 12 | class_name = "BaseConverter" 13 | class_children = 298 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | 
unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/doc_DocCLI_get_role_man_text/doc_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_role_man_text(self): 8 | fname = Path(__file__).parent / "doc.py" 9 | method = "get_role_man_text" 10 | method_children = 428 11 | 12 | class_name = "DocCLI" 13 | class_children = 7038 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/i18n_JavaScriptCatalog_get_paths/i18n_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_paths(self): 8 | fname = Path(__file__).parent / "i18n.py" 9 | method = "get_paths" 10 | method_children = 105 11 | 12 | class_name = "JavaScriptCatalog" 13 | class_children = 662 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/weather_NWSWeather__forecast/weather_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__forecast(self): 8 | fname = Path(__file__).parent / "weather.py" 9 | method = "_forecast" 10 | method_children = 327 11 | 12 | 
class_name = "NWSWeather" 13 | class_children = 1235 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/backends_ModelBackend_with_perm/backends_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_with_perm(self): 8 | fname = Path(__file__).parent / "backends.py" 9 | method = "with_perm" 10 | method_children = 178 11 | 12 | class_name = "ModelBackend" 13 | class_children = 665 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/base_BaseHandler_check_response/base_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_check_response(self): 8 | fname = Path(__file__).parent / "base.py" 9 | method = "check_response" 10 | method_children = 108 11 | 12 | class_name = "BaseHandler" 13 | class_children = 1369 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/dataloader_DataLoader__is_role/dataloader_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import 
Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__is_role(self): 8 | fname = Path(__file__).parent / "dataloader.py" 9 | method = "_is_role" 10 | method_children = 261 11 | 12 | class_name = "DataLoader" 13 | class_children = 2512 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/dumpdata_Command_add_arguments/dumpdata_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_add_arguments(self): 8 | fname = Path(__file__).parent / "dumpdata.py" 9 | method = "add_arguments" 10 | method_children = 128 11 | 12 | class_name = "Command" 13 | class_children = 1078 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/getitem_BaseGetitemTests_test_get/getitem_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_test_get(self): 8 | fname = Path(__file__).parent / "getitem.py" 9 | method = "test_get" 10 | method_children = 339 11 | 12 | class_name = "BaseGetitemTests" 13 | class_children = 3543 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- 
/refactor-benchmark/migrate_Command_add_arguments/migrate_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_add_arguments(self): 8 | fname = Path(__file__).parent / "migrate.py" 9 | method = "add_arguments" 10 | method_children = 133 11 | 12 | class_name = "Command" 13 | class_children = 2475 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/options_ModelAdmin_message_user/options_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_message_user(self): 8 | fname = Path(__file__).parent / "options.py" 9 | method = "message_user" 10 | method_children = 106 11 | 12 | class_name = "ModelAdmin" 13 | class_children = 7705 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/diffsettings_Command_output_hash/diffsettings_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_output_hash(self): 8 | fname = Path(__file__).parent / "diffsettings.py" 9 | method = "output_hash" 10 | method_children = 107 11 | 12 | class_name = "Command" 13 | class_children = 399 14 | 15 | 
verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/doc_DocCLI_display_plugin_list/doc_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_display_plugin_list(self): 8 | fname = Path(__file__).parent / "doc.py" 9 | method = "display_plugin_list" 10 | method_children = 389 11 | 12 | class_name = "DocCLI" 13 | class_children = 7038 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/graph_MigrationGraph_iterative_dfs/graph_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_iterative_dfs(self): 8 | fname = Path(__file__).parent / "graph.py" 9 | method = "iterative_dfs" 10 | method_children = 114 11 | 12 | class_name = "MigrationGraph" 13 | class_children = 1324 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/ogrinspect_Command_add_arguments/ogrinspect_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | 
def test_add_arguments(self): 8 | fname = Path(__file__).parent / "ogrinspect.py" 9 | method = "add_arguments" 10 | method_children = 144 11 | 12 | class_name = "Command" 13 | class_children = 413 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/patches__Curve__get_arrow_wedge/patches_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__get_arrow_wedge(self): 8 | fname = Path(__file__).parent / "patches.py" 9 | method = "_get_arrow_wedge" 10 | method_children = 287 11 | 12 | class_name = "_Curve" 13 | class_children = 1467 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/base_BaseHandler_adapt_method_mode/base_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_adapt_method_mode(self): 8 | fname = Path(__file__).parent / "base.py" 9 | method = "adapt_method_mode" 10 | method_children = 104 11 | 12 | class_name = "BaseHandler" 13 | class_children = 1369 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/builtin_BuiltinVariable_call_setattr/builtin_test.py: 
-------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_call_setattr(self): 8 | fname = Path(__file__).parent / "builtin.py" 9 | method = "call_setattr" 10 | method_children = 534 11 | 12 | class_name = "BuiltinVariable" 13 | class_children = 8457 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/config_AppConfig__path_from_module/config_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__path_from_module(self): 8 | fname = Path(__file__).parent / "config.py" 9 | method = "_path_from_module" 10 | method_children = 112 11 | 12 | class_name = "AppConfig" 13 | class_children = 934 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/inspectdb_Command_get_field_type/inspectdb_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_field_type(self): 8 | fname = Path(__file__).parent / "inspectdb.py" 9 | method = "get_field_type" 10 | method_children = 194 11 | 12 | class_name = "Command" 13 | class_children = 1753 14 | 15 | verify_refactor(fname, method, method_children, class_name, 
class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/kernel_SpyderKernel_get_fault_text/kernel_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_fault_text(self): 8 | fname = Path(__file__).parent / "kernel.py" 9 | method = "get_fault_text" 10 | method_children = 265 11 | 12 | class_name = "SpyderKernel" 13 | class_children = 3684 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/polar_RadialTick__determine_anchor/polar_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__determine_anchor(self): 8 | fname = Path(__file__).parent / "polar.py" 9 | method = "_determine_anchor" 10 | method_children = 252 11 | 12 | class_name = "RadialTick" 13 | class_children = 945 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/text_CountVectorizer__limit_features/text_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__limit_features(self): 8 | fname = 
Path(__file__).parent / "text.py" 9 | method = "_limit_features" 10 | method_children = 300 11 | 12 | class_name = "CountVectorizer" 13 | class_children = 1601 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/config_ConfigCLI__get_settings_vars/config_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__get_settings_vars(self): 8 | fname = Path(__file__).parent / "config.py" 9 | method = "_get_settings_vars" 10 | method_children = 386 11 | 12 | class_name = "ConfigCLI" 13 | class_children = 3084 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/makemessages_Command_add_arguments/makemessages_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_add_arguments(self): 8 | fname = Path(__file__).parent / "makemessages.py" 9 | method = "add_arguments" 10 | method_children = 179 11 | 12 | class_name = "Command" 13 | class_children = 2914 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/schema_DatabaseSchemaEditor_quote_value/schema_test.py: 
-------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_quote_value(self): 8 | fname = Path(__file__).parent / "schema.py" 9 | method = "quote_value" 10 | method_children = 130 11 | 12 | class_name = "DatabaseSchemaEditor" 13 | class_children = 2395 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/triton_TritonScheduling_define_kernel/triton_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_define_kernel(self): 8 | fname = Path(__file__).parent / "triton.py" 9 | method = "define_kernel" 10 | method_children = 267 11 | 12 | class_name = "TritonScheduling" 13 | class_children = 4346 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/codeeditor_CodeEditor___get_brackets/codeeditor_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test___get_brackets(self): 8 | fname = Path(__file__).parent / "codeeditor.py" 9 | method = "__get_brackets" 10 | method_children = 271 11 | 12 | class_name = "CodeEditor" 13 | class_children = 22095 14 | 15 | verify_refactor(fname, method, method_children, 
class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/common_utils_TestCase_genSparseTensor/common_utils_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_genSparseTensor(self): 8 | fname = Path(__file__).parent / "common_utils.py" 9 | method = "genSparseTensor" 10 | method_children = 261 11 | 12 | class_name = "TestCase" 13 | class_children = 8203 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/csrf_CsrfViewMiddleware__set_csrf_cookie/csrf_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__set_csrf_cookie(self): 8 | fname = Path(__file__).parent / "csrf.py" 9 | method = "_set_csrf_cookie" 10 | method_children = 101 11 | 12 | class_name = "CsrfViewMiddleware" 13 | class_children = 1120 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/inspectdb_Command_normalize_col_name/inspectdb_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def 
test_normalize_col_name(self): 8 | fname = Path(__file__).parent / "inspectdb.py" 9 | method = "normalize_col_name" 10 | method_children = 316 11 | 12 | class_name = "Command" 13 | class_children = 1753 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/main_widget_PylintWidget_parse_output/main_widget_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_parse_output(self): 8 | fname = Path(__file__).parent / "main_widget.py" 9 | method = "parse_output" 10 | method_children = 376 11 | 12 | class_name = "PylintWidget" 13 | class_children = 3564 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/makemigrations_Command_add_arguments/makemigrations_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_add_arguments(self): 8 | fname = Path(__file__).parent / "makemigrations.py" 9 | method = "add_arguments" 10 | method_children = 126 11 | 12 | class_name = "Command" 13 | class_children = 2334 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- 
/refactor-benchmark/split_cat_SplitCatSimplifier_replace_cat/split_cat_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_replace_cat(self): 8 | fname = Path(__file__).parent / "split_cat.py" 9 | method = "replace_cat" 10 | method_children = 561 11 | 12 | class_name = "SplitCatSimplifier" 13 | class_children = 2326 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/gateway_Gateway_get_and_delete_all_sms/gateway_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_and_delete_all_sms(self): 8 | fname = Path(__file__).parent / "gateway.py" 9 | method = "get_and_delete_all_sms" 10 | method_children = 294 11 | 12 | class_name = "Gateway" 13 | class_children = 938 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/grad_scaler_GradScaler__unscale_grads_/grad_scaler_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__unscale_grads_(self): 8 | fname = Path(__file__).parent / "grad_scaler.py" 9 | method = "_unscale_grads_" 10 | method_children = 275 11 | 12 | class_name = 
"GradScaler" 13 | class_children = 2468 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/group_batch_fusion_GroupLinearFusion_fuse/group_batch_fusion_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_fuse(self): 8 | fname = Path(__file__).parent / "group_batch_fusion.py" 9 | method = "fuse" 10 | method_children = 279 11 | 12 | class_name = "GroupLinearFusion" 13 | class_children = 635 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/reshaping_BaseReshapingTests_test_unstack/reshaping_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_test_unstack(self): 8 | fname = Path(__file__).parent / "reshaping.py" 9 | method = "test_unstack" 10 | method_children = 376 11 | 12 | class_name = "BaseReshapingTests" 13 | class_children = 3099 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/split_cat_SplitCatSimplifier_replace_split/split_cat_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools 
import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_replace_split(self): 8 | fname = Path(__file__).parent / "split_cat.py" 9 | method = "replace_split" 10 | method_children = 327 11 | 12 | class_name = "SplitCatSimplifier" 13 | class_children = 2326 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/symbolic_shapes_ShapeEnv_bind_symbols/symbolic_shapes_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_bind_symbols(self): 8 | fname = Path(__file__).parent / "symbolic_shapes.py" 9 | method = "bind_symbols" 10 | method_children = 277 11 | 12 | class_name = "ShapeEnv" 13 | class_children = 11290 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_inlines_item/checks_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__check_inlines_item(self): 8 | fname = Path(__file__).parent / "checks.py" 9 | method = "_check_inlines_item" 10 | method_children = 152 11 | 12 | class_name = "ModelAdminChecks" 13 | class_children = 1746 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | 
-------------------------------------------------------------------------------- /refactor-benchmark/concat__Concatenator__clean_keys_and_objs/concat_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__clean_keys_and_objs(self): 8 | fname = Path(__file__).parent / "concat.py" 9 | method = "_clean_keys_and_objs" 10 | method_children = 305 11 | 12 | class_name = "_Concatenator" 13 | class_children = 2038 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/feedgenerator_Atom1Feed_add_item_elements/feedgenerator_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_add_item_elements(self): 8 | fname = Path(__file__).parent / "feedgenerator.py" 9 | method = "add_item_elements" 10 | method_children = 301 11 | 12 | class_name = "Atom1Feed" 13 | class_children = 706 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/galaxy_GalaxyCLI_execute_list_collection/galaxy_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_execute_list_collection(self): 8 | fname = Path(__file__).parent / 
"galaxy.py" 9 | method = "execute_list_collection" 10 | method_children = 454 11 | 12 | class_name = "GalaxyCLI" 13 | class_children = 9670 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/methods_BaseMethodsTests_test_where_series/methods_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_test_where_series(self): 8 | fname = Path(__file__).parent / "methods.py" 9 | method = "test_where_series" 10 | method_children = 370 11 | 12 | class_name = "BaseMethodsTests" 13 | class_children = 5272 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_OracleOperations_convert_extent/operations_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_convert_extent(self): 8 | fname = Path(__file__).parent / "operations.py" 9 | method = "convert_extent" 10 | method_children = 133 11 | 12 | class_name = "OracleOperations" 13 | class_children = 635 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_DatabaseOperations_bulk_insert_sql/operations_test.py: 
-------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_bulk_insert_sql(self): 8 | fname = Path(__file__).parent / "operations.py" 9 | method = "bulk_insert_sql" 10 | method_children = 128 11 | 12 | class_name = "DatabaseOperations" 13 | class_children = 3107 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/analyzer_cli_DebugAnalyzer__make_source_table/analyzer_cli_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__make_source_table(self): 8 | fname = Path(__file__).parent / "analyzer_cli.py" 9 | method = "_make_source_table" 10 | method_children = 466 11 | 12 | class_name = "DebugAnalyzer" 13 | class_children = 6016 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/autodetector_MigrationAutodetector__trim_to_apps/autodetector_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__trim_to_apps(self): 8 | fname = Path(__file__).parent / "autodetector.py" 9 | method = "_trim_to_apps" 10 | method_children = 130 11 | 12 | class_name = "MigrationAutodetector" 13 | class_children = 8548 14 | 
15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/autosave_AutosaveForPlugin_get_files_to_recover/autosave_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_files_to_recover(self): 8 | fname = Path(__file__).parent / "autosave.py" 9 | method = "get_files_to_recover" 10 | method_children = 289 11 | 12 | class_name = "AutosaveForPlugin" 13 | class_children = 659 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/dataframeeditor_DataFrameView_next_index_name/dataframeeditor_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_next_index_name(self): 8 | fname = Path(__file__).parent / "dataframeeditor.py" 9 | method = "next_index_name" 10 | method_children = 626 11 | 12 | class_name = "DataFrameView" 13 | class_children = 4394 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/dim2_Dim2CompatTests_test_reductions_2d_axis0/dim2_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import 
verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_test_reductions_2d_axis0(self): 8 | fname = Path(__file__).parent / "dim2.py" 9 | method = "test_reductions_2d_axis0" 10 | method_children = 380 11 | 12 | class_name = "Dim2CompatTests" 13 | class_children = 1919 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/triton_TritonScheduling_generate_node_schedule/triton_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_generate_node_schedule(self): 8 | fname = Path(__file__).parent / "triton.py" 9 | method = "generate_node_schedule" 10 | method_children = 467 11 | 12 | class_name = "TritonScheduling" 13 | class_children = 4346 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_BaseModelAdminChecks__check_ordering_item/checks_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__check_ordering_item(self): 8 | fname = Path(__file__).parent / "checks.py" 9 | method = "_check_ordering_item" 10 | method_children = 180 11 | 12 | class_name = "BaseModelAdminChecks" 13 | class_children = 2479 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | 
unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_list_display_item/checks_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__check_list_display_item(self): 8 | fname = Path(__file__).parent / "checks.py" 9 | method = "_check_list_display_item" 10 | method_children = 131 11 | 12 | class_name = "ModelAdminChecks" 13 | class_children = 1746 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/autodetector_MigrationAutodetector_check_dependency/autodetector_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_check_dependency(self): 8 | fname = Path(__file__).parent / "autodetector.py" 9 | method = "check_dependency" 10 | method_children = 452 11 | 12 | class_name = "MigrationAutodetector" 13 | class_children = 8548 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_list_editable_item/checks_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def 
test__check_list_editable_item(self): 8 | fname = Path(__file__).parent / "checks.py" 9 | method = "_check_list_editable_item" 10 | method_children = 203 11 | 12 | class_name = "ModelAdminChecks" 13 | class_children = 1746 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/clustering_ops_KMeans__mini_batch_training_op/clustering_ops_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__mini_batch_training_op(self): 8 | fname = Path(__file__).parent / "clustering_ops.py" 9 | method = "_mini_batch_training_op" 10 | method_children = 334 11 | 12 | class_name = "KMeans" 13 | class_children = 1886 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/compile_utils_MetricsContainer__get_metric_object/compile_utils_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__get_metric_object(self): 8 | fname = Path(__file__).parent / "compile_utils.py" 9 | method = "_get_metric_object" 10 | method_children = 280 11 | 12 | class_name = "MetricsContainer" 13 | class_children = 1423 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | 
-------------------------------------------------------------------------------- /refactor-benchmark/operations_DatabaseOperations_last_executed_query/operations_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_last_executed_query(self): 8 | fname = Path(__file__).parent / "operations.py" 9 | method = "last_executed_query" 10 | method_children = 122 11 | 12 | class_name = "DatabaseOperations" 13 | class_children = 3107 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/split_cat_SplitCatSimplifier_get_transform_params/split_cat_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_get_transform_params(self): 8 | fname = Path(__file__).parent / "split_cat.py" 9 | method = "get_transform_params" 10 | method_children = 344 11 | 12 | class_name = "SplitCatSimplifier" 13 | class_children = 2326 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/functional_Functional__conform_to_reference_input/functional_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def 
test__conform_to_reference_input(self): 8 | fname = Path(__file__).parent / "functional.py" 9 | method = "_conform_to_reference_input" 10 | method_children = 267 11 | 12 | class_name = "Functional" 13 | class_children = 3817 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/generic_bsd_GenericBsdIfconfigNetwork_parse_inet_line/generic_bsd_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_parse_inet_line(self): 8 | fname = Path(__file__).parent / "generic_bsd.py" 9 | method = "parse_inet_line" 10 | method_children = 390 11 | 12 | class_name = "GenericBsdIfconfigNetwork" 13 | class_children = 1918 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/graph_drawer_FxGraphDrawer__stringify_tensor_meta/graph_drawer_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__stringify_tensor_meta(self): 8 | fname = Path(__file__).parent / "graph_drawer.py" 9 | method = "_stringify_tensor_meta" 10 | method_children = 354 11 | 12 | class_name = "FxGraphDrawer" 13 | class_children = 2078 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | 
-------------------------------------------------------------------------------- /refactor-benchmark/sharding_policies_MaxShardSizePolicy__add_partition/sharding_policies_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__add_partition(self): 8 | fname = Path(__file__).parent / "sharding_policies.py" 9 | method = "_add_partition" 10 | method_children = 382 11 | 12 | class_name = "MaxShardSizePolicy" 13 | class_children = 1235 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_BaseModelAdminChecks__check_raw_id_fields_item/checks_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__check_raw_id_fields_item(self): 8 | fname = Path(__file__).parent / "checks.py" 9 | method = "_check_raw_id_fields_item" 10 | method_children = 101 11 | 12 | class_name = "BaseModelAdminChecks" 13 | class_children = 2479 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/reshaping_BaseReshapingTests_test_concat_mixed_dtypes/reshaping_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def 
test_test_concat_mixed_dtypes(self): 8 | fname = Path(__file__).parent / "reshaping.py" 9 | method = "test_concat_mixed_dtypes" 10 | method_children = 273 11 | 12 | class_name = "BaseReshapingTests" 13 | class_children = 3099 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/load_v1_in_v2__EagerSavedModelLoader__extract_signatures/load_v1_in_v2_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__extract_signatures(self): 8 | fname = Path(__file__).parent / "load_v1_in_v2.py" 9 | method = "_extract_signatures" 10 | method_children = 424 11 | 12 | class_name = "_EagerSavedModelLoader" 13 | class_children = 1203 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_DatabaseOperations_check_expression_support/operations_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_check_expression_support(self): 8 | fname = Path(__file__).parent / "operations.py" 9 | method = "check_expression_support" 10 | method_children = 118 11 | 12 | class_name = "DatabaseOperations" 13 | class_children = 1942 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | 
-------------------------------------------------------------------------------- /refactor-benchmark/checks_ModelAdminChecks__check_action_permission_methods/checks_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__check_action_permission_methods(self): 8 | fname = Path(__file__).parent / "checks.py" 9 | method = "_check_action_permission_methods" 10 | method_children = 103 11 | 12 | class_name = "ModelAdminChecks" 13 | class_children = 1746 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/introspection_DatabaseIntrospection__get_column_collations/introspection_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__get_column_collations(self): 8 | fname = Path(__file__).parent / "introspection.py" 9 | method = "_get_column_collations" 10 | method_children = 145 11 | 12 | class_name = "DatabaseIntrospection" 13 | class_children = 1779 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/checks_BaseModelAdminChecks__check_autocomplete_fields_item/checks_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class 
TheTest(unittest.TestCase): 7 | def test__check_autocomplete_fields_item(self): 8 | fname = Path(__file__).parent / "checks.py" 9 | method = "_check_autocomplete_fields_item" 10 | method_children = 176 11 | 12 | class_name = "BaseModelAdminChecks" 13 | class_children = 2479 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/gradient_checker_GradientChecker__assertInferTensorChecks/gradient_checker_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__assertInferTensorChecks(self): 8 | fname = Path(__file__).parent / "gradient_checker.py" 9 | method = "_assertInferTensorChecks" 10 | method_children = 297 11 | 12 | class_name = "GradientChecker" 13 | class_children = 1237 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/distribution_DistributionFiles_parse_distribution_file_SUSE/distribution_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_parse_distribution_file_SUSE(self): 8 | fname = Path(__file__).parent / "distribution.py" 9 | method = "parse_distribution_file_SUSE" 10 | method_children = 440 11 | 12 | class_name = "DistributionFiles" 13 | class_children = 2766 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == 
"__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/generator_GenOpTestCase_out_variant_op_test_case_generator/generator_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test_out_variant_op_test_case_generator(self): 8 | fname = Path(__file__).parent / "generator.py" 9 | method = "out_variant_op_test_case_generator" 10 | method_children = 269 11 | 12 | class_name = "GenOpTestCase" 13 | class_children = 645 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/onnxfunction_dispatcher_OnnxFunctionDispatcher__get_aten_name/onnxfunction_dispatcher_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__get_aten_name(self): 8 | fname = Path(__file__).parent / "onnxfunction_dispatcher.py" 9 | method = "_get_aten_name" 10 | method_children = 355 11 | 12 | class_name = "OnnxFunctionDispatcher" 13 | class_children = 1323 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/cuda_cpp_scheduling_CUDACPPScheduling__can_fuse_epilogue_impl/cuda_cpp_scheduling_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import 
verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__can_fuse_epilogue_impl(self): 8 | fname = Path(__file__).parent / "cuda_cpp_scheduling.py" 9 | method = "_can_fuse_epilogue_impl" 10 | method_children = 296 11 | 12 | class_name = "CUDACPPScheduling" 13 | class_children = 992 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/profile_analyzer_cli_ProfileAnalyzer__get_list_profile_lines/profile_analyzer_cli_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__get_list_profile_lines(self): 8 | fname = Path(__file__).parent / "profile_analyzer_cli.py" 9 | method = "_get_list_profile_lines" 10 | method_children = 539 11 | 12 | class_name = "ProfileAnalyzer" 13 | class_children = 2723 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/coordinator_HERETransitDataUpdateCoordinator__parse_transit_response/coordinator_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__parse_transit_response(self): 8 | fname = Path(__file__).parent / "coordinator.py" 9 | method = "_parse_transit_response" 10 | method_children = 288 11 | 12 | class_name = "HERETransitDataUpdateCoordinator" 13 | class_children = 599 14 | 15 | verify_refactor(fname, 
method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/common_methods_invocations_foreach_inputs_sample_func__sample_rightmost_arg/common_methods_invocations_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__sample_rightmost_arg(self): 8 | fname = Path(__file__).parent / "common_methods_invocations.py" 9 | method = "_sample_rightmost_arg" 10 | method_children = 297 11 | 12 | class_name = "foreach_inputs_sample_func" 13 | class_children = 1626 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/grpc_debug_server_EventListenerBaseServicer__process_tensor_event_in_chunks/grpc_debug_server_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__process_tensor_event_in_chunks(self): 8 | fname = Path(__file__).parent / "grpc_debug_server.py" 9 | method = "_process_tensor_event_in_chunks" 10 | method_children = 279 11 | 12 | class_name = "EventListenerBaseServicer" 13 | class_children = 1416 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- 
/refactor-benchmark/introspection_DatabaseIntrospection__parse_column_or_constraint_definition/introspection_test.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from benchmark.refactor_tools import verify_refactor 4 | from pathlib import Path 5 | 6 | class TheTest(unittest.TestCase): 7 | def test__parse_column_or_constraint_definition(self): 8 | fname = Path(__file__).parent / "introspection.py" 9 | method = "_parse_column_or_constraint_definition" 10 | method_children = 578 11 | 12 | class_name = "DatabaseIntrospection" 13 | class_children = 1779 14 | 15 | verify_refactor(fname, method, method_children, class_name, class_children) 16 | 17 | if __name__ == "__main__": 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /refactor-benchmark/baseconv_BaseConverter_convert/baseconv.py: -------------------------------------------------------------------------------- 1 | # RemovedInDjango50Warning 2 | # Copyright (c) 2010 Guilherme Gondim. All rights reserved. 3 | # Copyright (c) 2009 Simon Willison. All rights reserved. 4 | # Copyright (c) 2002 Drew Perttula. All rights reserved. 5 | # 6 | # License: 7 | # Python Software Foundation License version 2 8 | # 9 | # See the file "LICENSE" for terms & conditions for usage, and a DISCLAIMER OF 10 | # ALL WARRANTIES. 11 | # 12 | # This Baseconv distribution contains no GNU General Public Licensed (GPLed) 13 | # code so it may be used in proprietary projects just like prior ``baseconv`` 14 | # distributions. 15 | # 16 | # All trademarks referenced herein are property of their respective holders. 17 | # 18 | 19 | """ 20 | Convert numbers from base 10 integers to base X strings and back again. 
21 | 22 | Sample usage:: 23 | 24 | >>> base20 = BaseConverter('0123456789abcdefghij') 25 | >>> base20.encode(1234) 26 | '31e' 27 | >>> base20.decode('31e') 28 | 1234 29 | >>> base20.encode(-1234) 30 | '-31e' 31 | >>> base20.decode('-31e') 32 | -1234 33 | >>> base11 = BaseConverter('0123456789-', sign='$') 34 | >>> base11.encode(-1234) 35 | '$-22' 36 | >>> base11.decode('$-22') 37 | -1234 38 | 39 | """ 40 | import warnings 41 | 42 | from django.utils.deprecation import RemovedInDjango50Warning 43 | 44 | warnings.warn( 45 | "The django.utils.baseconv module is deprecated.", 46 | category=RemovedInDjango50Warning, 47 | stacklevel=2, 48 | ) 49 | 50 | BASE2_ALPHABET = "01" 51 | BASE16_ALPHABET = "0123456789ABCDEF" 52 | BASE56_ALPHABET = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnpqrstuvwxyz" 53 | BASE36_ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyz" 54 | BASE62_ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" 55 | BASE64_ALPHABET = BASE62_ALPHABET + "-_" 56 | 57 | 58 | class BaseConverter: 59 | decimal_digits = "0123456789" 60 | 61 | def __init__(self, digits, sign="-"): 62 | self.sign = sign 63 | self.digits = digits 64 | if sign in self.digits: 65 | raise ValueError("Sign character found in converter base digits.") 66 | 67 | def __repr__(self): 68 | return "<%s: base%s (%s)>" % ( 69 | self.__class__.__name__, 70 | len(self.digits), 71 | self.digits, 72 | ) 73 | 74 | def encode(self, i): 75 | neg, value = self.convert(i, self.decimal_digits, self.digits, "-") 76 | if neg: 77 | return self.sign + value 78 | return value 79 | 80 | def decode(self, s): 81 | neg, value = self.convert(s, self.digits, self.decimal_digits, self.sign) 82 | if neg: 83 | value = "-" + value 84 | return int(value) 85 | 86 | def convert(self, number, from_digits, to_digits, sign): 87 | if str(number)[0] == sign: 88 | number = str(number)[1:] 89 | neg = 1 90 | else: 91 | neg = 0 92 | 93 | # make an integer out of the number 94 | x = 0 95 | for digit in str(number): 
96 | x = x * len(from_digits) + from_digits.index(digit) 97 | 98 | # create the result in base 'len(to_digits)' 99 | if x == 0: 100 | res = to_digits[0] 101 | else: 102 | res = "" 103 | while x > 0: 104 | digit = x % len(to_digits) 105 | res = to_digits[digit] + res 106 | x = int(x // len(to_digits)) 107 | return neg, res 108 | 109 | 110 | base2 = BaseConverter(BASE2_ALPHABET) 111 | base16 = BaseConverter(BASE16_ALPHABET) 112 | base36 = BaseConverter(BASE36_ALPHABET) 113 | base56 = BaseConverter(BASE56_ALPHABET) 114 | base62 = BaseConverter(BASE62_ALPHABET) 115 | base64 = BaseConverter(BASE64_ALPHABET, sign="$") 116 | -------------------------------------------------------------------------------- /refactor-benchmark/diffsettings_Command_output_hash/diffsettings.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | 3 | 4 | def module_to_dict(module, omittable=lambda k: k.startswith("_") or not k.isupper()): 5 | """Convert a module namespace to a Python dictionary.""" 6 | return {k: repr(getattr(module, k)) for k in dir(module) if not omittable(k)} 7 | 8 | 9 | class Command(BaseCommand): 10 | help = """Displays differences between the current settings.py and Django's 11 | default settings.""" 12 | 13 | requires_system_checks = [] 14 | 15 | def add_arguments(self, parser): 16 | parser.add_argument( 17 | "--all", 18 | action="store_true", 19 | help=( 20 | 'Display all settings, regardless of their value. In "hash" ' 21 | 'mode, default values are prefixed by "###".' 22 | ), 23 | ) 24 | parser.add_argument( 25 | "--default", 26 | metavar="MODULE", 27 | help=( 28 | "The settings module to compare the current settings against. Leave " 29 | "empty to compare against Django's default settings." 30 | ), 31 | ) 32 | parser.add_argument( 33 | "--output", 34 | default="hash", 35 | choices=("hash", "unified"), 36 | help=( 37 | "Selects the output format. 
'hash' mode displays each changed " 38 | "setting, with the settings that don't appear in the defaults " 39 | "followed by ###. 'unified' mode prefixes the default setting " 40 | "with a minus sign, followed by the changed setting prefixed " 41 | "with a plus sign." 42 | ), 43 | ) 44 | 45 | def handle(self, **options): 46 | from django.conf import Settings, global_settings, settings 47 | 48 | # Because settings are imported lazily, we need to explicitly load them. 49 | if not settings.configured: 50 | settings._setup() 51 | 52 | user_settings = module_to_dict(settings._wrapped) 53 | default = options["default"] 54 | default_settings = module_to_dict( 55 | Settings(default) if default else global_settings 56 | ) 57 | output_func = { 58 | "hash": self.output_hash, 59 | "unified": self.output_unified, 60 | }[options["output"]] 61 | return "\n".join(output_func(user_settings, default_settings, **options)) 62 | 63 | def output_hash(self, user_settings, default_settings, **options): 64 | # Inspired by Postfix's "postconf -n". 
65 | output = [] 66 | for key in sorted(user_settings): 67 | if key not in default_settings: 68 | output.append("%s = %s ###" % (key, user_settings[key])) 69 | elif user_settings[key] != default_settings[key]: 70 | output.append("%s = %s" % (key, user_settings[key])) 71 | elif options["all"]: 72 | output.append("### %s = %s" % (key, user_settings[key])) 73 | return output 74 | 75 | def output_unified(self, user_settings, default_settings, **options): 76 | output = [] 77 | for key in sorted(user_settings): 78 | if key not in default_settings: 79 | output.append( 80 | self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])) 81 | ) 82 | elif user_settings[key] != default_settings[key]: 83 | output.append( 84 | self.style.ERROR("- %s = %s" % (key, default_settings[key])) 85 | ) 86 | output.append( 87 | self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])) 88 | ) 89 | elif options["all"]: 90 | output.append(" %s = %s" % (key, user_settings[key])) 91 | return output 92 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | ## Aider's refactoring benchmark 3 | 4 | This repository holds exercises for a coding benchmark used by the 5 | [aider](https://github.com/paul-gauthier/aider) 6 | AI coding tool. 7 | This benchmark was designed to provoke "lazy coding" in the GPT-4 Turbo models, 8 | which have this widely reported problem. 9 | 10 | This benchmarked assisted in the design and evaluation of a solution to the 11 | lazy coding problem. 12 | [Asking GPT-4 Turbo to format code changes as unified diffs](https://aider.chat/docs/unified-diffs.html) 13 | reduced lazy coding 14 | by 3X. 15 | 16 | ## Benchmark details 17 | 18 | Aider has long used a 19 | [benchmark suite based on 133 Exercism python exercises](). 20 | But these are mostly small coding problems, 21 | usually requiring only a few dozen lines of code. 
22 | GPT-4 Turbo is typically only lazy on 2-3 of these exercises: 23 | the ones with the most code and which involve refactoring. 24 | 25 | Based on this observation, I set out to build a benchmark based on refactoring 26 | a non-trivial amount of code found in fairly large files. 27 | To do this, I used python's `ast` module to analyze 28 | [9 popular open source python repositories](https://github.com/paul-gauthier/refactor-benchmark) 29 | to identify challenging refactoring tasks. 30 | The goal was to find: 31 | 32 | - Source files that contain classes with non-trivial methods, having 100-250+ AST nodes in their implementation. 33 | - Focus on methods that are part of a larger class, which has at least twice as much code as the method itself. 34 | - Select methods that don't use their `self` parameter, so they can be trivially refactored out of the class. 35 | 36 | We can then turn each of these source files into a task for the benchmark, 37 | where we ask GPT to do something like: 38 | 39 | > Refactor the `_set_csrf_cookie` method in the `CsrfViewMiddleware` class to be a stand alone, top level function. 40 | > Name the new function `_set_csrf_cookie`, exactly the same name as the existing method. 41 | > Update any existing `self._set_csrf_cookie` calls to work with the new `_set_csrf_cookie` function. 42 | 43 | A [simple python AST scanning script](https://github.com/paul-gauthier/aider/blob/main/benchmark/refactor_tools.py) 44 | found 89 suitable files 45 | and packaged them up as benchmark tasks. 46 | Each task has a test 47 | that checks if the refactor 48 | was performed roughly correctly: 49 | 50 | - The updated source file must parse as valid python, to detect misapplied edits which produce invalid code. 51 | - The target method must now exist as a top-level function in the file. 52 | - This new top-level function must contain approximately the same number of AST nodes as the original class method. 
This ensures that GPT didn't elide code and replace it with comments. 53 | - The original class must still be present in the file, and it must be smaller by about the number of AST nodes in the method which was removed. This helps confirm that the method was removed from the class, without other significant modifications. 54 | 55 | To be clear, this is not a rigorous test that the refactor was performed correctly. 56 | But it does serve as a basic sanity check that the refactor was essentially done as a cut & paste, without eliding any code as comments. 57 | And it correlates well with other laziness metrics 58 | gathered during benchmarking like the 59 | introduction of new comments that contain "...". 60 | 61 | The result is a pragmatic 62 | [benchmark suite that provokes, detects and quantifies GPT coding laziness](https://github.com/paul-gauthier/refactor-benchmark). 63 | 64 | 65 | # Credits 66 | 67 | The refactoring exercises are based on code from the following 68 | repositories: 69 | 70 | - https://github.com/ansible/ansible 71 | - https://github.com/django/django 72 | - https://github.com/home-assistant/core 73 | - https://github.com/matplotlib/matplotlib 74 | - https://github.com/pandas-dev/pandas 75 | - https://github.com/pytorch/pytorch 76 | - https://github.com/scikit-learn/scikit-learn 77 | - https://github.com/spyder-ide/spyder 78 | - https://github.com/tensorflow/tensorflow 79 | 80 | -------------------------------------------------------------------------------- /refactor-benchmark/shell_Command_python/shell.py: -------------------------------------------------------------------------------- 1 | import os 2 | import select 3 | import sys 4 | import traceback 5 | 6 | from django.core.management import BaseCommand, CommandError 7 | from django.utils.datastructures import OrderedSet 8 | 9 | 10 | class Command(BaseCommand): 11 | help = ( 12 | "Runs a Python interactive interpreter. Tries to use IPython or " 13 | "bpython, if one of them is available. 
Any standard input is executed " 14 | "as code." 15 | ) 16 | 17 | requires_system_checks = [] 18 | shells = ["ipython", "bpython", "python"] 19 | 20 | def add_arguments(self, parser): 21 | parser.add_argument( 22 | "--no-startup", 23 | action="store_true", 24 | help=( 25 | "When using plain Python, ignore the PYTHONSTARTUP environment " 26 | "variable and ~/.pythonrc.py script." 27 | ), 28 | ) 29 | parser.add_argument( 30 | "-i", 31 | "--interface", 32 | choices=self.shells, 33 | help=( 34 | "Specify an interactive interpreter interface. Available options: " 35 | '"ipython", "bpython", and "python"' 36 | ), 37 | ) 38 | parser.add_argument( 39 | "-c", 40 | "--command", 41 | help=( 42 | "Instead of opening an interactive shell, run a command as Django and " 43 | "exit." 44 | ), 45 | ) 46 | 47 | def ipython(self, options): 48 | from IPython import start_ipython 49 | 50 | start_ipython(argv=[]) 51 | 52 | def bpython(self, options): 53 | import bpython 54 | 55 | bpython.embed() 56 | 57 | def python(self, options): 58 | import code 59 | 60 | # Set up a dictionary to serve as the environment for the shell. 61 | imported_objects = {} 62 | 63 | # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system 64 | # conventions and get $PYTHONSTARTUP first then .pythonrc.py. 65 | if not options["no_startup"]: 66 | for pythonrc in OrderedSet( 67 | [os.environ.get("PYTHONSTARTUP"), os.path.expanduser("~/.pythonrc.py")] 68 | ): 69 | if not pythonrc: 70 | continue 71 | if not os.path.isfile(pythonrc): 72 | continue 73 | with open(pythonrc) as handle: 74 | pythonrc_code = handle.read() 75 | # Match the behavior of the cpython shell where an error in 76 | # PYTHONSTARTUP prints an exception and continues. 
77 | try: 78 | exec(compile(pythonrc_code, pythonrc, "exec"), imported_objects) 79 | except Exception: 80 | traceback.print_exc() 81 | 82 | # By default, this will set up readline to do tab completion and to read and 83 | # write history to the .python_history file, but this can be overridden by 84 | # $PYTHONSTARTUP or ~/.pythonrc.py. 85 | try: 86 | hook = sys.__interactivehook__ 87 | except AttributeError: 88 | # Match the behavior of the cpython shell where a missing 89 | # sys.__interactivehook__ is ignored. 90 | pass 91 | else: 92 | try: 93 | hook() 94 | except Exception: 95 | # Match the behavior of the cpython shell where an error in 96 | # sys.__interactivehook__ prints a warning and the exception 97 | # and continues. 98 | print("Failed calling sys.__interactivehook__") 99 | traceback.print_exc() 100 | 101 | # Set up tab completion for objects imported by $PYTHONSTARTUP or 102 | # ~/.pythonrc.py. 103 | try: 104 | import readline 105 | import rlcompleter 106 | 107 | readline.set_completer(rlcompleter.Completer(imported_objects).complete) 108 | except ImportError: 109 | pass 110 | 111 | # Start the interactive interpreter. 112 | code.interact(local=imported_objects) 113 | 114 | def handle(self, **options): 115 | # Execute the command and exit. 116 | if options["command"]: 117 | exec(options["command"], globals()) 118 | return 119 | 120 | # Execute stdin if it has anything to read and exit. 121 | # Not supported on Windows due to select.select() limitations. 
122 | if ( 123 | sys.platform != "win32" 124 | and not sys.stdin.isatty() 125 | and select.select([sys.stdin], [], [], 0)[0] 126 | ): 127 | exec(sys.stdin.read(), globals()) 128 | return 129 | 130 | available_shells = ( 131 | [options["interface"]] if options["interface"] else self.shells 132 | ) 133 | 134 | for shell in available_shells: 135 | try: 136 | return getattr(self, shell)(options) 137 | except ImportError: 138 | pass 139 | raise CommandError("Couldn't import {} interface.".format(shell)) 140 | -------------------------------------------------------------------------------- /refactor-benchmark/ogrinspect_Command_add_arguments/ogrinspect.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | from django.contrib.gis import gdal 4 | from django.core.management.base import BaseCommand, CommandError 5 | from django.utils.inspect import get_func_args 6 | 7 | 8 | class LayerOptionAction(argparse.Action): 9 | """ 10 | Custom argparse action for the `ogrinspect` `layer_key` keyword option 11 | which may be an integer or a string. 12 | """ 13 | 14 | def __call__(self, parser, namespace, value, option_string=None): 15 | try: 16 | setattr(namespace, self.dest, int(value)) 17 | except ValueError: 18 | setattr(namespace, self.dest, value) 19 | 20 | 21 | class ListOptionAction(argparse.Action): 22 | """ 23 | Custom argparse action for `ogrinspect` keywords that require 24 | a string list. If the string is 'True'/'true' then the option 25 | value will be a boolean instead. 26 | """ 27 | 28 | def __call__(self, parser, namespace, value, option_string=None): 29 | if value.lower() == "true": 30 | setattr(namespace, self.dest, True) 31 | else: 32 | setattr(namespace, self.dest, value.split(",")) 33 | 34 | 35 | class Command(BaseCommand): 36 | help = ( 37 | "Inspects the given OGR-compatible data source (e.g., a shapefile) and " 38 | "outputs\na GeoDjango model with the given model name. 
For example:\n" 39 | " ./manage.py ogrinspect zipcode.shp Zipcode" 40 | ) 41 | 42 | requires_system_checks = [] 43 | 44 | def add_arguments(self, parser): 45 | parser.add_argument("data_source", help="Path to the data source.") 46 | parser.add_argument("model_name", help="Name of the model to create.") 47 | parser.add_argument( 48 | "--blank", 49 | action=ListOptionAction, 50 | default=False, 51 | help="Use a comma separated list of OGR field names to add " 52 | "the `blank=True` option to the field definition. Set to `true` " 53 | "to apply to all applicable fields.", 54 | ) 55 | parser.add_argument( 56 | "--decimal", 57 | action=ListOptionAction, 58 | default=False, 59 | help="Use a comma separated list of OGR float fields to " 60 | "generate `DecimalField` instead of the default " 61 | "`FloatField`. Set to `true` to apply to all OGR float fields.", 62 | ) 63 | parser.add_argument( 64 | "--geom-name", 65 | default="geom", 66 | help="Specifies the model name for the Geometry Field (defaults to `geom`)", 67 | ) 68 | parser.add_argument( 69 | "--layer", 70 | dest="layer_key", 71 | action=LayerOptionAction, 72 | default=0, 73 | help="The key for specifying which layer in the OGR data " 74 | "source to use. Defaults to 0 (the first layer). 
May be " 75 | "an integer or a string identifier for the layer.", 76 | ) 77 | parser.add_argument( 78 | "--multi-geom", 79 | action="store_true", 80 | help="Treat the geometry in the data source as a geometry collection.", 81 | ) 82 | parser.add_argument( 83 | "--name-field", 84 | help="Specifies a field name to return for the __str__() method.", 85 | ) 86 | parser.add_argument( 87 | "--no-imports", 88 | action="store_false", 89 | dest="imports", 90 | help="Do not include `from django.contrib.gis.db import models` statement.", 91 | ) 92 | parser.add_argument( 93 | "--null", 94 | action=ListOptionAction, 95 | default=False, 96 | help="Use a comma separated list of OGR field names to add " 97 | "the `null=True` option to the field definition. Set to `true` " 98 | "to apply to all applicable fields.", 99 | ) 100 | parser.add_argument( 101 | "--srid", 102 | help="The SRID to use for the Geometry Field. If it can be " 103 | "determined, the SRID of the data source is used.", 104 | ) 105 | parser.add_argument( 106 | "--mapping", 107 | action="store_true", 108 | help="Generate mapping dictionary for use with `LayerMapping`.", 109 | ) 110 | 111 | def handle(self, *args, **options): 112 | data_source, model_name = options.pop("data_source"), options.pop("model_name") 113 | 114 | # Getting the OGR DataSource from the string parameter. 115 | try: 116 | ds = gdal.DataSource(data_source) 117 | except gdal.GDALException as msg: 118 | raise CommandError(msg) 119 | 120 | # Returning the output of ogrinspect with the given arguments 121 | # and options. 
122 | from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping 123 | 124 | # Filter options to params accepted by `_ogrinspect` 125 | ogr_options = { 126 | k: v 127 | for k, v in options.items() 128 | if k in get_func_args(_ogrinspect) and v is not None 129 | } 130 | output = [s for s in _ogrinspect(ds, model_name, **ogr_options)] 131 | 132 | if options["mapping"]: 133 | # Constructing the keyword arguments for `mapping`, and 134 | # calling it on the data source. 135 | kwargs = { 136 | "geom_name": options["geom_name"], 137 | "layer_key": options["layer_key"], 138 | "multi_geom": options["multi_geom"], 139 | } 140 | mapping_dict = mapping(ds, **kwargs) 141 | # This extra legwork is so that the dictionary definition comes 142 | # out in the same order as the fields in the model definition. 143 | rev_mapping = {v: k for k, v in mapping_dict.items()} 144 | output.extend( 145 | [ 146 | "", 147 | "", 148 | "# Auto-generated `LayerMapping` dictionary for %s model" 149 | % model_name, 150 | "%s_mapping = {" % model_name.lower(), 151 | ] 152 | ) 153 | output.extend( 154 | " '%s': '%s'," % (rev_mapping[ogr_fld], ogr_fld) 155 | for ogr_fld in ds[options["layer_key"]].fields 156 | ) 157 | output.extend( 158 | [ 159 | " '%s': '%s'," 160 | % (options["geom_name"], mapping_dict[options["geom_name"]]), 161 | "}", 162 | ] 163 | ) 164 | return "\n".join(output) 165 | -------------------------------------------------------------------------------- /refactor-benchmark/special_RunSQL__run_sql/special.py: -------------------------------------------------------------------------------- 1 | from django.db import router 2 | 3 | from .base import Operation 4 | 5 | 6 | class SeparateDatabaseAndState(Operation): 7 | """ 8 | Take two lists of operations - ones that will be used for the database, 9 | and ones that will be used for the state change. 
This allows operations 10 | that don't support state change to have it applied, or have operations 11 | that affect the state or not the database, or so on. 12 | """ 13 | 14 | serialization_expand_args = ["database_operations", "state_operations"] 15 | 16 | def __init__(self, database_operations=None, state_operations=None): 17 | self.database_operations = database_operations or [] 18 | self.state_operations = state_operations or [] 19 | 20 | def deconstruct(self): 21 | kwargs = {} 22 | if self.database_operations: 23 | kwargs["database_operations"] = self.database_operations 24 | if self.state_operations: 25 | kwargs["state_operations"] = self.state_operations 26 | return (self.__class__.__qualname__, [], kwargs) 27 | 28 | def state_forwards(self, app_label, state): 29 | for state_operation in self.state_operations: 30 | state_operation.state_forwards(app_label, state) 31 | 32 | def database_forwards(self, app_label, schema_editor, from_state, to_state): 33 | # We calculate state separately in here since our state functions aren't useful 34 | for database_operation in self.database_operations: 35 | to_state = from_state.clone() 36 | database_operation.state_forwards(app_label, to_state) 37 | database_operation.database_forwards( 38 | app_label, schema_editor, from_state, to_state 39 | ) 40 | from_state = to_state 41 | 42 | def database_backwards(self, app_label, schema_editor, from_state, to_state): 43 | # We calculate state separately in here since our state functions aren't useful 44 | to_states = {} 45 | for dbop in self.database_operations: 46 | to_states[dbop] = to_state 47 | to_state = to_state.clone() 48 | dbop.state_forwards(app_label, to_state) 49 | # to_state now has the states of all the database_operations applied 50 | # which is the from_state for the backwards migration of the last 51 | # operation. 
52 | for database_operation in reversed(self.database_operations): 53 | from_state = to_state 54 | to_state = to_states[database_operation] 55 | database_operation.database_backwards( 56 | app_label, schema_editor, from_state, to_state 57 | ) 58 | 59 | def describe(self): 60 | return "Custom state/database change combination" 61 | 62 | 63 | class RunSQL(Operation): 64 | """ 65 | Run some raw SQL. A reverse SQL statement may be provided. 66 | 67 | Also accept a list of operations that represent the state change effected 68 | by this SQL change, in case it's custom column/table creation/deletion. 69 | """ 70 | 71 | noop = "" 72 | 73 | def __init__( 74 | self, sql, reverse_sql=None, state_operations=None, hints=None, elidable=False 75 | ): 76 | self.sql = sql 77 | self.reverse_sql = reverse_sql 78 | self.state_operations = state_operations or [] 79 | self.hints = hints or {} 80 | self.elidable = elidable 81 | 82 | def deconstruct(self): 83 | kwargs = { 84 | "sql": self.sql, 85 | } 86 | if self.reverse_sql is not None: 87 | kwargs["reverse_sql"] = self.reverse_sql 88 | if self.state_operations: 89 | kwargs["state_operations"] = self.state_operations 90 | if self.hints: 91 | kwargs["hints"] = self.hints 92 | return (self.__class__.__qualname__, [], kwargs) 93 | 94 | @property 95 | def reversible(self): 96 | return self.reverse_sql is not None 97 | 98 | def state_forwards(self, app_label, state): 99 | for state_operation in self.state_operations: 100 | state_operation.state_forwards(app_label, state) 101 | 102 | def database_forwards(self, app_label, schema_editor, from_state, to_state): 103 | if router.allow_migrate( 104 | schema_editor.connection.alias, app_label, **self.hints 105 | ): 106 | self._run_sql(schema_editor, self.sql) 107 | 108 | def database_backwards(self, app_label, schema_editor, from_state, to_state): 109 | if self.reverse_sql is None: 110 | raise NotImplementedError("You cannot reverse this operation") 111 | if router.allow_migrate( 112 | 
schema_editor.connection.alias, app_label, **self.hints 113 | ): 114 | self._run_sql(schema_editor, self.reverse_sql) 115 | 116 | def describe(self): 117 | return "Raw SQL operation" 118 | 119 | def _run_sql(self, schema_editor, sqls): 120 | if isinstance(sqls, (list, tuple)): 121 | for sql in sqls: 122 | params = None 123 | if isinstance(sql, (list, tuple)): 124 | elements = len(sql) 125 | if elements == 2: 126 | sql, params = sql 127 | else: 128 | raise ValueError("Expected a 2-tuple but got %d" % elements) 129 | schema_editor.execute(sql, params=params) 130 | elif sqls != RunSQL.noop: 131 | statements = schema_editor.connection.ops.prepare_sql_script(sqls) 132 | for statement in statements: 133 | schema_editor.execute(statement, params=None) 134 | 135 | 136 | class RunPython(Operation): 137 | """ 138 | Run Python code in a context suitable for doing versioned ORM operations. 139 | """ 140 | 141 | reduces_to_sql = False 142 | 143 | def __init__( 144 | self, code, reverse_code=None, atomic=None, hints=None, elidable=False 145 | ): 146 | self.atomic = atomic 147 | # Forwards code 148 | if not callable(code): 149 | raise ValueError("RunPython must be supplied with a callable") 150 | self.code = code 151 | # Reverse code 152 | if reverse_code is None: 153 | self.reverse_code = None 154 | else: 155 | if not callable(reverse_code): 156 | raise ValueError("RunPython must be supplied with callable arguments") 157 | self.reverse_code = reverse_code 158 | self.hints = hints or {} 159 | self.elidable = elidable 160 | 161 | def deconstruct(self): 162 | kwargs = { 163 | "code": self.code, 164 | } 165 | if self.reverse_code is not None: 166 | kwargs["reverse_code"] = self.reverse_code 167 | if self.atomic is not None: 168 | kwargs["atomic"] = self.atomic 169 | if self.hints: 170 | kwargs["hints"] = self.hints 171 | return (self.__class__.__qualname__, [], kwargs) 172 | 173 | @property 174 | def reversible(self): 175 | return self.reverse_code is not None 176 | 177 | def 
state_forwards(self, app_label, state): 178 | # RunPython objects have no state effect. To add some, combine this 179 | # with SeparateDatabaseAndState. 180 | pass 181 | 182 | def database_forwards(self, app_label, schema_editor, from_state, to_state): 183 | # RunPython has access to all models. Ensure that all models are 184 | # reloaded in case any are delayed. 185 | from_state.clear_delayed_apps_cache() 186 | if router.allow_migrate( 187 | schema_editor.connection.alias, app_label, **self.hints 188 | ): 189 | # We now execute the Python code in a context that contains a 'models' 190 | # object, representing the versioned models as an app registry. 191 | # We could try to override the global cache, but then people will still 192 | # use direct imports, so we go with a documentation approach instead. 193 | self.code(from_state.apps, schema_editor) 194 | 195 | def database_backwards(self, app_label, schema_editor, from_state, to_state): 196 | if self.reverse_code is None: 197 | raise NotImplementedError("You cannot reverse this operation") 198 | if router.allow_migrate( 199 | schema_editor.connection.alias, app_label, **self.hints 200 | ): 201 | self.reverse_code(from_state.apps, schema_editor) 202 | 203 | def describe(self): 204 | return "Raw Python operation" 205 | 206 | @staticmethod 207 | def noop(apps, schema_editor): 208 | return None 209 | -------------------------------------------------------------------------------- /refactor-benchmark/gateway_Gateway_get_and_delete_all_sms/gateway.py: -------------------------------------------------------------------------------- 1 | """The sms gateway to interact with a GSM modem.""" 2 | import logging 3 | 4 | import gammu 5 | from gammu.asyncworker import GammuAsyncWorker 6 | 7 | from homeassistant.core import callback 8 | 9 | from .const import DOMAIN, SMS_STATE_UNREAD 10 | 11 | _LOGGER = logging.getLogger(__name__) 12 | 13 | 14 | class Gateway: 15 | """SMS gateway to interact with a GSM modem.""" 16 | 17 | def 
__init__(self, config, hass): 18 | """Initialize the sms gateway.""" 19 | _LOGGER.debug("Init with connection mode:%s", config["Connection"]) 20 | self._worker = GammuAsyncWorker(self.sms_pull) 21 | self._worker.configure(config) 22 | self._hass = hass 23 | self._first_pull = True 24 | self.manufacturer = None 25 | self.model = None 26 | self.firmware = None 27 | 28 | async def init_async(self): 29 | """Initialize the sms gateway asynchronously. This method is also called in config flow to verify connection.""" 30 | await self._worker.init_async() 31 | self.manufacturer = await self.get_manufacturer_async() 32 | self.model = await self.get_model_async() 33 | self.firmware = await self.get_firmware_async() 34 | 35 | def sms_pull(self, state_machine): 36 | """Pull device. 37 | 38 | @param state_machine: state machine 39 | @type state_machine: gammu.StateMachine 40 | """ 41 | state_machine.ReadDevice() 42 | 43 | _LOGGER.debug("Pulling modem") 44 | self.sms_read_messages(state_machine, self._first_pull) 45 | self._first_pull = False 46 | 47 | def sms_read_messages(self, state_machine, force=False): 48 | """Read all received SMS messages. 
49 | 50 | @param state_machine: state machine which invoked action 51 | @type state_machine: gammu.StateMachine 52 | """ 53 | entries = self.get_and_delete_all_sms(state_machine, force) 54 | _LOGGER.debug("SMS entries:%s", entries) 55 | data = [] 56 | 57 | for entry in entries: 58 | decoded_entry = gammu.DecodeSMS(entry) 59 | message = entry[0] 60 | _LOGGER.debug("Processing sms:%s,decoded:%s", message, decoded_entry) 61 | sms_state = message["State"] 62 | _LOGGER.debug("SMS state:%s", sms_state) 63 | if sms_state == SMS_STATE_UNREAD: 64 | if decoded_entry is None: 65 | text = message["Text"] 66 | else: 67 | text = "" 68 | for inner_entry in decoded_entry["Entries"]: 69 | if inner_entry["Buffer"] is not None: 70 | text += inner_entry["Buffer"] 71 | 72 | event_data = { 73 | "phone": message["Number"], 74 | "date": str(message["DateTime"]), 75 | "message": text, 76 | } 77 | 78 | _LOGGER.debug("Append event data:%s", event_data) 79 | data.append(event_data) 80 | 81 | self._hass.add_job(self._notify_incoming_sms, data) 82 | 83 | def get_and_delete_all_sms(self, state_machine, force=False): 84 | """Read and delete all SMS in the modem.""" 85 | # Read SMS memory status ... 86 | memory = state_machine.GetSMSStatus() 87 | # ... 
and calculate number of messages 88 | remaining = memory["SIMUsed"] + memory["PhoneUsed"] 89 | start_remaining = remaining 90 | # Get all sms 91 | start = True 92 | entries = [] 93 | all_parts = -1 94 | all_parts_arrived = False 95 | _LOGGER.debug("Start remaining:%i", start_remaining) 96 | 97 | try: 98 | while remaining > 0: 99 | if start: 100 | entry = state_machine.GetNextSMS(Folder=0, Start=True) 101 | all_parts = entry[0]["UDH"]["AllParts"] 102 | part_number = entry[0]["UDH"]["PartNumber"] 103 | is_single_part = all_parts == 0 104 | is_multi_part = 0 <= all_parts < start_remaining 105 | _LOGGER.debug("All parts:%i", all_parts) 106 | _LOGGER.debug("Part Number:%i", part_number) 107 | _LOGGER.debug("Remaining:%i", remaining) 108 | all_parts_arrived = is_multi_part or is_single_part 109 | _LOGGER.debug("Start all_parts_arrived:%s", all_parts_arrived) 110 | start = False 111 | else: 112 | entry = state_machine.GetNextSMS( 113 | Folder=0, Location=entry[0]["Location"] 114 | ) 115 | 116 | if all_parts_arrived or force: 117 | remaining = remaining - 1 118 | entries.append(entry) 119 | 120 | # delete retrieved sms 121 | _LOGGER.debug("Deleting message") 122 | try: 123 | state_machine.DeleteSMS(Folder=0, Location=entry[0]["Location"]) 124 | except gammu.ERR_MEMORY_NOT_AVAILABLE: 125 | _LOGGER.error("Error deleting SMS, memory not available") 126 | else: 127 | _LOGGER.debug("Not all parts have arrived") 128 | break 129 | 130 | except gammu.ERR_EMPTY: 131 | # error is raised if memory is empty (this induces wrong reported 132 | # memory status) 133 | _LOGGER.info("Failed to read messages!") 134 | 135 | # Link all SMS when there are concatenated messages 136 | entries = gammu.LinkSMS(entries) 137 | 138 | return entries 139 | 140 | @callback 141 | def _notify_incoming_sms(self, messages): 142 | """Notify hass when an incoming SMS message is received.""" 143 | for message in messages: 144 | event_data = { 145 | "phone": message["phone"], 146 | "date": message["date"], 147 | 
"text": message["message"], 148 | } 149 | self._hass.bus.async_fire(f"{DOMAIN}.incoming_sms", event_data) 150 | 151 | async def send_sms_async(self, message): 152 | """Send sms message via the worker.""" 153 | return await self._worker.send_sms_async(message) 154 | 155 | async def get_imei_async(self): 156 | """Get the IMEI of the device.""" 157 | return await self._worker.get_imei_async() 158 | 159 | async def get_signal_quality_async(self): 160 | """Get the current signal level of the modem.""" 161 | return await self._worker.get_signal_quality_async() 162 | 163 | async def get_network_info_async(self): 164 | """Get the current network info of the modem.""" 165 | network_info = await self._worker.get_network_info_async() 166 | # Looks like there is a bug and it's empty for any modem https://github.com/gammu/python-gammu/issues/31, so try workaround 167 | if not network_info["NetworkName"]: 168 | network_info["NetworkName"] = gammu.GSMNetworks.get( 169 | network_info["NetworkCode"] 170 | ) 171 | return network_info 172 | 173 | async def get_manufacturer_async(self): 174 | """Get the manufacturer of the modem.""" 175 | return await self._worker.get_manufacturer_async() 176 | 177 | async def get_model_async(self): 178 | """Get the model of the modem.""" 179 | model = await self._worker.get_model_async() 180 | if not model or not model[0]: 181 | return 182 | display = model[0] # Identification model 183 | if model[1]: # Real model 184 | display = f"{display} ({model[1]})" 185 | return display 186 | 187 | async def get_firmware_async(self): 188 | """Get the firmware information of the modem.""" 189 | firmware = await self._worker.get_firmware_async() 190 | if not firmware or not firmware[0]: 191 | return 192 | display = firmware[0] # Version 193 | if firmware[1]: # Date 194 | display = f"{display} ({firmware[1]})" 195 | return display 196 | 197 | async def terminate_async(self): 198 | """Terminate modem connection.""" 199 | return await self._worker.terminate_async() 
200 | 201 | 202 | async def create_sms_gateway(config, hass): 203 | """Create the sms gateway.""" 204 | try: 205 | gateway = Gateway(config, hass) 206 | try: 207 | await gateway.init_async() 208 | except gammu.GSMError as exc: 209 | _LOGGER.error("Failed to initialize, error %s", exc) 210 | await gateway.terminate_async() 211 | return None 212 | return gateway 213 | except gammu.GSMError as exc: 214 | _LOGGER.error("Failed to create async worker, error %s", exc) 215 | return None 216 | -------------------------------------------------------------------------------- /refactor-benchmark/operations_OracleOperations_convert_extent/operations.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module contains the spatial lookup types, and the `get_geo_where_clause` 3 | routine for Oracle Spatial. 4 | 5 | Please note that WKT support is broken on the XE version, and thus 6 | this backend will not work on such platforms. Specifically, XE lacks 7 | support for an internal JVM, and Java libraries are required to use 8 | the WKT constructors. 
9 | """ 10 | import re 11 | 12 | from django.contrib.gis.db import models 13 | from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations 14 | from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter 15 | from django.contrib.gis.db.backends.utils import SpatialOperator 16 | from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase 17 | from django.contrib.gis.geos.prototypes.io import wkb_r 18 | from django.contrib.gis.measure import Distance 19 | from django.db.backends.oracle.operations import DatabaseOperations 20 | 21 | DEFAULT_TOLERANCE = "0.05" 22 | 23 | 24 | class SDOOperator(SpatialOperator): 25 | sql_template = "%(func)s(%(lhs)s, %(rhs)s) = 'TRUE'" 26 | 27 | 28 | class SDODWithin(SpatialOperator): 29 | sql_template = "SDO_WITHIN_DISTANCE(%(lhs)s, %(rhs)s, %%s) = 'TRUE'" 30 | 31 | 32 | class SDODisjoint(SpatialOperator): 33 | sql_template = ( 34 | "SDO_GEOM.RELATE(%%(lhs)s, 'DISJOINT', %%(rhs)s, %s) = 'DISJOINT'" 35 | % DEFAULT_TOLERANCE 36 | ) 37 | 38 | 39 | class SDORelate(SpatialOperator): 40 | sql_template = "SDO_RELATE(%(lhs)s, %(rhs)s, 'mask=%(mask)s') = 'TRUE'" 41 | 42 | def check_relate_argument(self, arg): 43 | masks = ( 44 | "TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|" 45 | "CONTAINS|COVERS|ANYINTERACT|ON" 46 | ) 47 | mask_regex = re.compile(r"^(%s)(\+(%s))*$" % (masks, masks), re.I) 48 | if not isinstance(arg, str) or not mask_regex.match(arg): 49 | raise ValueError('Invalid SDO_RELATE mask: "%s"' % arg) 50 | 51 | def as_sql(self, connection, lookup, template_params, sql_params): 52 | template_params["mask"] = sql_params[-1] 53 | return super().as_sql(connection, lookup, template_params, sql_params[:-1]) 54 | 55 | 56 | class OracleOperations(BaseSpatialOperations, DatabaseOperations): 57 | 58 | name = "oracle" 59 | oracle = True 60 | disallowed_aggregates = (models.Collect, models.Extent3D, models.MakeLine) 61 | 62 | Adapter = OracleSpatialAdapter 63 | 64 | 
extent = "SDO_AGGR_MBR" 65 | unionagg = "SDO_AGGR_UNION" 66 | 67 | from_text = "SDO_GEOMETRY" 68 | 69 | function_names = { 70 | "Area": "SDO_GEOM.SDO_AREA", 71 | "AsGeoJSON": "SDO_UTIL.TO_GEOJSON", 72 | "AsWKB": "SDO_UTIL.TO_WKBGEOMETRY", 73 | "AsWKT": "SDO_UTIL.TO_WKTGEOMETRY", 74 | "BoundingCircle": "SDO_GEOM.SDO_MBC", 75 | "Centroid": "SDO_GEOM.SDO_CENTROID", 76 | "Difference": "SDO_GEOM.SDO_DIFFERENCE", 77 | "Distance": "SDO_GEOM.SDO_DISTANCE", 78 | "Envelope": "SDO_GEOM_MBR", 79 | "Intersection": "SDO_GEOM.SDO_INTERSECTION", 80 | "IsValid": "SDO_GEOM.VALIDATE_GEOMETRY_WITH_CONTEXT", 81 | "Length": "SDO_GEOM.SDO_LENGTH", 82 | "NumGeometries": "SDO_UTIL.GETNUMELEM", 83 | "NumPoints": "SDO_UTIL.GETNUMVERTICES", 84 | "Perimeter": "SDO_GEOM.SDO_LENGTH", 85 | "PointOnSurface": "SDO_GEOM.SDO_POINTONSURFACE", 86 | "Reverse": "SDO_UTIL.REVERSE_LINESTRING", 87 | "SymDifference": "SDO_GEOM.SDO_XOR", 88 | "Transform": "SDO_CS.TRANSFORM", 89 | "Union": "SDO_GEOM.SDO_UNION", 90 | } 91 | 92 | # We want to get SDO Geometries as WKT because it is much easier to 93 | # instantiate GEOS proxies from WKT than SDO_GEOMETRY(...) strings. 94 | # However, this adversely affects performance (i.e., Java is called 95 | # to convert to WKT on every query). If someone wishes to write a 96 | # SDO_GEOMETRY(...) parser in Python, let me know =) 97 | select = "SDO_UTIL.TO_WKBGEOMETRY(%s)" 98 | 99 | gis_operators = { 100 | "contains": SDOOperator(func="SDO_CONTAINS"), 101 | "coveredby": SDOOperator(func="SDO_COVEREDBY"), 102 | "covers": SDOOperator(func="SDO_COVERS"), 103 | "disjoint": SDODisjoint(), 104 | "intersects": SDOOperator( 105 | func="SDO_OVERLAPBDYINTERSECT" 106 | ), # TODO: Is this really the same as ST_Intersects()? 
107 | "equals": SDOOperator(func="SDO_EQUAL"), 108 | "exact": SDOOperator(func="SDO_EQUAL"), 109 | "overlaps": SDOOperator(func="SDO_OVERLAPS"), 110 | "same_as": SDOOperator(func="SDO_EQUAL"), 111 | # Oracle uses a different syntax, e.g., 'mask=inside+touch' 112 | "relate": SDORelate(), 113 | "touches": SDOOperator(func="SDO_TOUCH"), 114 | "within": SDOOperator(func="SDO_INSIDE"), 115 | "dwithin": SDODWithin(), 116 | } 117 | 118 | unsupported_functions = { 119 | "AsKML", 120 | "AsSVG", 121 | "Azimuth", 122 | "ForcePolygonCW", 123 | "GeoHash", 124 | "GeometryDistance", 125 | "LineLocatePoint", 126 | "MakeValid", 127 | "MemSize", 128 | "Scale", 129 | "SnapToGrid", 130 | "Translate", 131 | } 132 | 133 | def geo_quote_name(self, name): 134 | return super().geo_quote_name(name).upper() 135 | 136 | def convert_extent(self, clob): 137 | if clob: 138 | # Generally, Oracle returns a polygon for the extent -- however, 139 | # it can return a single point if there's only one Point in the 140 | # table. 141 | ext_geom = GEOSGeometry(memoryview(clob.read())) 142 | gtype = str(ext_geom.geom_type) 143 | if gtype == "Polygon": 144 | # Construct the 4-tuple from the coordinates in the polygon. 145 | shell = ext_geom.shell 146 | ll, ur = shell[0][:2], shell[2][:2] 147 | elif gtype == "Point": 148 | ll = ext_geom.coords[:2] 149 | ur = ll 150 | else: 151 | raise Exception( 152 | "Unexpected geometry type returned for extent: %s" % gtype 153 | ) 154 | xmin, ymin = ll 155 | xmax, ymax = ur 156 | return (xmin, ymin, xmax, ymax) 157 | else: 158 | return None 159 | 160 | def geo_db_type(self, f): 161 | """ 162 | Return the geometry database type for Oracle. Unlike other spatial 163 | backends, no stored procedure is necessary and it's the same for all 164 | geometry types. 165 | """ 166 | return "MDSYS.SDO_GEOMETRY" 167 | 168 | def get_distance(self, f, value, lookup_type): 169 | """ 170 | Return the distance parameters given the value and the lookup type. 
171 | On Oracle, geometry columns with a geodetic coordinate system behave 172 | implicitly like a geography column, and thus meters will be used as 173 | the distance parameter on them. 174 | """ 175 | if not value: 176 | return [] 177 | value = value[0] 178 | if isinstance(value, Distance): 179 | if f.geodetic(self.connection): 180 | dist_param = value.m 181 | else: 182 | dist_param = getattr( 183 | value, Distance.unit_attname(f.units_name(self.connection)) 184 | ) 185 | else: 186 | dist_param = value 187 | 188 | # dwithin lookups on Oracle require a special string parameter 189 | # that starts with "distance=". 190 | if lookup_type == "dwithin": 191 | dist_param = "distance=%s" % dist_param 192 | 193 | return [dist_param] 194 | 195 | def get_geom_placeholder(self, f, value, compiler): 196 | if value is None: 197 | return "NULL" 198 | return super().get_geom_placeholder(f, value, compiler) 199 | 200 | def spatial_aggregate_name(self, agg_name): 201 | """ 202 | Return the spatial aggregate SQL name. 203 | """ 204 | agg_name = "unionagg" if agg_name.lower() == "union" else agg_name.lower() 205 | return getattr(self, agg_name) 206 | 207 | # Routines for getting the OGC-compliant models. 208 | def geometry_columns(self): 209 | from django.contrib.gis.db.backends.oracle.models import OracleGeometryColumns 210 | 211 | return OracleGeometryColumns 212 | 213 | def spatial_ref_sys(self): 214 | from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys 215 | 216 | return OracleSpatialRefSys 217 | 218 | def modify_insert_params(self, placeholder, params): 219 | """Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial 220 | backend due to #10888. 
221 | """ 222 | if placeholder == "NULL": 223 | return [] 224 | return super().modify_insert_params(placeholder, params) 225 | 226 | def get_geometry_converter(self, expression): 227 | read = wkb_r().read 228 | srid = expression.output_field.srid 229 | if srid == -1: 230 | srid = None 231 | geom_class = expression.output_field.geom_class 232 | 233 | def converter(value, expression, connection): 234 | if value is not None: 235 | geom = GEOSGeometryBase(read(memoryview(value.read())), geom_class) 236 | if srid: 237 | geom.srid = srid 238 | return geom 239 | 240 | return converter 241 | 242 | def get_area_att_for_field(self, field): 243 | return "sq_m" 244 | -------------------------------------------------------------------------------- /refactor-benchmark/cuda_cpp_scheduling_CUDACPPScheduling__can_fuse_epilogue_impl/cuda_cpp_scheduling.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import cast, List 3 | 4 | from ...._dynamo.utils import counters 5 | 6 | from ... import config, ir 7 | from ...codecache import code_hash, get_path 8 | from ...ir import ComputedBuffer, CUDATemplateBuffer, Pointwise 9 | from ...scheduler import ( 10 | BaseSchedulerNode, 11 | BaseScheduling, 12 | FusedSchedulerNode, 13 | Scheduler, 14 | SchedulerNode, 15 | ) 16 | from ...utils import get_fused_kernel_name, get_kernel_metadata, sympy_product 17 | from ...virtualized import V 18 | from ..common import IndentedBuffer 19 | 20 | from .cutlass_epilogue_gen import CUTLASSEVTOpNotImplementedError 21 | 22 | log = logging.getLogger(__name__) 23 | 24 | 25 | class CUDACPPScheduling(BaseScheduling): 26 | """ 27 | Partial Scheduling implementation for CUDA C++ Kernels. 28 | This class is intended to be used in combination with TritonScheduling, 29 | and delegated to by CUDACombinedScheduling. 30 | 31 | It handles fusion decisions and CUDA C++ specific template code generation. 
32 | """ 33 | 34 | def __init__(self, scheduler: Scheduler): 35 | super().__init__() 36 | self.scheduler = scheduler 37 | 38 | def group_fn(self, sizes): 39 | return tuple(V.graph.sizevars.simplify(sympy_product(s)) for s in sizes) 40 | 41 | def is_cuda_cpp_template(self, node: BaseSchedulerNode) -> bool: 42 | return isinstance(node, SchedulerNode) and isinstance( 43 | node.node, CUDATemplateBuffer 44 | ) 45 | 46 | def is_cuda_cpp_fused_template(self, node: BaseSchedulerNode) -> bool: 47 | return isinstance(node, FusedSchedulerNode) and self.is_cuda_cpp_template( 48 | node.get_template_node() 49 | ) 50 | 51 | def _can_fuse_epilogue_impl( 52 | self, 53 | cuda_template_buffer: CUDATemplateBuffer, 54 | epilogue_nodes: List[ir.IRNode], 55 | additional_node: ir.IRNode, 56 | ) -> bool: 57 | """ 58 | Check if the given node can be fused with the epilogue. At the moment, Kernels 59 | support fusion with Pointwise operations, wrapped in (named) ComputedBuffer nodes. 60 | 61 | Args: 62 | cuda_template_buffer : A CUDATemplateBuffer object representing the CUDA template and it's result buffer 63 | epilogue_nodes : List[ir.Buffer]: The list of already fused epilogue nodes. 64 | additional_node: The ir.Buffer node to be checked if it can be fused with the epilogue. 65 | Returns: 66 | - bool: True if the given node can be fused with the epilogue, False otherwise. 67 | 68 | """ 69 | if not isinstance(cuda_template_buffer, CUDATemplateBuffer): 70 | return False 71 | if not cuda_template_buffer.template.can_fuse_epilogue: 72 | # The used GEMM op does not support fusing epilogues 73 | return False 74 | if not isinstance(additional_node, ComputedBuffer): 75 | return False 76 | if not isinstance(additional_node.data, Pointwise): 77 | return False 78 | # We can fuse a Pointwise op that depends on the last fused epilogue node 79 | # if any. 
If there is no epilogue node yet, it needs to depend on the template 80 | # node 81 | node_name = additional_node.get_computed_buffer_name() 82 | if node_name is None: 83 | return False 84 | 85 | if len(epilogue_nodes) == 0: 86 | if cuda_template_buffer.name not in additional_node.get_read_names(): 87 | return False 88 | else: 89 | last_epilogue_node = epilogue_nodes[-1] 90 | assert isinstance(last_epilogue_node, ir.ComputedBuffer) # for mypy 91 | last_epilogue_name = ( 92 | last_epilogue_node.name 93 | if last_epilogue_node.name is not None 94 | else last_epilogue_node.data.name # type: ignore[attr-defined] 95 | ) 96 | if last_epilogue_name not in additional_node.get_read_names(): 97 | return False 98 | if additional_node.layout != cuda_template_buffer.layout: 99 | return False 100 | try: 101 | from torch._inductor.codegen.cuda.cutlass_epilogue_gen import ( 102 | CutlassEVTEpilogueArgumentFormatter, 103 | CutlassEVTEpilogueTypeFormatter, 104 | ) 105 | 106 | CutlassEVTEpilogueTypeFormatter.ir_to_evt_string( 107 | cast(str, cuda_template_buffer.name), "anything", [additional_node] 108 | ) 109 | CutlassEVTEpilogueArgumentFormatter.ir_to_evt_argument_string( 110 | cast(str, cuda_template_buffer.name), [additional_node] 111 | ) 112 | except CUTLASSEVTOpNotImplementedError as e: 113 | not_implemented_op = str(e) 114 | if not_implemented_op.startswith("_op_"): 115 | not_implemented_op = not_implemented_op[4:] 116 | log.warning( 117 | f"Cannot fuse epilogue node {additional_node} into {cuda_template_buffer.name}, likely due to unsupported operation: {not_implemented_op}" # noqa: G004, B950 118 | ) 119 | return False 120 | else: 121 | # Likely due to unsupported dtype. 122 | log.warning( 123 | f"Cannot fuse epilogue node {additional_node} into {cuda_template_buffer.name}. 
Reason: {not_implemented_op}" # noqa: G004, B950 124 | ) 125 | return False 126 | return True 127 | 128 | @staticmethod 129 | def _unwrap_epilogue_nodes(fused_node: FusedSchedulerNode) -> List[ir.IRNode]: 130 | nodes = fused_node.get_nodes() 131 | template_node = fused_node.get_template_node() 132 | nodes.remove(template_node) 133 | return [n.node for n in nodes] 134 | 135 | def can_fuse_vertical( 136 | self, node1: BaseSchedulerNode, node2: BaseSchedulerNode 137 | ) -> bool: 138 | if self.is_cuda_cpp_template(node1) and isinstance(node2, SchedulerNode): 139 | return self._can_fuse_epilogue_impl( 140 | cast(CUDATemplateBuffer, node1.node), [], node2.node 141 | ) 142 | elif self.is_cuda_cpp_fused_template(node1) and isinstance( 143 | node2, SchedulerNode 144 | ): 145 | fnode1 = cast(FusedSchedulerNode, node1) 146 | return self._can_fuse_epilogue_impl( 147 | fnode1.get_template_node().node, 148 | self._unwrap_epilogue_nodes(fnode1), 149 | node2.node, 150 | ) 151 | return False 152 | 153 | def define_kernel(self, src_code: str, node_schedule) -> str: 154 | wrapper = V.graph.wrapper_code 155 | if src_code in wrapper.src_to_kernel: 156 | kernel_name = wrapper.src_to_kernel[src_code] 157 | else: 158 | fused_name = ( 159 | get_fused_kernel_name(node_schedule, config.triton.descriptive_names) 160 | if config.triton.descriptive_names 161 | else "" 162 | ) 163 | kernel_name = "_".join(["cuda", fused_name, wrapper.next_kernel_suffix()]) 164 | # use the original src_code as the key 165 | wrapper.src_to_kernel[src_code] = kernel_name 166 | src_code = src_code.replace("KERNEL_NAME", kernel_name) 167 | 168 | _, _, kernel_path = get_path(code_hash(src_code), "py") 169 | 170 | compile_wrapper = IndentedBuffer() 171 | compile_wrapper.writeline("async_compile.cuda(r'''") 172 | compile_wrapper.splice(src_code, strip=True) 173 | compile_wrapper.writeline("''', 'so')") 174 | 175 | metadata_comment = f"# kernel path: {kernel_path}" 176 | origins, detailed_origins = 
get_kernel_metadata(node_schedule, wrapper) 177 | metadata_comment += "\n" + origins + "\n" + detailed_origins 178 | wrapper.define_kernel( 179 | kernel_name, compile_wrapper.getvalue(), metadata_comment 180 | ) 181 | return kernel_name 182 | 183 | def codegen_template( 184 | self, template_node: BaseSchedulerNode, epilogue_nodes: List[SchedulerNode] 185 | ): 186 | """ 187 | Codegen a CUDA template, possibly with fused epilogues 188 | """ 189 | counters["inductor"]["cuda_epilogue_fusion_counter"] += len(epilogue_nodes) 190 | assert self.is_cuda_cpp_template( 191 | template_node 192 | ), "Template node passed to CUDAScheduler.codegen_template must be a SchedulerNode that wraps a CUDATemplateBuffer" 193 | template_node = cast(SchedulerNode, template_node) 194 | _, (numel, rnumel) = template_node.group 195 | assert rnumel == 1 196 | ctb: CUDATemplateBuffer = cast(CUDATemplateBuffer, template_node.node) 197 | epilogue_ir_nodes: List[ir.Buffer] = [n.node for n in epilogue_nodes] 198 | assert all( 199 | isinstance(n, ir.ComputedBuffer) for n in epilogue_ir_nodes 200 | ), "Epilogue nodes must all be instances of ir.ComputedBuffer" 201 | kernel, render = ctb.make_kernel_render(ctb, epilogue_nodes=epilogue_ir_nodes) 202 | with kernel: 203 | for node in [template_node, *epilogue_nodes]: 204 | node.mark_run() 205 | src_code = render() 206 | 207 | with V.set_kernel_handler(kernel): 208 | node_schedule = [template_node, *epilogue_nodes] 209 | kernel_name = self.define_kernel(src_code, node_schedule) 210 | kernel.call_kernel(kernel_name, ctb, epilogue_ir_nodes) 211 | V.graph.removed_buffers |= kernel.removed_buffers 212 | self.scheduler.free_buffers() 213 | -------------------------------------------------------------------------------- /refactor-benchmark/backends_ModelBackend_with_perm/backends.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | from django.contrib.auth import get_user_model 4 | from 
django.contrib.auth.models import Permission 5 | from django.db.models import Exists, OuterRef, Q 6 | from django.utils.deprecation import RemovedInDjango50Warning 7 | from django.utils.inspect import func_supports_parameter 8 | 9 | UserModel = get_user_model() 10 | 11 | 12 | class BaseBackend: 13 | def authenticate(self, request, **kwargs): 14 | return None 15 | 16 | def get_user(self, user_id): 17 | return None 18 | 19 | def get_user_permissions(self, user_obj, obj=None): 20 | return set() 21 | 22 | def get_group_permissions(self, user_obj, obj=None): 23 | return set() 24 | 25 | def get_all_permissions(self, user_obj, obj=None): 26 | return { 27 | *self.get_user_permissions(user_obj, obj=obj), 28 | *self.get_group_permissions(user_obj, obj=obj), 29 | } 30 | 31 | def has_perm(self, user_obj, perm, obj=None): 32 | return perm in self.get_all_permissions(user_obj, obj=obj) 33 | 34 | 35 | class ModelBackend(BaseBackend): 36 | """ 37 | Authenticates against settings.AUTH_USER_MODEL. 38 | """ 39 | 40 | def authenticate(self, request, username=None, password=None, **kwargs): 41 | if username is None: 42 | username = kwargs.get(UserModel.USERNAME_FIELD) 43 | if username is None or password is None: 44 | return 45 | try: 46 | user = UserModel._default_manager.get_by_natural_key(username) 47 | except UserModel.DoesNotExist: 48 | # Run the default password hasher once to reduce the timing 49 | # difference between an existing and a nonexistent user (#20760). 50 | UserModel().set_password(password) 51 | else: 52 | if user.check_password(password) and self.user_can_authenticate(user): 53 | return user 54 | 55 | def user_can_authenticate(self, user): 56 | """ 57 | Reject users with is_active=False. Custom user models that don't have 58 | that attribute are allowed. 
59 | """ 60 | return getattr(user, "is_active", True) 61 | 62 | def _get_user_permissions(self, user_obj): 63 | return user_obj.user_permissions.all() 64 | 65 | def _get_group_permissions(self, user_obj): 66 | user_groups_field = get_user_model()._meta.get_field("groups") 67 | user_groups_query = "group__%s" % user_groups_field.related_query_name() 68 | return Permission.objects.filter(**{user_groups_query: user_obj}) 69 | 70 | def _get_permissions(self, user_obj, obj, from_name): 71 | """ 72 | Return the permissions of `user_obj` from `from_name`. `from_name` can 73 | be either "group" or "user" to return permissions from 74 | `_get_group_permissions` or `_get_user_permissions` respectively. 75 | """ 76 | if not user_obj.is_active or user_obj.is_anonymous or obj is not None: 77 | return set() 78 | 79 | perm_cache_name = "_%s_perm_cache" % from_name 80 | if not hasattr(user_obj, perm_cache_name): 81 | if user_obj.is_superuser: 82 | perms = Permission.objects.all() 83 | else: 84 | perms = getattr(self, "_get_%s_permissions" % from_name)(user_obj) 85 | perms = perms.values_list("content_type__app_label", "codename").order_by() 86 | setattr( 87 | user_obj, perm_cache_name, {"%s.%s" % (ct, name) for ct, name in perms} 88 | ) 89 | return getattr(user_obj, perm_cache_name) 90 | 91 | def get_user_permissions(self, user_obj, obj=None): 92 | """ 93 | Return a set of permission strings the user `user_obj` has from their 94 | `user_permissions`. 95 | """ 96 | return self._get_permissions(user_obj, obj, "user") 97 | 98 | def get_group_permissions(self, user_obj, obj=None): 99 | """ 100 | Return a set of permission strings the user `user_obj` has from the 101 | groups they belong. 
102 | """ 103 | return self._get_permissions(user_obj, obj, "group") 104 | 105 | def get_all_permissions(self, user_obj, obj=None): 106 | if not user_obj.is_active or user_obj.is_anonymous or obj is not None: 107 | return set() 108 | if not hasattr(user_obj, "_perm_cache"): 109 | user_obj._perm_cache = super().get_all_permissions(user_obj) 110 | return user_obj._perm_cache 111 | 112 | def has_perm(self, user_obj, perm, obj=None): 113 | return user_obj.is_active and super().has_perm(user_obj, perm, obj=obj) 114 | 115 | def has_module_perms(self, user_obj, app_label): 116 | """ 117 | Return True if user_obj has any permissions in the given app_label. 118 | """ 119 | return user_obj.is_active and any( 120 | perm[: perm.index(".")] == app_label 121 | for perm in self.get_all_permissions(user_obj) 122 | ) 123 | 124 | def with_perm(self, perm, is_active=True, include_superusers=True, obj=None): 125 | """ 126 | Return users that have permission "perm". By default, filter out 127 | inactive users and include superusers. 128 | """ 129 | if isinstance(perm, str): 130 | try: 131 | app_label, codename = perm.split(".") 132 | except ValueError: 133 | raise ValueError( 134 | "Permission name should be in the form " 135 | "app_label.permission_codename." 136 | ) 137 | elif not isinstance(perm, Permission): 138 | raise TypeError( 139 | "The `perm` argument must be a string or a permission instance." 
140 | ) 141 | 142 | if obj is not None: 143 | return UserModel._default_manager.none() 144 | 145 | permission_q = Q(group__user=OuterRef("pk")) | Q(user=OuterRef("pk")) 146 | if isinstance(perm, Permission): 147 | permission_q &= Q(pk=perm.pk) 148 | else: 149 | permission_q &= Q(codename=codename, content_type__app_label=app_label) 150 | 151 | user_q = Exists(Permission.objects.filter(permission_q)) 152 | if include_superusers: 153 | user_q |= Q(is_superuser=True) 154 | if is_active is not None: 155 | user_q &= Q(is_active=is_active) 156 | 157 | return UserModel._default_manager.filter(user_q) 158 | 159 | def get_user(self, user_id): 160 | try: 161 | user = UserModel._default_manager.get(pk=user_id) 162 | except UserModel.DoesNotExist: 163 | return None 164 | return user if self.user_can_authenticate(user) else None 165 | 166 | 167 | class AllowAllUsersModelBackend(ModelBackend): 168 | def user_can_authenticate(self, user): 169 | return True 170 | 171 | 172 | class RemoteUserBackend(ModelBackend): 173 | """ 174 | This backend is to be used in conjunction with the ``RemoteUserMiddleware`` 175 | found in the middleware module of this package, and is used when the server 176 | is handling authentication outside of Django. 177 | 178 | By default, the ``authenticate`` method creates ``User`` objects for 179 | usernames that don't already exist in the database. Subclasses can disable 180 | this behavior by setting the ``create_unknown_user`` attribute to 181 | ``False``. 182 | """ 183 | 184 | # Create a User object if not already in the database? 185 | create_unknown_user = True 186 | 187 | def authenticate(self, request, remote_user): 188 | """ 189 | The username passed as ``remote_user`` is considered trusted. Return 190 | the ``User`` object with the given username. Create a new ``User`` 191 | object if ``create_unknown_user`` is ``True``. 
192 | 193 | Return None if ``create_unknown_user`` is ``False`` and a ``User`` 194 | object with the given username is not found in the database. 195 | """ 196 | if not remote_user: 197 | return 198 | created = False 199 | user = None 200 | username = self.clean_username(remote_user) 201 | 202 | # Note that this could be accomplished in one try-except clause, but 203 | # instead we use get_or_create when creating unknown users since it has 204 | # built-in safeguards for multiple threads. 205 | if self.create_unknown_user: 206 | user, created = UserModel._default_manager.get_or_create( 207 | **{UserModel.USERNAME_FIELD: username} 208 | ) 209 | else: 210 | try: 211 | user = UserModel._default_manager.get_by_natural_key(username) 212 | except UserModel.DoesNotExist: 213 | pass 214 | 215 | # RemovedInDjango50Warning: When the deprecation ends, replace with: 216 | # user = self.configure_user(request, user, created=created) 217 | if func_supports_parameter(self.configure_user, "created"): 218 | user = self.configure_user(request, user, created=created) 219 | else: 220 | warnings.warn( 221 | f"`created=True` must be added to the signature of " 222 | f"{self.__class__.__qualname__}.configure_user().", 223 | category=RemovedInDjango50Warning, 224 | ) 225 | if created: 226 | user = self.configure_user(request, user) 227 | return user if self.user_can_authenticate(user) else None 228 | 229 | def clean_username(self, username): 230 | """ 231 | Perform any cleaning on the "username" prior to using it to get or 232 | create the user object. Return the cleaned username. 233 | 234 | By default, return the username unchanged. 235 | """ 236 | return username 237 | 238 | def configure_user(self, request, user, created=True): 239 | """ 240 | Configure a user and return the updated user. 241 | 242 | By default, return the user unmodified. 
243 | """ 244 | return user 245 | 246 | 247 | class AllowAllUsersRemoteUserBackend(RemoteUserBackend): 248 | def user_can_authenticate(self, user): 249 | return True 250 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
"""Management command: dump database contents as a serialized fixture."""
import gzip
import os
import warnings

from django.apps import apps
from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import parse_apps_and_model_labels
from django.db import DEFAULT_DB_ALIAS, router

# bz2 and lzma are optional stdlib modules (they may be missing when Python
# was built without the underlying C libraries); track availability so
# --output compression can fall back gracefully.
try:
    import bz2

    has_bz2 = True
except ImportError:
    has_bz2 = False

try:
    import lzma

    has_lzma = True
except ImportError:
    has_lzma = False


class ProxyModelWarning(Warning):
    """Warning category used when a proxy model is skipped during the dump."""

    pass


class Command(BaseCommand):
    help = (
        "Output the contents of the database as a fixture of the given format "
        "(using each model's default manager unless --all is specified)."
    )

    def add_arguments(self, parser):
        """Register dumpdata's command-line options on *parser*."""
        parser.add_argument(
            "args",
            metavar="app_label[.ModelName]",
            nargs="*",
            help=(
                "Restricts dumped data to the specified app_label or "
                "app_label.ModelName."
            ),
        )
        parser.add_argument(
            "--format",
            default="json",
            help="Specifies the output serialization format for fixtures.",
        )
        parser.add_argument(
            "--indent",
            type=int,
            help="Specifies the indent level to use when pretty-printing output.",
        )
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help="Nominates a specific database to dump fixtures from. "
            'Defaults to the "default" database.',
        )
        parser.add_argument(
            "-e",
            "--exclude",
            action="append",
            default=[],
            help="An app_label or app_label.ModelName to exclude "
            "(use multiple --exclude to exclude multiple apps/models).",
        )
        parser.add_argument(
            "--natural-foreign",
            action="store_true",
            dest="use_natural_foreign_keys",
            help="Use natural foreign keys if they are available.",
        )
        parser.add_argument(
            "--natural-primary",
            action="store_true",
            dest="use_natural_primary_keys",
            help="Use natural primary keys if they are available.",
        )
        parser.add_argument(
            "-a",
            "--all",
            action="store_true",
            dest="use_base_manager",
            help=(
                "Use Django's base manager to dump all models stored in the database, "
                "including those that would otherwise be filtered or modified by a "
                "custom manager."
            ),
        )
        parser.add_argument(
            "--pks",
            dest="primary_keys",
            help="Only dump objects with given primary keys. Accepts a comma-separated "
            "list of keys. This option only works when you specify one model.",
        )
        parser.add_argument(
            "-o", "--output", help="Specifies file to which the output is written."
        )

    def handle(self, *app_labels, **options):
        """
        Collect the requested apps/models and serialize them to the chosen
        output (stdout or a possibly-compressed file).

        Raises CommandError for unknown apps, models, or serialization
        formats, or when --pks is combined with more than one model.
        """
        format = options["format"]
        indent = options["indent"]
        using = options["database"]
        excludes = options["exclude"]
        output = options["output"]
        show_traceback = options["traceback"]
        use_natural_foreign_keys = options["use_natural_foreign_keys"]
        use_natural_primary_keys = options["use_natural_primary_keys"]
        use_base_manager = options["use_base_manager"]
        pks = options["primary_keys"]

        if pks:
            primary_keys = [pk.strip() for pk in pks.split(",")]
        else:
            primary_keys = []

        excluded_models, excluded_apps = parse_apps_and_model_labels(excludes)

        # Build app_list: a mapping of app_config -> list of models to dump,
        # where None means "every model in the app".
        if not app_labels:
            if primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = dict.fromkeys(
                app_config
                for app_config in apps.get_app_configs()
                if app_config.models_module is not None
                and app_config not in excluded_apps
            )
        else:
            if len(app_labels) > 1 and primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = {}
            for label in app_labels:
                try:
                    # "app_label.ModelName" form; ValueError (no dot) is
                    # handled below as the bare-app form.
                    app_label, model_label = label.split(".")
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    try:
                        model = app_config.get_model(model_label)
                    except LookupError:
                        raise CommandError(
                            "Unknown model: %s.%s" % (app_label, model_label)
                        )

                    app_list_value = app_list.setdefault(app_config, [])

                    # We may have previously seen an "all-models" request for
                    # this app (no model qualifier was given). In this case
                    # there is no need adding specific models to the list.
                    if app_list_value is not None and model not in app_list_value:
                        app_list_value.append(model)
                except ValueError:
                    if primary_keys:
                        raise CommandError(
                            "You can only use --pks option with one model"
                        )
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    app_list[app_config] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            try:
                serializers.get_serializer(format)
            except serializers.SerializerDoesNotExist:
                pass

            raise CommandError("Unknown serialization format: %s" % format)

        def get_objects(count_only=False):
            """
            Collate the objects to be serialized. If count_only is True, just
            count the number of objects to be serialized.
            """
            if use_natural_foreign_keys:
                models = serializers.sort_dependencies(
                    app_list.items(), allow_cycles=True
                )
            else:
                # There is no need to sort dependencies when natural foreign
                # keys are not used.
                models = []
                for (app_config, model_list) in app_list.items():
                    if model_list is None:
                        models.extend(app_config.get_models())
                    else:
                        models.extend(model_list)
            for model in models:
                if model in excluded_models:
                    continue
                # Proxy models are skipped unless their concrete model is
                # also in the dump; warn so the omission is visible.
                if model._meta.proxy and model._meta.proxy_for_model not in models:
                    warnings.warn(
                        "%s is a proxy model and won't be serialized."
                        % model._meta.label,
                        category=ProxyModelWarning,
                    )
                if not model._meta.proxy and router.allow_migrate_model(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = model._default_manager

                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    if count_only:
                        yield queryset.order_by().count()
                    else:
                        yield from queryset.iterator()

        try:
            self.stdout.ending = None
            progress_output = None
            object_count = 0
            # If dumpdata is outputting to stdout, there is no way to display progress
            if output and self.stdout.isatty() and options["verbosity"] > 0:
                progress_output = self.stdout
                object_count = sum(get_objects(count_only=True))
            if output:
                # Map the output file's extension to (opener, opener kwargs,
                # target path). Unsupported/unavailable compressors fall back
                # to plain open() on the extension-stripped path, with a
                # warning emitted below.
                file_root, file_ext = os.path.splitext(output)
                compression_formats = {
                    ".bz2": (open, {}, file_root),
                    ".gz": (gzip.open, {}, output),
                    ".lzma": (open, {}, file_root),
                    ".xz": (open, {}, file_root),
                    ".zip": (open, {}, file_root),
                }
                if has_bz2:
                    compression_formats[".bz2"] = (bz2.open, {}, output)
                if has_lzma:
                    compression_formats[".lzma"] = (
                        lzma.open,
                        {"format": lzma.FORMAT_ALONE},
                        output,
                    )
                    compression_formats[".xz"] = (lzma.open, {}, output)
                try:
                    open_method, kwargs, file_path = compression_formats[file_ext]
                except KeyError:
                    open_method, kwargs, file_path = (open, {}, output)
                if file_path != output:
                    file_name = os.path.basename(file_path)
                    warnings.warn(
                        f"Unsupported file extension ({file_ext}). "
                        f"Fixtures saved in '{file_name}'.",
                        RuntimeWarning,
                    )
                stream = open_method(file_path, "wt", **kwargs)
            else:
                stream = None
            try:
                serializers.serialize(
                    format,
                    get_objects(),
                    indent=indent,
                    use_natural_foreign_keys=use_natural_foreign_keys,
                    use_natural_primary_keys=use_natural_primary_keys,
                    stream=stream or self.stdout,
                    progress_output=progress_output,
                    object_count=object_count,
                )
            finally:
                if stream:
                    stream.close()
        except Exception as e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
"""Staticfiles finder classes and the module-level finder API."""
import functools
import os

from django.apps import apps
from django.conf import settings
from django.contrib.staticfiles import utils
from django.core.checks import Error, Warning
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage, Storage, default_storage
from django.utils._os import safe_join
from django.utils.functional import LazyObject, empty
from django.utils.module_loading import import_string

# To keep track on which directories the finder has searched the static files.
searched_locations = []


class BaseFinder:
    """
    A base file finder to be used for custom staticfiles finder classes.
    """

    def check(self, **kwargs):
        raise NotImplementedError(
            "subclasses may provide a check() method to verify the finder is "
            "configured correctly."
        )

    def find(self, path, all=False):
        """
        Given a relative file path, find an absolute file path.

        If the ``all`` parameter is False (default) return only the first found
        file path; if True, return a list of all found files paths.
        """
        raise NotImplementedError(
            "subclasses of BaseFinder must provide a find() method"
        )

    def list(self, ignore_patterns):
        """
        Given an optional list of paths to ignore, return a two item iterable
        consisting of the relative path and storage instance.
        """
        raise NotImplementedError(
            "subclasses of BaseFinder must provide a list() method"
        )


class FileSystemFinder(BaseFinder):
    """
    A static files finder that uses the ``STATICFILES_DIRS`` setting
    to locate files.
    """

    def __init__(self, app_names=None, *args, **kwargs):
        # List of locations with static files
        self.locations = []
        # Maps dir paths to an appropriate storage instance
        self.storages = {}
        # STATICFILES_DIRS entries are either plain roots or (prefix, root)
        # pairs; normalize to (prefix, root), deduplicating as we go.
        for root in settings.STATICFILES_DIRS:
            if isinstance(root, (list, tuple)):
                prefix, root = root
            else:
                prefix = ""
            if (prefix, root) not in self.locations:
                self.locations.append((prefix, root))
        for prefix, root in self.locations:
            filesystem_storage = FileSystemStorage(location=root)
            filesystem_storage.prefix = prefix
            self.storages[root] = filesystem_storage
        super().__init__(*args, **kwargs)

    def check(self, **kwargs):
        """
        Validate the STATICFILES_DIRS setting, returning a list of system
        check messages (staticfiles.E001/E002/E003 and W004).
        """
        errors = []
        if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
            errors.append(
                Error(
                    "The STATICFILES_DIRS setting is not a tuple or list.",
                    hint="Perhaps you forgot a trailing comma?",
                    id="staticfiles.E001",
                )
            )
            # The setting is the wrong shape entirely; no point checking
            # its entries.
            return errors
        for root in settings.STATICFILES_DIRS:
            if isinstance(root, (list, tuple)):
                prefix, root = root
                if prefix.endswith("/"):
                    errors.append(
                        Error(
                            "The prefix %r in the STATICFILES_DIRS setting must "
                            "not end with a slash." % prefix,
                            id="staticfiles.E003",
                        )
                    )
            if settings.STATIC_ROOT and os.path.abspath(
                settings.STATIC_ROOT
            ) == os.path.abspath(root):
                errors.append(
                    Error(
                        "The STATICFILES_DIRS setting should not contain the "
                        "STATIC_ROOT setting.",
                        id="staticfiles.E002",
                    )
                )
            if not os.path.isdir(root):
                errors.append(
                    Warning(
                        f"The directory '{root}' in the STATICFILES_DIRS setting "
                        f"does not exist.",
                        id="staticfiles.W004",
                    )
                )
        return errors

    def find(self, path, all=False):
        """
        Look for files in the extra locations as defined in STATICFILES_DIRS.
        """
        matches = []
        for prefix, root in self.locations:
            if root not in searched_locations:
                searched_locations.append(root)
            matched_path = self.find_location(root, path, prefix)
            if matched_path:
                if not all:
                    return matched_path
                matches.append(matched_path)
        return matches

    def find_location(self, root, path, prefix=None):
        """
        Find a requested static file in a location and return the found
        absolute path (or ``None`` if no match).
        """
        if prefix:
            # Only paths under the configured prefix can match; strip it
            # before joining with the location root.
            prefix = "%s%s" % (prefix, os.sep)
            if not path.startswith(prefix):
                return None
            path = path[len(prefix) :]
        path = safe_join(root, path)
        if os.path.exists(path):
            return path

    def list(self, ignore_patterns):
        """
        List all files in all locations.
        """
        for prefix, root in self.locations:
            # Skip nonexistent directories.
            if os.path.isdir(root):
                storage = self.storages[root]
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage


class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each app as
    specified in the source_dir attribute.
    """

    storage_class = FileSystemStorage
    source_dir = "static"

    def __init__(self, app_names=None, *args, **kwargs):
        # The list of apps that are handled
        self.apps = []
        # Mapping of app names to storage instances
        self.storages = {}
        app_configs = apps.get_app_configs()
        if app_names:
            app_names = set(app_names)
            app_configs = [ac for ac in app_configs if ac.name in app_names]
        for app_config in app_configs:
            app_storage = self.storage_class(
                os.path.join(app_config.path, self.source_dir)
            )
            if os.path.isdir(app_storage.location):
                self.storages[app_config.name] = app_storage
                if app_config.name not in self.apps:
                    self.apps.append(app_config.name)
        super().__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in self.storages.values():
            if storage.exists(""):  # check if storage location exists
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage

    def find(self, path, all=False):
        """
        Look for files in the app directories.
        """
        matches = []
        for app in self.apps:
            app_location = self.storages[app].location
            if app_location not in searched_locations:
                searched_locations.append(app_location)
            match = self.find_in_app(app, path)
            if match:
                if not all:
                    return match
                matches.append(match)
        return matches

    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's static locations.
        """
        storage = self.storages.get(app)
        # Only try to find a file if the source dir actually exists.
        if storage and storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path


class BaseStorageFinder(BaseFinder):
    """
    A base static files finder to be used to extended
    with an own storage class.
    """

    storage = None

    def __init__(self, storage=None, *args, **kwargs):
        if storage is not None:
            self.storage = storage
        if self.storage is None:
            raise ImproperlyConfigured(
                "The staticfiles storage finder %r "
                "doesn't have a storage class "
                "assigned." % self.__class__
            )
        # Make sure we have a storage instance here.
        if not isinstance(self.storage, (Storage, LazyObject)):
            self.storage = self.storage()
        super().__init__(*args, **kwargs)

    def find(self, path, all=False):
        """
        Look for files in the default file storage, if it's local.
        """
        try:
            # Remote storages raise NotImplementedError for .path(); only
            # local storages can be searched on disk.
            self.storage.path("")
        except NotImplementedError:
            pass
        else:
            if self.storage.location not in searched_locations:
                searched_locations.append(self.storage.location)
            if self.storage.exists(path):
                match = self.storage.path(path)
                if all:
                    match = [match]
                return match
        return []

    def list(self, ignore_patterns):
        """
        List all files of the storage.
        """
        for path in utils.get_files(self.storage, ignore_patterns):
            yield path, self.storage


class DefaultStorageFinder(BaseStorageFinder):
    """
    A static files finder that uses the default storage backend.
    """

    storage = default_storage

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        base_location = getattr(self.storage, "base_location", empty)
        if not base_location:
            raise ImproperlyConfigured(
                "The storage backend of the "
                "staticfiles finder %r doesn't have "
                "a valid location." % self.__class__
            )


def find(path, all=False):
    """
    Find a static file with the given path using all enabled finders.

    If ``all`` is ``False`` (default), return the first matching
    absolute path (or ``None`` if no match). Otherwise return a list.
    """
    searched_locations[:] = []
    matches = []
    for finder in get_finders():
        result = finder.find(path, all=all)
        if not all and result:
            return result
        if not isinstance(result, (list, tuple)):
            result = [result]
        matches.extend(result)
    if matches:
        return matches
    # No match.
    return [] if all else None


def get_finders():
    """Yield an instance of every finder in STATICFILES_FINDERS."""
    for finder_path in settings.STATICFILES_FINDERS:
        yield get_finder(finder_path)


@functools.lru_cache(maxsize=None)
def get_finder(import_path):
    """
    Import the staticfiles finder class described by import_path, where
    import_path is the full Python path to the class.
    """
    Finder = import_string(import_path)
    if not issubclass(Finder, BaseFinder):
        raise ImproperlyConfigured(
            'Finder "%s" is not a subclass of "%s"' % (Finder, BaseFinder)
        )
    return Finder()