From ed06e550226ea1ae2c2c11139b02089e0186bf1e Mon Sep 17 00:00:00 2001
From: Eleanor Boyd
Date: Mon, 17 Apr 2023 10:03:43 -0700
Subject: [PATCH] Edited Discovery Logic (#20631)

Closes https://github.com/microsoft/vscode-python/issues/20078 and closes https://github.com/microsoft/vscode-python/issues/20085 (which covers the testing work that supports this code).

This is a second round of edits on the Python discovery logic, incorporating all the review comments made on the previous PR (located on my personal fork). The discovery logic now works against the pytest repository itself, successfully discovering all of the tests in the pytest library.

---------

Co-authored-by: Brett Cannon
Co-authored-by: Karthik Nadig
---
 .vscode/launch.json | 9 +
 .../nested_folder_two/test_nest.py | 8 +
 .../nested_folder_one/test_bottom_folder.py | 14 +
 .../test_top_folder.py | 14 +
 .../pytestadapter/.data/empty_discovery.py | 7 +
 .../.data/error_parametrize_discovery.py | 10 +
 .../.data/error_syntax_discovery.txt | 7 +
 .../pytestadapter/.data/parametrize_tests.py | 10 +
 .../pytestadapter/.data/simple_pytest.py | 7 +
 .../pytestadapter/.data/text_docstring.txt | 4 +
 .../.data/unittest_folder/test_add.py | 21 +
 .../.data/unittest_folder/test_subtract.py | 25 +
 .../.data/unittest_pytest_same_file.py | 17 +
 pythonFiles/tests/pytestadapter/__init__.py | 2 +
 .../expected_discovery_test_output.py | 473 ++++++++++++++++++
 pythonFiles/tests/pytestadapter/helpers.py | 161 ++++++
 .../tests/pytestadapter/test_discovery.py | 112 +++++
 pythonFiles/vscode_pytest/__init__.py | 334 +++++++++++++
 .../pytest/pytestDiscoveryAdapter.ts | 4 +-
 .../pytest/pytestExecutionAdapter.ts | 99 ++--
 .../unittest/testExecutionAdapter.ts | 6 +-
 .../pytestDiscoveryAdapter.unit.test.ts | 91 ++++
 22 files changed, 1380 insertions(+), 55 deletions(-)
 create mode 100644 pythonFiles/tests/pytestadapter/.data/double_nested_folder/nested_folder_one/nested_folder_two/test_nest.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/nested_folder_one/test_bottom_folder.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/test_top_folder.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/empty_discovery.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/error_parametrize_discovery.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/error_syntax_discovery.txt
 create mode 100644 pythonFiles/tests/pytestadapter/.data/parametrize_tests.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/simple_pytest.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/text_docstring.txt
 create mode 100644 pythonFiles/tests/pytestadapter/.data/unittest_folder/test_add.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/unittest_folder/test_subtract.py
 create mode 100644 pythonFiles/tests/pytestadapter/.data/unittest_pytest_same_file.py
 create mode 100644 pythonFiles/tests/pytestadapter/__init__.py
 create mode 100644 pythonFiles/tests/pytestadapter/expected_discovery_test_output.py
 create mode 100644 pythonFiles/tests/pytestadapter/helpers.py
 create mode 100644 pythonFiles/tests/pytestadapter/test_discovery.py
 create mode 100644 pythonFiles/vscode_pytest/__init__.py
 create mode 100644 src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts
diff --git a/.vscode/launch.json b/.vscode/launch.json index 7b654703dbd8..82981a93305d 100644 --- a/.vscode/launch.json +++
b/.vscode/launch.json @@ -253,6 +253,15 @@ "request": "attach", "listen": { "host": "localhost", "port": 5678 }, "justMyCode": true + }, + { + "name": "Debug pytest plugin tests", + + "type": "python", + "request": "launch", + "module": "pytest", + "args": ["${workspaceFolder}/pythonFiles/tests/pytestadapter"], + "justMyCode": true } ], "compounds": [ diff --git a/pythonFiles/tests/pytestadapter/.data/double_nested_folder/nested_folder_one/nested_folder_two/test_nest.py b/pythonFiles/tests/pytestadapter/.data/double_nested_folder/nested_folder_one/nested_folder_two/test_nest.py new file mode 100644 index 000000000000..9ac9f7017f87 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/double_nested_folder/nested_folder_one/nested_folder_two/test_nest.py @@ -0,0 +1,8 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + +# This test's id is double_nested_folder/nested_folder_one/nested_folder_two/test_nest.py::test_function. +# This test passes. +def test_function(): # test_marker--test_function + assert 1 == 1 diff --git a/pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/nested_folder_one/test_bottom_folder.py b/pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/nested_folder_one/test_bottom_folder.py new file mode 100644 index 000000000000..59738aeba37f --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/nested_folder_one/test_bottom_folder.py @@ -0,0 +1,14 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + +# This test's id is dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t. +# This test passes. +def test_bottom_function_t(): # test_marker--test_bottom_function_t + assert True + + +# This test's id is dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f. +# This test fails. +def test_bottom_function_f(): # test_marker--test_bottom_function_f + assert False diff --git a/pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/test_top_folder.py b/pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/test_top_folder.py new file mode 100644 index 000000000000..010c54cf4461 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/dual_level_nested_folder/test_top_folder.py @@ -0,0 +1,14 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + +# This test's id is dual_level_nested_folder/test_top_folder.py::test_top_function_t. +# This test passes. +def test_top_function_t(): # test_marker--test_top_function_t + assert True + + +# This test's id is dual_level_nested_folder/test_top_folder.py::test_top_function_f. +# This test fails. +def test_top_function_f(): # test_marker--test_top_function_f + assert False diff --git a/pythonFiles/tests/pytestadapter/.data/empty_discovery.py b/pythonFiles/tests/pytestadapter/.data/empty_discovery.py new file mode 100644 index 000000000000..5f4ea27aec7f --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/empty_discovery.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + +# This file has no tests in it; the discovery will return an empty list of tests. 
+def function_function(string): + return string diff --git a/pythonFiles/tests/pytestadapter/.data/error_parametrize_discovery.py b/pythonFiles/tests/pytestadapter/.data/error_parametrize_discovery.py new file mode 100644 index 000000000000..8e48224edf3b --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/error_parametrize_discovery.py @@ -0,0 +1,10 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import pytest + + +# This test has an error which will appear on pytest discovery. +# This error is intentional and is meant to test pytest discovery error handling. +@pytest.mark.parametrize("actual,expected", [("3+5", 8), ("2+4", 6), ("6*9", 42)]) +def test_function(): + assert True diff --git a/pythonFiles/tests/pytestadapter/.data/error_syntax_discovery.txt b/pythonFiles/tests/pytestadapter/.data/error_syntax_discovery.txt new file mode 100644 index 000000000000..78627fffb351 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/error_syntax_discovery.txt @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +# This test has a syntax error. +# This error is intentional and is meant to test pytest discovery error handling. +def test_function() + assert True diff --git a/pythonFiles/tests/pytestadapter/.data/parametrize_tests.py b/pythonFiles/tests/pytestadapter/.data/parametrize_tests.py new file mode 100644 index 000000000000..9421e0cc0691 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/parametrize_tests.py @@ -0,0 +1,10 @@ +import pytest + + +# Testing pytest with parametrized tests. The first two pass, the third fails. +# The tests ids are parametrize_tests.py::test_adding[3+5-8] and so on. +@pytest.mark.parametrize( # test_marker--test_adding + "actual, expected", [("3+5", 8), ("2+4", 6), ("6+9", 16)] +) +def test_adding(actual, expected): + assert eval(actual) == expected diff --git a/pythonFiles/tests/pytestadapter/.data/simple_pytest.py b/pythonFiles/tests/pytestadapter/.data/simple_pytest.py new file mode 100644 index 000000000000..9f9bfb014f3d --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/simple_pytest.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + +# This test passes. +def test_function(): # test_marker--test_function + assert 1 == 1 diff --git a/pythonFiles/tests/pytestadapter/.data/text_docstring.txt b/pythonFiles/tests/pytestadapter/.data/text_docstring.txt new file mode 100644 index 000000000000..b29132c10b57 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/text_docstring.txt @@ -0,0 +1,4 @@ +This is a doctest test which passes #test_marker--text_docstring.txt +>>> x = 3 +>>> x +3 diff --git a/pythonFiles/tests/pytestadapter/.data/unittest_folder/test_add.py b/pythonFiles/tests/pytestadapter/.data/unittest_folder/test_add.py new file mode 100644 index 000000000000..a96c7f2fa392 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/unittest_folder/test_add.py @@ -0,0 +1,21 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import unittest + + +def add(a, b): + return a + b + + +class TestAddFunction(unittest.TestCase): + # This test's id is unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers. + # This test passes. 
+ def test_add_positive_numbers(self): # test_marker--test_add_positive_numbers + result = add(2, 3) + self.assertEqual(result, 5) + + # This test's id is unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers. + # This test passes. + def test_add_negative_numbers(self): # test_marker--test_add_negative_numbers + result = add(-2, -3) + self.assertEqual(result, -5) diff --git a/pythonFiles/tests/pytestadapter/.data/unittest_folder/test_subtract.py b/pythonFiles/tests/pytestadapter/.data/unittest_folder/test_subtract.py new file mode 100644 index 000000000000..80087fed0f3c --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/unittest_folder/test_subtract.py @@ -0,0 +1,25 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import unittest + + +def subtract(a, b): + return a - b + + +class TestSubtractFunction(unittest.TestCase): + # This test's id is unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers. + # This test passes. + def test_subtract_positive_numbers( # test_marker--test_subtract_positive_numbers + self, + ): + result = subtract(5, 3) + self.assertEqual(result, 2) + + # This test's id is unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers. + # This test passes. + def test_subtract_negative_numbers( # test_marker--test_subtract_negative_numbers + self, + ): + result = subtract(-2, -3) + self.assertEqual(result, 1) diff --git a/pythonFiles/tests/pytestadapter/.data/unittest_pytest_same_file.py b/pythonFiles/tests/pytestadapter/.data/unittest_pytest_same_file.py new file mode 100644 index 000000000000..ac66779b9cbe --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/unittest_pytest_same_file.py @@ -0,0 +1,17 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import unittest + + +class TestExample(unittest.TestCase): + # This test's id is unittest_pytest_same_file.py::TestExample::test_true_unittest. + # Test type is unittest and this test passes. + def test_true_unittest(self): # test_marker--test_true_unittest + assert True + + +# This test's id is unittest_pytest_same_file.py::test_true_pytest. +# Test type is pytest and this test passes. +def test_true_pytest(): # test_marker--test_true_pytest + assert True diff --git a/pythonFiles/tests/pytestadapter/__init__.py b/pythonFiles/tests/pytestadapter/__init__.py new file mode 100644 index 000000000000..5b7f7a925cc0 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. diff --git a/pythonFiles/tests/pytestadapter/expected_discovery_test_output.py b/pythonFiles/tests/pytestadapter/expected_discovery_test_output.py new file mode 100644 index 000000000000..e1422a81c979 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/expected_discovery_test_output.py @@ -0,0 +1,473 @@ +import os +import pathlib + +from .helpers import TEST_DATA_PATH, find_test_line_number + +# This is the expected output for the empty_discovery.py file. +# └── +TEST_DATA_PATH_STR = os.fspath(TEST_DATA_PATH) +empty_discovery_pytest_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [], + "id_": TEST_DATA_PATH_STR, +} + +# This is the expected output for the simple_pytest.py file. 
+# └── simple_pytest.py +# └── test_function +simple_test_file_path = os.fspath(TEST_DATA_PATH / "simple_pytest.py") +simple_discovery_pytest_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [ + { + "name": "simple_pytest.py", + "path": simple_test_file_path, + "type_": "file", + "id_": simple_test_file_path, + "children": [ + { + "name": "test_function", + "path": simple_test_file_path, + "lineno": find_test_line_number( + "test_function", + simple_test_file_path, + ), + "type_": "test", + "id_": "simple_pytest.py::test_function", + "runID": "simple_pytest.py::test_function", + } + ], + } + ], + "id_": TEST_DATA_PATH_STR, +} + +# This is the expected output for the unittest_pytest_same_file.py file. +# ├── unittest_pytest_same_file.py +# ├── TestExample +# │ └── test_true_unittest +# └── test_true_pytest +unit_pytest_same_file_path = os.fspath(TEST_DATA_PATH / "unittest_pytest_same_file.py") +unit_pytest_same_file_discovery_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [ + { + "name": "unittest_pytest_same_file.py", + "path": unit_pytest_same_file_path, + "type_": "file", + "id_": unit_pytest_same_file_path, + "children": [ + { + "name": "TestExample", + "path": unit_pytest_same_file_path, + "type_": "class", + "children": [ + { + "name": "test_true_unittest", + "path": unit_pytest_same_file_path, + "lineno": find_test_line_number( + "test_true_unittest", + unit_pytest_same_file_path, + ), + "type_": "test", + "id_": "unittest_pytest_same_file.py::TestExample::test_true_unittest", + "runID": "unittest_pytest_same_file.py::TestExample::test_true_unittest", + } + ], + "id_": "unittest_pytest_same_file.py::TestExample", + }, + { + "name": "test_true_pytest", + "path": unit_pytest_same_file_path, + "lineno": find_test_line_number( + "test_true_pytest", + unit_pytest_same_file_path, + ), + "type_": "test", + "id_": "unittest_pytest_same_file.py::test_true_pytest", + "runID": "unittest_pytest_same_file.py::test_true_pytest", + }, + ], + } + ], + "id_": TEST_DATA_PATH_STR, +} + +# This is the expected output for the unittest_folder tests +# └── unittest_folder +# ├── test_add.py +# │ └── TestAddFunction +# │ ├── test_add_negative_numbers +# │ └── test_add_positive_numbers +# └── test_subtract.py +# └── TestSubtractFunction +# ├── test_subtract_negative_numbers +# └── test_subtract_positive_numbers +unittest_folder_path = os.fspath(TEST_DATA_PATH / "unittest_folder") +test_add_path = os.fspath(TEST_DATA_PATH / "unittest_folder" / "test_add.py") +test_subtract_path = os.fspath(TEST_DATA_PATH / "unittest_folder" / "test_subtract.py") +unittest_folder_discovery_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [ + { + "name": "unittest_folder", + "path": unittest_folder_path, + "type_": "folder", + "id_": unittest_folder_path, + "children": [ + { + "name": "test_add.py", + "path": test_add_path, + "type_": "file", + "id_": test_add_path, + "children": [ + { + "name": "TestAddFunction", + "path": test_add_path, + "type_": "class", + "children": [ + { + "name": "test_add_negative_numbers", + "path": test_add_path, + "lineno": find_test_line_number( + "test_add_negative_numbers", + test_add_path, + ), + "type_": "test", + "id_": "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers", + "runID": "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers", + }, + { + "name": "test_add_positive_numbers", + 
"path": test_add_path, + "lineno": find_test_line_number( + "test_add_positive_numbers", + test_add_path, + ), + "type_": "test", + "id_": "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers", + "runID": "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers", + }, + ], + "id_": "unittest_folder/test_add.py::TestAddFunction", + } + ], + }, + { + "name": "test_subtract.py", + "path": test_subtract_path, + "type_": "file", + "id_": test_subtract_path, + "children": [ + { + "name": "TestSubtractFunction", + "path": test_subtract_path, + "type_": "class", + "children": [ + { + "name": "test_subtract_negative_numbers", + "path": test_subtract_path, + "lineno": find_test_line_number( + "test_subtract_negative_numbers", + test_subtract_path, + ), + "type_": "test", + "id_": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers", + "runID": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers", + }, + { + "name": "test_subtract_positive_numbers", + "path": test_subtract_path, + "lineno": find_test_line_number( + "test_subtract_positive_numbers", + test_subtract_path, + ), + "type_": "test", + "id_": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers", + "runID": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers", + }, + ], + "id_": "unittest_folder/test_subtract.py::TestSubtractFunction", + } + ], + }, + ], + } + ], + "id_": TEST_DATA_PATH_STR, +} + +# This is the expected output for the dual_level_nested_folder tests +# └── dual_level_nested_folder +# └── test_top_folder.py +# └── test_top_function_t +# └── test_top_function_f +# └── nested_folder_one +# └── test_bottom_folder.py +# └── test_bottom_function_t +# └── test_bottom_function_f +dual_level_nested_folder_path = os.fspath(TEST_DATA_PATH / "dual_level_nested_folder") +test_top_folder_path = os.fspath( + TEST_DATA_PATH / "dual_level_nested_folder" / "test_top_folder.py" +) +test_nested_folder_one_path = os.fspath( + TEST_DATA_PATH / "dual_level_nested_folder" / "nested_folder_one" +) +test_bottom_folder_path = os.fspath( + TEST_DATA_PATH + / "dual_level_nested_folder" + / "nested_folder_one" + / "test_bottom_folder.py" +) + +dual_level_nested_folder_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [ + { + "name": "dual_level_nested_folder", + "path": dual_level_nested_folder_path, + "type_": "folder", + "id_": dual_level_nested_folder_path, + "children": [ + { + "name": "test_top_folder.py", + "path": test_top_folder_path, + "type_": "file", + "id_": test_top_folder_path, + "children": [ + { + "name": "test_top_function_t", + "path": test_top_folder_path, + "lineno": find_test_line_number( + "test_top_function_t", + test_top_folder_path, + ), + "type_": "test", + "id_": "dual_level_nested_folder/test_top_folder.py::test_top_function_t", + "runID": "dual_level_nested_folder/test_top_folder.py::test_top_function_t", + }, + { + "name": "test_top_function_f", + "path": test_top_folder_path, + "lineno": find_test_line_number( + "test_top_function_f", + test_top_folder_path, + ), + "type_": "test", + "id_": "dual_level_nested_folder/test_top_folder.py::test_top_function_f", + "runID": "dual_level_nested_folder/test_top_folder.py::test_top_function_f", + }, + ], + }, + { + "name": "nested_folder_one", + "path": test_nested_folder_one_path, + "type_": "folder", + "id_": test_nested_folder_one_path, + "children": [ 
+ { + "name": "test_bottom_folder.py", + "path": test_bottom_folder_path, + "type_": "file", + "id_": test_bottom_folder_path, + "children": [ + { + "name": "test_bottom_function_t", + "path": test_bottom_folder_path, + "lineno": find_test_line_number( + "test_bottom_function_t", + test_bottom_folder_path, + ), + "type_": "test", + "id_": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + "runID": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + }, + { + "name": "test_bottom_function_f", + "path": test_bottom_folder_path, + "lineno": find_test_line_number( + "test_bottom_function_f", + test_bottom_folder_path, + ), + "type_": "test", + "id_": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + "runID": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + }, + ], + } + ], + }, + ], + } + ], + "id_": TEST_DATA_PATH_STR, +} + +# This is the expected output for the double_nested_folder tests. +# └── double_nested_folder +# └── nested_folder_one +# └── nested_folder_two +# └── test_nest.py +# └── test_function +double_nested_folder_path = os.fspath(TEST_DATA_PATH / "double_nested_folder") +double_nested_folder_one_path = os.fspath( + TEST_DATA_PATH / "double_nested_folder" / "nested_folder_one" +) +double_nested_folder_two_path = os.fspath( + TEST_DATA_PATH / "double_nested_folder" / "nested_folder_one" / "nested_folder_two" +) +double_nested_test_nest_path = os.fspath( + TEST_DATA_PATH + / "double_nested_folder" + / "nested_folder_one" + / "nested_folder_two" + / "test_nest.py" +) +double_nested_folder_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [ + { + "name": "double_nested_folder", + "path": double_nested_folder_path, + "type_": "folder", + "id_": double_nested_folder_path, + "children": [ + { + "name": "nested_folder_one", + "path": double_nested_folder_one_path, + "type_": "folder", + "id_": double_nested_folder_one_path, + "children": [ + { + "name": "nested_folder_two", + "path": double_nested_folder_two_path, + "type_": "folder", + "id_": double_nested_folder_two_path, + "children": [ + { + "name": "test_nest.py", + "path": double_nested_test_nest_path, + "type_": "file", + "id_": double_nested_test_nest_path, + "children": [ + { + "name": "test_function", + "path": double_nested_test_nest_path, + "lineno": find_test_line_number( + "test_function", + double_nested_test_nest_path, + ), + "type_": "test", + "id_": "double_nested_folder/nested_folder_one/nested_folder_two/test_nest.py::test_function", + "runID": "double_nested_folder/nested_folder_one/nested_folder_two/test_nest.py::test_function", + } + ], + } + ], + } + ], + } + ], + } + ], + "id_": TEST_DATA_PATH_STR, +} + +# This is the expected output for the nested_folder tests. 
+# └── parametrize_tests.py +# └── test_adding[3+5-8] +# └── test_adding[2+4-6] +# └── test_adding[6+9-16] +parameterize_tests_path = os.fspath(TEST_DATA_PATH / "parametrize_tests.py") +parametrize_tests_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [ + { + "name": "parametrize_tests.py", + "path": parameterize_tests_path, + "type_": "file", + "id_": parameterize_tests_path, + "children": [ + { + "name": "test_adding[3+5-8]", + "path": parameterize_tests_path, + "lineno": find_test_line_number( + "test_adding[3+5-8]", + parameterize_tests_path, + ), + "type_": "test", + "id_": "parametrize_tests.py::test_adding[3+5-8]", + "runID": "parametrize_tests.py::test_adding[3+5-8]", + }, + { + "name": "test_adding[2+4-6]", + "path": parameterize_tests_path, + "lineno": find_test_line_number( + "test_adding[2+4-6]", + parameterize_tests_path, + ), + "type_": "test", + "id_": "parametrize_tests.py::test_adding[2+4-6]", + "runID": "parametrize_tests.py::test_adding[2+4-6]", + }, + { + "name": "test_adding[6+9-16]", + "path": parameterize_tests_path, + "lineno": find_test_line_number( + "test_adding[6+9-16]", + parameterize_tests_path, + ), + "type_": "test", + "id_": "parametrize_tests.py::test_adding[6+9-16]", + "runID": "parametrize_tests.py::test_adding[6+9-16]", + }, + ], + } + ], + "id_": TEST_DATA_PATH_STR, +} + +# This is the expected output for the text_docstring.txt tests. +# └── text_docstring.txt +text_docstring_path = os.fspath(TEST_DATA_PATH / "text_docstring.txt") +doctest_pytest_expected_output = { + "name": ".data", + "path": TEST_DATA_PATH_STR, + "type_": "folder", + "children": [ + { + "name": "text_docstring.txt", + "path": text_docstring_path, + "type_": "file", + "id_": text_docstring_path, + "children": [ + { + "name": "text_docstring.txt", + "path": text_docstring_path, + "lineno": find_test_line_number( + "text_docstring.txt", + text_docstring_path, + ), + "type_": "test", + "id_": "text_docstring.txt::text_docstring.txt", + "runID": "text_docstring.txt::text_docstring.txt", + } + ], + } + ], + "id_": TEST_DATA_PATH_STR, +} diff --git a/pythonFiles/tests/pytestadapter/helpers.py b/pythonFiles/tests/pytestadapter/helpers.py new file mode 100644 index 000000000000..8d485456c145 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/helpers.py @@ -0,0 +1,161 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+ +import contextlib +import io +import json +import os +import pathlib +import random +import socket +import subprocess +import sys +import uuid +from typing import Dict, List, Union + +TEST_DATA_PATH = pathlib.Path(__file__).parent / ".data" +from typing_extensions import TypedDict + + +@contextlib.contextmanager +def test_output_file(root: pathlib.Path, ext: str = ".txt"): + """Creates a temporary python file with a random name.""" + basename = ( + "".join(random.choice("abcdefghijklmnopqrstuvwxyz") for _ in range(9)) + ext + ) + fullpath = root / basename + try: + fullpath.write_text("", encoding="utf-8") + yield fullpath + finally: + os.unlink(str(fullpath)) + + +def create_server( + host: str = "127.0.0.1", + port: int = 0, + backlog: int = socket.SOMAXCONN, + timeout: int = 1000, +) -> socket.socket: + """Return a local server socket listening on the given port.""" + server: socket.socket = _new_sock() + if port: + # If binding to a specific port, make sure that the user doesn't have + # to wait until the OS times out waiting for socket in order to use + # that port again if the server or the adapter crash or are force-killed. + if sys.platform == "win32": + server.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + try: + server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + except (AttributeError, OSError): + pass # Not available everywhere + server.bind((host, port)) + if timeout: + server.settimeout(timeout) + server.listen(backlog) + return server + + +def _new_sock() -> socket.socket: + sock: socket.socket = socket.socket( + socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP + ) + options = [ + ("SOL_SOCKET", "SO_KEEPALIVE", 1), + ("IPPROTO_TCP", "TCP_KEEPIDLE", 1), + ("IPPROTO_TCP", "TCP_KEEPINTVL", 3), + ("IPPROTO_TCP", "TCP_KEEPCNT", 5), + ] + + for level, name, value in options: + try: + sock.setsockopt(getattr(socket, level), getattr(socket, name), value) + except (AttributeError, OSError): + pass # May not be available everywhere. 
+ + return sock + + +CONTENT_LENGTH: str = "Content-Length:" +Env_Dict = TypedDict( + "Env_Dict", {"TEST_UUID": str, "TEST_PORT": str, "PYTHONPATH": str} +) + + +def process_rpc_json(data: str) -> Dict[str, str]: + """Process the JSON data which comes from the server which runs the pytest discovery.""" + str_stream: io.StringIO = io.StringIO(data) + + length: int = 0 + + while True: + line: str = str_stream.readline() + if CONTENT_LENGTH.lower() in line.lower(): + length = int(line[len(CONTENT_LENGTH) :]) + break + + if not line or line.isspace(): + raise ValueError("Header does not contain Content-Length") + + while True: + line: str = str_stream.readline() + if not line or line.isspace(): + break + + raw_json: str = str_stream.read(length) + return json.loads(raw_json) + + +def runner(args: List[str]) -> Union[Dict[str, str], None]: + """Run the pytest discovery and return the JSON data from the server.""" + process_args: List[str] = [ + sys.executable, + "-m", + "pytest", + "-p", + "vscode_pytest", + ] + args + + with test_output_file(TEST_DATA_PATH) as output_path: + env = os.environ.copy() + env.update( + { + "TEST_UUID": str(uuid.uuid4()), + "TEST_PORT": str(12345), # port is not used for tests + "PYTHONPATH": os.fspath(pathlib.Path(__file__).parent.parent.parent), + "TEST_OUTPUT_FILE": os.fspath(output_path), + } + ) + + result = subprocess.run( + process_args, + env=env, + cwd=os.fspath(TEST_DATA_PATH), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + if result.returncode != 0: + print("Subprocess Run failed with:") + print(result.stdout.decode(encoding="utf-8")) + print(result.stderr.decode(encoding="utf-8")) + + return process_rpc_json(output_path.read_text(encoding="utf-8")) + + +def find_test_line_number(test_name: str, test_file_path) -> str: + """Function which finds the correct line number for a test by looking for the "test_marker--[test_name]" string. + + The test_name is split on the "[" character to remove the parameterization information. + + Args: + test_name: The name of the test to find the line number for, will be unique per file. + test_file_path: The path to the test file where the test is located. + """ + test_file_unique_id: str = "test_marker--" + test_name.split("[")[0] + with open(test_file_path) as f: + for i, line in enumerate(f): + if test_file_unique_id in line: + return str(i + 1) + error_str: str = f"Test {test_name!r} not found on any line in {test_file_path}" + raise ValueError(error_str) diff --git a/pythonFiles/tests/pytestadapter/test_discovery.py b/pythonFiles/tests/pytestadapter/test_discovery.py new file mode 100644 index 000000000000..57fa9d624bd6 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/test_discovery.py @@ -0,0 +1,112 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import os +import shutil +import signal + +import pytest + +from . import expected_discovery_test_output +from .helpers import TEST_DATA_PATH, runner + + +def test_syntax_error(tmp_path): + """Test pytest discovery on a file that has a syntax error. + + Copies the contents of a .txt file to a .py file in the temporary directory + to then run pytest discovery on. + + The json should still be returned but the errors list should be present. + + Keyword arguments: + tmp_path -- pytest fixture that creates a temporary directory. + """ + # Saving some files as .txt to avoid that file displaying a syntax error for + # the extension as a whole. 
Instead, rename it before running this test + # in order to test the error handling. + file_path = TEST_DATA_PATH / "error_syntax_discovery.txt" + temp_dir = tmp_path / "temp_data" + temp_dir.mkdir() + p = temp_dir / "error_syntax_discovery.py" + shutil.copyfile(file_path, p) + actual = runner(["--collect-only", os.fspath(p)]) + assert actual + assert all(item in actual for item in ("status", "cwd", "errors")) + assert actual["status"] == "error" + assert actual["cwd"] == os.fspath(TEST_DATA_PATH) + assert len(actual["errors"]) == 2 + + +def test_parameterized_error_collect(): + """Tests pytest discovery on specific file that incorrectly uses parametrize. + + The json should still be returned but the errors list should be present. + """ + file_path_str = "error_parametrize_discovery.py" + actual = runner(["--collect-only", file_path_str]) + assert actual + assert all(item in actual for item in ("status", "cwd", "errors")) + assert actual["status"] == "error" + assert actual["cwd"] == os.fspath(TEST_DATA_PATH) + assert len(actual["errors"]) == 2 + + +@pytest.mark.parametrize( + "file, expected_const", + [ + ( + "parametrize_tests.py", + expected_discovery_test_output.parametrize_tests_expected_output, + ), + ( + "empty_discovery.py", + expected_discovery_test_output.empty_discovery_pytest_expected_output, + ), + ( + "simple_pytest.py", + expected_discovery_test_output.simple_discovery_pytest_expected_output, + ), + ( + "unittest_pytest_same_file.py", + expected_discovery_test_output.unit_pytest_same_file_discovery_expected_output, + ), + ( + "unittest_folder", + expected_discovery_test_output.unittest_folder_discovery_expected_output, + ), + ( + "dual_level_nested_folder", + expected_discovery_test_output.dual_level_nested_folder_expected_output, + ), + ( + "double_nested_folder", + expected_discovery_test_output.double_nested_folder_expected_output, + ), + ( + "text_docstring.txt", + expected_discovery_test_output.doctest_pytest_expected_output, + ), + ], +) +def test_pytest_collect(file, expected_const): + """ + Test to test pytest discovery on a variety of test files/ folder structures. + Uses variables from expected_discovery_test_output.py to store the expected dictionary return. + Only handles discovery and therefore already contains the arg --collect-only. + All test discovery will succeed, be in the correct cwd, and match expected test output. + + Keyword arguments: + file -- a string with the file or folder to run pytest discovery on. + expected_const -- the expected output from running pytest discovery on the file. 
+ """ + actual = runner( + [ + "--collect-only", + os.fspath(TEST_DATA_PATH / file), + ] + ) + assert actual + assert all(item in actual for item in ("status", "cwd", "tests")) + assert actual["status"] == "success" + assert actual["cwd"] == os.fspath(TEST_DATA_PATH) + assert actual["tests"] == expected_const diff --git a/pythonFiles/vscode_pytest/__init__.py b/pythonFiles/vscode_pytest/__init__.py new file mode 100644 index 000000000000..22acaab57953 --- /dev/null +++ b/pythonFiles/vscode_pytest/__init__.py @@ -0,0 +1,334 @@ +import json +import os +import pathlib +import sys +import traceback + +import pytest + +script_dir = pathlib.Path(__file__).parent.parent +sys.path.append(os.fspath(script_dir)) +sys.path.append(os.fspath(script_dir / "lib" / "python")) + +from typing import Any, Dict, List, Optional, Union + +from testing_tools import socket_manager +from typing_extensions import Literal, TypedDict + + +class TestData(TypedDict): + """A general class that all test objects inherit from.""" + + name: str + path: str + type_: Literal["class", "file", "folder", "test", "error"] + id_: str + + +class TestItem(TestData): + """A class defining test items.""" + + lineno: str + runID: str + + +class TestNode(TestData): + """A general class that handles all test data which contains children.""" + + children: "list[Union[TestNode, TestItem, None]]" + + +class VSCodePytestError(Exception): + """A custom exception class for pytest errors.""" + + def __init__(self, message): + super().__init__(message) + + +ERRORS = [] + + +def pytest_internalerror(excrepr, excinfo): + """A pytest hook that is called when an internal error occurs. + + Keyword arguments: + excrepr -- the exception representation. + excinfo -- the exception information of type ExceptionInfo. + """ + # call.excinfo.exconly() returns the exception as a string. + ERRORS.append(excinfo.exconly()) + + +def pytest_exception_interact(node, call, report): + """A pytest hook that is called when an exception is raised which could be handled. + + Keyword arguments: + node -- the node that raised the exception. + call -- the call object. + report -- the report object of either type CollectReport or TestReport. + """ + # call.excinfo is the captured exception of the call, if it raised as type ExceptionInfo. + # call.excinfo.exconly() returns the exception as a string. + ERRORS.append(call.excinfo.exconly()) + + +def pytest_keyboard_interrupt(excinfo): + """A pytest hook that is called when a keyboard interrupt is raised. + + Keyword arguments: + excinfo -- the exception information of type ExceptionInfo. + """ + # The function execonly() returns the exception as a string. + ERRORS.append(excinfo.exconly()) + + +def pytest_sessionfinish(session, exitstatus): + """A pytest hook that is called after pytest has fulled finished. + + Keyword arguments: + session -- the pytest session object. + exitstatus -- the status code of the session. 
+ """ + cwd = pathlib.Path.cwd() + try: + session_node: Union[TestNode, None] = build_test_tree(session) + if not session_node: + raise VSCodePytestError( + "Something went wrong following pytest finish, \ + no session node was created" + ) + post_response(os.fsdecode(cwd), session_node) + except Exception as e: + ERRORS.append( + f"Error Occurred, traceback: {(traceback.format_exc() if e.__traceback__ else '')}" + ) + errorNode: TestNode = { + "name": "", + "path": "", + "type_": "error", + "children": [], + "id_": "", + } + post_response(os.fsdecode(cwd), errorNode) + + +def build_test_tree(session: pytest.Session) -> TestNode: + """Builds a tree made up of testing nodes from the pytest session. + + Keyword arguments: + session -- the pytest session object. + """ + session_node = create_session_node(session) + session_children_dict: Dict[str, TestNode] = {} + file_nodes_dict: Dict[Any, TestNode] = {} + class_nodes_dict: Dict[str, TestNode] = {} + + for test_case in session.items: + test_node = create_test_node(test_case) + if isinstance(test_case.parent, pytest.Class): + try: + test_class_node = class_nodes_dict[test_case.parent.name] + except KeyError: + test_class_node = create_class_node(test_case.parent) + class_nodes_dict[test_case.parent.name] = test_class_node + test_class_node["children"].append(test_node) + if test_case.parent.parent: + parent_module = test_case.parent.parent + else: + ERRORS.append(f"Test class {test_case.parent} has no parent") + break + # Create a file node that has the class as a child. + try: + test_file_node: TestNode = file_nodes_dict[parent_module] + except KeyError: + test_file_node = create_file_node(parent_module) + file_nodes_dict[parent_module] = test_file_node + # Check if the class is already a child of the file node. + if test_class_node not in test_file_node["children"]: + test_file_node["children"].append(test_class_node) + else: # This includes test cases that are pytest functions or a doctests. + try: + parent_test_case = file_nodes_dict[test_case.parent] + except KeyError: + parent_test_case = create_file_node(test_case.parent) + file_nodes_dict[test_case.parent] = parent_test_case + parent_test_case["children"].append(test_node) + created_files_folders_dict: Dict[str, TestNode] = {} + for file_module, file_node in file_nodes_dict.items(): + # Iterate through all the files that exist and construct them into nested folders. + root_folder_node: TestNode = build_nested_folders( + file_module, file_node, created_files_folders_dict, session + ) + # The final folder we get to is the highest folder in the path + # and therefore we add this as a child to the session. + root_id = root_folder_node.get("id_") + if root_id and root_id not in session_children_dict: + session_children_dict[root_id] = root_folder_node + session_node["children"] = list(session_children_dict.values()) + return session_node + + +def build_nested_folders( + file_module: Any, + file_node: TestNode, + created_files_folders_dict: Dict[str, TestNode], + session: pytest.Session, +) -> TestNode: + """Takes a file or folder and builds the nested folder structure for it. + + Keyword arguments: + file_module -- the created module for the file we are nesting. + file_node -- the file node that we are building the nested folders for. + created_files_folders_dict -- Dictionary of all the folders and files that have been created. + session -- the pytest session object. + """ + prev_folder_node = file_node + + # Begin the iterator_path one level above the current file. 
+ iterator_path = file_module.path.parent + while iterator_path != session.path: + curr_folder_name = iterator_path.name + try: + curr_folder_node: TestNode = created_files_folders_dict[curr_folder_name] + except KeyError: + curr_folder_node: TestNode = create_folder_node( + curr_folder_name, iterator_path + ) + created_files_folders_dict[curr_folder_name] = curr_folder_node + if prev_folder_node not in curr_folder_node["children"]: + curr_folder_node["children"].append(prev_folder_node) + iterator_path = iterator_path.parent + prev_folder_node = curr_folder_node + return prev_folder_node + + +def create_test_node( + test_case: pytest.Item, +) -> TestItem: + """Creates a test node from a pytest test case. + + Keyword arguments: + test_case -- the pytest test case. + """ + test_case_loc: str = ( + str(test_case.location[1] + 1) if (test_case.location[1] is not None) else "" + ) + return { + "name": test_case.name, + "path": os.fspath(test_case.path), + "lineno": test_case_loc, + "type_": "test", + "id_": test_case.nodeid, + "runID": test_case.nodeid, + } + + +def create_session_node(session: pytest.Session) -> TestNode: + """Creates a session node from a pytest session. + + Keyword arguments: + session -- the pytest session. + """ + return { + "name": session.name, + "path": os.fspath(session.path), + "type_": "folder", + "children": [], + "id_": os.fspath(session.path), + } + + +def create_class_node(class_module: pytest.Class) -> TestNode: + """Creates a class node from a pytest class object. + + Keyword arguments: + class_module -- the pytest object representing a class module. + """ + return { + "name": class_module.name, + "path": os.fspath(class_module.path), + "type_": "class", + "children": [], + "id_": class_module.nodeid, + } + + +def create_file_node(file_module: Any) -> TestNode: + """Creates a file node from a pytest file module. + + Keyword arguments: + file_module -- the pytest file module. + """ + return { + "name": file_module.path.name, + "path": os.fspath(file_module.path), + "type_": "file", + "id_": os.fspath(file_module.path), + "children": [], + } + + +def create_folder_node(folderName: str, path_iterator: pathlib.Path) -> TestNode: + """Creates a folder node from a pytest folder name and its path. + + Keyword arguments: + folderName -- the name of the folder. + path_iterator -- the path of the folder. + """ + return { + "name": folderName, + "path": os.fspath(path_iterator), + "type_": "folder", + "id_": os.fspath(path_iterator), + "children": [], + } + + +class PayloadDict(TypedDict): + """A dictionary that is used to send a post request to the server.""" + + cwd: str + status: Literal["success", "error"] + tests: Optional[TestNode] + errors: Optional[List[str]] + + +def post_response(cwd: str, session_node: TestNode) -> None: + """Sends a post request to the server. + + Keyword arguments: + cwd -- the current working directory. + session_node -- the session node, which is the top of the testing tree. + errors -- a list of errors that occurred during test collection. 
+ """ + payload: PayloadDict = { + "cwd": cwd, + "status": "success" if not ERRORS else "error", + "tests": session_node, + "errors": [], + } + if ERRORS: + payload["errors"] = ERRORS + + testPort: Union[str, int] = os.getenv("TEST_PORT", 45454) + testuuid: Union[str, None] = os.getenv("TEST_UUID") + addr = "localhost", int(testPort) + data = json.dumps(payload) + request = f"""Content-Length: {len(data)} +Content-Type: application/json +Request-uuid: {testuuid} + +{data}""" + test_output_file: Optional[str] = os.getenv("TEST_OUTPUT_FILE", None) + if test_output_file == "stdout": + print(request) + elif test_output_file: + pathlib.Path(test_output_file).write_text(request, encoding="utf-8") + else: + try: + with socket_manager.SocketManager(addr) as s: + if s.socket is not None: + s.socket.sendall(request.encode("utf-8")) + except Exception as e: + print(f"Plugin error connection error[vscode-pytest]: {e}") + print(f"[vscode-pytest] data: {request}") diff --git a/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts b/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts index 2787ea180bdb..379f1ea9d12c 100644 --- a/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts +++ b/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts @@ -45,16 +45,15 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { // const { pytestArgs } = settings.testing; // traceVerbose(pytestArgs); - // this.cwd = uri.fsPath; // return this.runPytestDiscovery(uri, executionFactory); // } async runPytestDiscovery(uri: Uri, executionFactory: IPythonExecutionFactory): Promise { const deferred = createDeferred(); - this.deferred = createDeferred(); const relativePathToPytest = 'pythonFiles'; const fullPluginPath = path.join(EXTENSION_ROOT_DIR, relativePathToPytest); const uuid = this.testServer.createUUID(uri.fsPath); + this.promiseMap.set(uuid, deferred); const settings = this.configSettings.getSettings(uri); const { pytestArgs } = settings.testing; @@ -86,7 +85,6 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { } catch (ex) { console.error(ex); } - return deferred.promise; } } diff --git a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts index 25b7d92b767b..70aa2698c0d1 100644 --- a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts +++ b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts @@ -34,60 +34,61 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { this.deferred = createDeferred(); return this.deferred.promise; } +} - // public async runTests( - // uri: Uri, - // testIds: string[], - // debugBool?: boolean, - // executionFactory?: IPythonExecutionFactory, - // ): Promise { - // const deferred = createDeferred(); - // const relativePathToPytest = 'pythonFiles'; - // const fullPluginPath = path.join(EXTENSION_ROOT_DIR, relativePathToPytest); - // this.configSettings.isTestExecution(); - // const uuid = this.testServer.createUUID(uri.fsPath); - // this.promiseMap.set(uuid, deferred); - // const settings = this.configSettings.getSettings(uri); - // const { pytestArgs } = settings.testing; +// public async runTests( +// uri: Uri, +// testIds: string[], +// debugBool?: boolean, +// executionFactory?: IPythonExecutionFactory, +// ): Promise { +// const deferred = createDeferred(); +// const relativePathToPytest = 'pythonFiles'; +// const fullPluginPath = 
path.join(EXTENSION_ROOT_DIR, relativePathToPytest); +// this.configSettings.isTestExecution(); +// const uuid = this.testServer.createUUID(uri.fsPath); +// this.promiseMap.set(uuid, deferred); +// const settings = this.configSettings.getSettings(uri); +// const { pytestArgs } = settings.testing; - // const pythonPathParts: string[] = process.env.PYTHONPATH?.split(path.delimiter) ?? []; - // const pythonPathCommand = [fullPluginPath, ...pythonPathParts].join(path.delimiter); +// const pythonPathParts: string[] = process.env.PYTHONPATH?.split(path.delimiter) ?? []; +// const pythonPathCommand = [fullPluginPath, ...pythonPathParts].join(path.delimiter); - // const spawnOptions: SpawnOptions = { - // cwd: uri.fsPath, - // throwOnStdErr: true, - // extraVariables: { - // PYTHONPATH: pythonPathCommand, - // TEST_UUID: uuid.toString(), - // TEST_PORT: this.testServer.getPort().toString(), - // }, - // }; +// const spawnOptions: SpawnOptions = { +// cwd: uri.fsPath, +// throwOnStdErr: true, +// extraVariables: { +// PYTHONPATH: pythonPathCommand, +// TEST_UUID: uuid.toString(), +// TEST_PORT: this.testServer.getPort().toString(), +// }, +// }; - // // Create the Python environment in which to execute the command. - // const creationOptions: ExecutionFactoryCreateWithEnvironmentOptions = { - // allowEnvironmentFetchExceptions: false, - // resource: uri, - // }; - // // need to check what will happen in the exec service is NOT defined and is null - // const execService = await executionFactory?.createActivatedEnvironment(creationOptions); +// // Create the Python environment in which to execute the command. +// const creationOptions: ExecutionFactoryCreateWithEnvironmentOptions = { +// allowEnvironmentFetchExceptions: false, +// resource: uri, +// }; +// // need to check what will happen in the exec service is NOT defined and is null +// const execService = await executionFactory?.createActivatedEnvironment(creationOptions); - // const testIdsString = testIds.join(' '); - // console.debug('what to do with debug bool?', debugBool); - // try { - // execService?.exec(['-m', 'pytest', '-p', 'vscode_pytest', testIdsString].concat(pytestArgs), spawnOptions); - // } catch (ex) { - // console.error(ex); - // } +// const testIdsString = testIds.join(' '); +// console.debug('what to do with debug bool?', debugBool); +// try { +// execService?.exec(['-m', 'pytest', '-p', 'vscode_pytest', testIdsString].concat(pytestArgs), spawnOptions); +// } catch (ex) { +// console.error(ex); +// } - // return deferred.promise; - // } - // } +// return deferred.promise; +// } +// } - // function buildExecutionCommand(args: string[]): TestExecutionCommand { - // const executionScript = path.join(EXTENSION_ROOT_DIR, 'pythonFiles', 'unittestadapter', 'execution.py'); +// function buildExecutionCommand(args: string[]): TestExecutionCommand { +// const executionScript = path.join(EXTENSION_ROOT_DIR, 'pythonFiles', 'unittestadapter', 'execution.py'); - // return { - // script: executionScript, - // args: ['--udiscovery', ...args], - // }; -} +// return { +// script: executionScript, +// args: ['--udiscovery', ...args], +// }; +// } diff --git a/src/client/testing/testController/unittest/testExecutionAdapter.ts b/src/client/testing/testController/unittest/testExecutionAdapter.ts index 98ba8bfeb937..0bca2778ef75 100644 --- a/src/client/testing/testController/unittest/testExecutionAdapter.ts +++ b/src/client/testing/testController/unittest/testExecutionAdapter.ts @@ -37,7 +37,6 @@ export class UnittestTestExecutionAdapter implements 
ITestExecutionAdapter { } public async runTests(uri: Uri, testIds: string[], debugBool?: boolean): Promise { - const deferred = createDeferred(); const settings = this.configSettings.getSettings(uri); const { unittestArgs } = settings.testing; @@ -54,10 +53,11 @@ export class UnittestTestExecutionAdapter implements ITestExecutionAdapter { testIds, }; + const deferred = createDeferred(); this.promiseMap.set(uuid, deferred); - // send test command to server - // server fire onDataReceived event once it gets response + // Send test command to server. + // Server fire onDataReceived event once it gets response. this.testServer.sendCommand(options); return deferred.promise; diff --git a/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts b/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts new file mode 100644 index 000000000000..f5a2203dbd9a --- /dev/null +++ b/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts @@ -0,0 +1,91 @@ +// /* eslint-disable @typescript-eslint/no-explicit-any */ +// // Copyright (c) Microsoft Corporation. All rights reserved. +// // Licensed under the MIT License. +// import * as assert from 'assert'; +// import { Uri } from 'vscode'; +// import * as typeMoq from 'typemoq'; +// import { IConfigurationService } from '../../../../client/common/types'; +// import { PytestTestDiscoveryAdapter } from '../../../../client/testing/testController/pytest/pytestDiscoveryAdapter'; +// import { DataReceivedEvent, ITestServer } from '../../../../client/testing/testController/common/types'; +// import { IPythonExecutionFactory, IPythonExecutionService } from '../../../../client/common/process/types'; +// import { createDeferred, Deferred } from '../../../../client/common/utils/async'; + +// suite('pytest test discovery adapter', () => { +// let testServer: typeMoq.IMock; +// let configService: IConfigurationService; +// let execFactory = typeMoq.Mock.ofType(); +// let adapter: PytestTestDiscoveryAdapter; +// let execService: typeMoq.IMock; +// let deferred: Deferred; +// setup(() => { +// testServer = typeMoq.Mock.ofType(); +// testServer.setup((t) => t.getPort()).returns(() => 12345); +// testServer +// .setup((t) => t.onDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) +// .returns(() => ({ +// dispose: () => { +// /* no-body */ +// }, +// })); +// configService = ({ +// getSettings: () => ({ +// testing: { pytestArgs: ['.'] }, +// }), +// } as unknown) as IConfigurationService; +// execFactory = typeMoq.Mock.ofType(); +// execService = typeMoq.Mock.ofType(); +// execFactory +// .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) +// .returns(() => Promise.resolve(execService.object)); +// deferred = createDeferred(); +// execService +// .setup((x) => x.exec(typeMoq.It.isAny(), typeMoq.It.isAny())) +// .returns(() => { +// deferred.resolve(); +// return Promise.resolve({ stdout: '{}' }); +// }); +// execFactory.setup((p) => ((p as unknown) as any).then).returns(() => undefined); +// execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); +// }); +// test('onDataReceivedHandler should parse only if known UUID', async () => { +// const uri = Uri.file('/my/test/path/'); +// const uuid = 'uuid123'; +// const data = { status: 'success' }; +// testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); +// const eventData: DataReceivedEvent = { +// uuid, +// data: JSON.stringify(data), +// }; + +// adapter = new PytestTestDiscoveryAdapter(testServer.object, 
configService); +// // ** const promise = adapter.discoverTests(uri, execFactory.object); +// const promise = adapter.discoverTests(uri); +// await deferred.promise; +// adapter.onDataReceivedHandler(eventData); +// const result = await promise; +// assert.deepStrictEqual(result, data); +// }); +// test('onDataReceivedHandler should not parse if it is unknown UUID', async () => { +// const uri = Uri.file('/my/test/path/'); +// const uuid = 'uuid456'; +// let data = { status: 'error' }; +// testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); +// const wrongUriEventData: DataReceivedEvent = { +// uuid: 'incorrect-uuid456', +// data: JSON.stringify(data), +// }; +// adapter = new PytestTestDiscoveryAdapter(testServer.object, configService); +// // ** const promise = adapter.discoverTests(uri, execFactory.object); +// const promise = adapter.discoverTests(uri); +// adapter.onDataReceivedHandler(wrongUriEventData); + +// data = { status: 'success' }; +// const correctUriEventData: DataReceivedEvent = { +// uuid, +// data: JSON.stringify(data), +// }; +// adapter.onDataReceivedHandler(correctUriEventData); +// const result = await promise; +// assert.deepStrictEqual(result, data); +// }); +// });
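
For reference, the plugin added in pythonFiles/vscode_pytest can be driven outside the extension in roughly the way helpers.runner does in the tests above: pytest is launched with the plugin on PYTHONPATH, and on pytest_sessionfinish the plugin writes a Content-Length framed JSON payload either to the socket named by TEST_PORT or, when TEST_OUTPUT_FILE is set, to that file. The following is a minimal sketch under those assumptions; the checkout path, output file name, port value, and choice of test file are illustrative placeholders rather than part of the patch.

import json
import os
import subprocess
import sys
import uuid

# Minimal sketch (not part of the patch): run pytest discovery with the
# vscode_pytest plugin loaded and read back the framed JSON payload it writes.
repo_root = os.path.abspath("vscode-python")  # placeholder: path to an extension checkout
data_dir = os.path.join(repo_root, "pythonFiles", "tests", "pytestadapter", ".data")
output_file = os.path.join(data_dir, "discovery_output.txt")  # placeholder file name

env = os.environ.copy()
env.update(
    {
        # Put pythonFiles on PYTHONPATH so the vscode_pytest plugin is importable.
        "PYTHONPATH": os.path.join(repo_root, "pythonFiles"),
        "TEST_UUID": str(uuid.uuid4()),
        "TEST_PORT": "12345",  # unused here because TEST_OUTPUT_FILE is set
        "TEST_OUTPUT_FILE": output_file,
    }
)
subprocess.run(
    [sys.executable, "-m", "pytest", "-p", "vscode_pytest", "--collect-only", "simple_pytest.py"],
    cwd=data_dir,
    env=env,
)

# post_response writes "Content-Length"/"Content-Type"/"Request-uuid" headers,
# then a blank line, then the JSON document; split on the blank line and parse the body.
with open(output_file, encoding="utf-8") as f:
    _headers, _, body = f.read().partition("\n\n")
payload = json.loads(body)
print(payload["status"], payload["cwd"])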