feat: NoVersioningSystem functionality #603

Open
wants to merge 6 commits into base: main

Conversation

thomasrockhu-codecov
Contributor

No description provided.


codecov bot commented Jan 26, 2025

❌ 3 Tests Failed:

Tests completed   Failed   Passed   Skipped
2136              3        2133     0
View the top 3 failed tests by shortest run time
api.temp.calculator.test_calculator::test_divide
Stack Traces | 0.001s run time

    def test_divide():
>       assert Calculator.divide(1, 2) == 0.5
E       assert 1.0 == 0.5
E        +  where 1.0 = <function Calculator.divide at 0x104c9eb90>(1, 2)
E        +  where <function Calculator.divide at 0x104c9eb90> = Calculator.divide

.../temp/calculator/test_calculator.py:30: AssertionError
The remaining two failures are the same api.temp.calculator.test_calculator::test_divide case with identical stack traces.
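For reference, every entry above fails on the same assertion: Calculator.divide(1, 2) returns 1.0 instead of the expected 0.5. The report does not include the Calculator source, so the snippet below is only a hypothetical reproduction of the failure, not the implementation under test:

    # Hypothetical stand-in for the module under api/temp/calculator:
    # any divide() that returns 1.0 for divide(1, 2) triggers the
    # assertion error reported above.
    class Calculator:
        @staticmethod
        def divide(a, b):
            return a / a  # bug for illustration: divide(1, 2) == 1.0

    def test_divide():
        assert Calculator.divide(1, 2) == 0.5  # fails: assert 1.0 == 0.5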

To view more test analytics, go to the Test Analytics Dashboard


github-actions bot commented Jan 26, 2025

❌ 2 Tests Failed:

Tests completed   Failed   Passed   Skipped
3540              2        3538     0
View the top 2 failed tests by shortest run time
test_fallback_collected_labels_codecov_max_wait_time_exceeded_dry_run
Stack Traces | 0.144s run time
self = <tests.commands.test_invoke_labelanalysis.TestLabelAnalysisCommand object at 0x7fbb87a53d90>
get_labelanalysis_deps = {'collected_labels': ['test_present', 'test_absent', 'test_in_diff', 'test_global'], 'fake_runner': <tests.factory.FakeRunner object at 0x7fbb846bdcc0>, 'mock_get_runner': <MagicMock name='get_runner' id='140443357416848'>}
mocker = <pytest_mock.plugin.MockerFixture object at 0x7fbb846bd8d0>
use_verbose_option = None

    def test_fallback_collected_labels_codecov_max_wait_time_exceeded_dry_run(
        self, get_labelanalysis_deps, mocker, use_verbose_option
    ):
        mock_get_runner = get_labelanalysis_deps["mock_get_runner"]
        fake_runner = get_labelanalysis_deps["fake_runner"]
        collected_labels = get_labelanalysis_deps["collected_labels"]
        mocker.patch.object(labelanalysis_time, "monotonic", side_effect=[0, 6])
    
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.POST,
                "https://api.codecov.io/labels/labels-analysis",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.PATCH,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.GET,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"state": "processing"},
            )
            cli_runner = CliRunner(mix_stderr=False)
            result = cli_runner.invoke(
                cli,
                [
                    "label-analysis",
                    "--token=STATIC_TOKEN",
                    f"--base-sha={FAKE_BASE_SHA}",
                    "--max-wait-time=5",
                    "--dry-run",
                ],
                obj={},
            )
            mock_get_runner.assert_called()
            fake_runner.process_labelanalysis_result.assert_not_called()
        # Dry run format defaults to json
>       assert json.loads(result.stdout) == {
            "runner_options": ["--labels"],
            "ats_tests_to_run": sorted(collected_labels),
            "ats_tests_to_skip": [],
            "ats_fallback_reason": "max_wait_time_exceeded",
        }

tests/commands/test_invoke_labelanalysis.py:723: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.10.16/x64/lib/python3.10/json/__init__.py:346: in loads
    return _default_decoder.decode(s)
/opt/hostedtoolcache/Python/3.10.16/x64/lib/python3.10/json/decoder.py:337: in decode
    obj, end = self.raw_decode(s, idx=_w(s, 0).end())
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <json.decoder.JSONDecoder object at 0x7fbb892c92a0>, s = '', idx = 0

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` beginning with
        a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.
    
        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
    
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration as err:
>           raise JSONDecodeError("Expecting value", s, err.value) from None
E           json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)

/opt/hostedtoolcache/Python/3.10.16/x64/lib/python3.10/json/decoder.py:355: JSONDecodeError
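The JSONDecodeError above is what json.loads raises on an empty string, which suggests result.stdout contained no output when the dry-run assertion tried to parse it, rather than malformed JSON. A minimal reproduction of the same error message:

    import json

    # json.loads("") raises exactly the error reported in the traceback.
    try:
        json.loads("")
    except json.JSONDecodeError as err:
        print(err)  # Expecting value: line 1 column 1 (char 0)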
test_fallback_collected_labels_codecov_max_wait_time_exceeded
Stack Traces | 0.212s run time
self = <tests.commands.test_invoke_labelanalysis.TestLabelAnalysisCommand object at 0x7f0f434c0aa0>
get_labelanalysis_deps = {'collected_labels': ['test_present', 'test_absent', 'test_in_diff', 'test_global'], 'fake_runner': <tests.factory.FakeRunner object at 0x7f0f40451430>, 'mock_get_runner': <MagicMock name='get_runner' id='139703479647552'>}
mocker = <pytest_mock.plugin.MockerFixture object at 0x7f0f400b35c0>
use_verbose_option = None

    def test_fallback_collected_labels_codecov_max_wait_time_exceeded(
        self, get_labelanalysis_deps, mocker, use_verbose_option
    ):
        mock_get_runner = get_labelanalysis_deps["mock_get_runner"]
        fake_runner = get_labelanalysis_deps["fake_runner"]
        collected_labels = get_labelanalysis_deps["collected_labels"]
        mocker.patch.object(labelanalysis_time, "monotonic", side_effect=[0, 6])
    
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.POST,
                "https://api.codecov.io/labels/labels-analysis",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.PATCH,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.GET,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"state": "processing"},
            )
            cli_runner = CliRunner()
            result = cli_runner.invoke(
                cli,
                [
                    "label-analysis",
                    "--token=STATIC_TOKEN",
                    f"--base-sha={FAKE_BASE_SHA}",
                    "--max-wait-time=5",
                ],
                obj={},
            )
            print(result)
>           assert result.exit_code == 0
E           assert 1 == 0
E            +  where 1 = <Result StopIteration()>.exit_code

tests/commands/test_invoke_labelanalysis.py:665: AssertionError
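The <Result StopIteration()> in the CliRunner result is consistent with the monotonic mock being exhausted: the test patches it with side_effect=[0, 6], which supplies only two return values, and a mock raises StopIteration once a finite side_effect list runs out. This is a plausible reading of the traceback rather than a confirmed root cause; the underlying mock behaviour is easy to verify on its own:

    from unittest import mock

    # A finite side_effect list yields one value per call, then raises
    # StopIteration on the next call, matching the exception wrapped in
    # <Result StopIteration()> above.
    fake_monotonic = mock.MagicMock(side_effect=[0, 6])
    print(fake_monotonic())  # 0
    print(fake_monotonic())  # 6
    try:
        fake_monotonic()
    except StopIteration:
        print("side_effect list exhausted on the third call")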

