
fix: update release pr workflow #599

Closed
thomasrockhu-codecov wants to merge 2 commits from th/fix-release-pr

Conversation

thomasrockhu-codecov
Contributor

No description provided.


codecov bot commented Jan 19, 2025

❌ 3 Tests Failed:

| Tests completed | Failed | Passed | Skipped |
| --------------- | ------ | ------ | ------- |
| 2136            | 3      | 2133   | 0       |
View the top 3 failed tests by shortest run time
api.temp.calculator.test_calculator::test_divide
Stack Traces | 0.001s run time
    def test_divide():
>       assert Calculator.divide(1, 2) == 0.5
E       assert 1.0 == 0.5
E        +  where 1.0 = <function Calculator.divide at 0x104c9eb90>(1, 2)
E        +  where <function Calculator.divide at 0x104c9eb90> = Calculator.divide

.../temp/calculator/test_calculator.py:30: AssertionError
api.temp.calculator.test_calculator::test_divide
Stack Traces | 0.001s run time
    def test_divide():
>       assert Calculator.divide(1, 2) == 0.5
E       assert 1.0 == 0.5
E        +  where 1.0 = <function Calculator.divide at 0x104c9eb90>(1, 2)
E        +  where <function Calculator.divide at 0x104c9eb90> = Calculator.divide

.../temp/calculator/test_calculator.py:30: AssertionError
api.temp.calculator.test_calculator::test_divide
Stack Traces | 0.001s run time
    def test_divide():
>       assert Calculator.divide(1, 2) == 0.5
E       assert 1.0 == 0.5
E        +  where 1.0 = <function Calculator.divide at 0x104c9eb90>(1, 2)
E        +  where <function Calculator.divide at 0x104c9eb90> = Calculator.divide

.../temp/calculator/test_calculator.py:30: AssertionError
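
All three entries point at the same assertion: `Calculator.divide(1, 2)` returns `1.0` where the test expects `0.5`. The calculator module itself is not part of this diff, but for reference, a `divide` that would satisfy the assertion is sketched below (hypothetical; the real `api/temp/calculator` implementation may differ):

```python
# Hypothetical sketch only -- the actual api/temp/calculator module is not
# shown in this PR. A divide() that satisfies the failing assertion uses
# true division, so divide(1, 2) evaluates to 0.5.
class Calculator:
    @staticmethod
    def divide(a: float, b: float) -> float:
        if b == 0:
            raise ZeroDivisionError("cannot divide by zero")
        return a / b


assert Calculator.divide(1, 2) == 0.5
```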

To view more test analytics, go to the Test Analytics Dashboard
📢 Thoughts on this report? Let us know!


github-actions bot commented Jan 19, 2025

❌ 2 Tests Failed:

| Tests completed | Failed | Passed | Skipped |
| --------------- | ------ | ------ | ------- |
| 3540            | 2      | 3538   | 0       |
View the top 2 failed tests by shortest run time
test_fallback_collected_labels_codecov_max_wait_time_exceeded
Stack Traces | 0.172s run time
self = <tests.commands.test_invoke_labelanalysis.TestLabelAnalysisCommand object at 0x7fd322f2dd90>
get_labelanalysis_deps = {'collected_labels': ['test_present', 'test_absent', 'test_in_diff', 'test_global'], 'fake_runner': <tests.factory.FakeRunner object at 0x7fd32291f250>, 'mock_get_runner': <MagicMock name='get_runner' id='140544794961840'>}
mocker = <pytest_mock.plugin.MockerFixture object at 0x7fd32175d550>
use_verbose_option = None

    def test_fallback_collected_labels_codecov_max_wait_time_exceeded(
        self, get_labelanalysis_deps, mocker, use_verbose_option
    ):
        mock_get_runner = get_labelanalysis_deps["mock_get_runner"]
        fake_runner = get_labelanalysis_deps["fake_runner"]
        collected_labels = get_labelanalysis_deps["collected_labels"]
        mocker.patch.object(labelanalysis_time, "monotonic", side_effect=[0, 6])
    
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.POST,
                "https://api.codecov.io/labels/labels-analysis",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.PATCH,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.GET,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"state": "processing"},
            )
            cli_runner = CliRunner()
            result = cli_runner.invoke(
                cli,
                [
                    "label-analysis",
                    "--token=STATIC_TOKEN",
                    f"--base-sha={FAKE_BASE_SHA}",
                    "--max-wait-time=5",
                ],
                obj={},
            )
            print(result)
>           assert result.exit_code == 0
E           assert 1 == 0
E            +  where 1 = <Result StopIteration()>.exit_code

tests/commands/test_invoke_labelanalysis.py:665: AssertionError
test_fallback_collected_labels_codecov_max_wait_time_exceeded
Stack Traces | 0.188s run time
self = <tests.commands.test_invoke_labelanalysis.TestLabelAnalysisCommand object at 0x7f557dc6b410>
get_labelanalysis_deps = {'collected_labels': ['test_present', 'test_absent', 'test_in_diff', 'test_global'], 'fake_runner': <tests.factory.FakeRunner object at 0x7f5574079bb0>, 'mock_get_runner': <MagicMock name='get_runner' id='140004995778128'>}
mocker = <pytest_mock.plugin.MockerFixture object at 0x7f5574079b20>
use_verbose_option = None

    def test_fallback_collected_labels_codecov_max_wait_time_exceeded(
        self, get_labelanalysis_deps, mocker, use_verbose_option
    ):
        mock_get_runner = get_labelanalysis_deps["mock_get_runner"]
        fake_runner = get_labelanalysis_deps["fake_runner"]
        collected_labels = get_labelanalysis_deps["collected_labels"]
        mocker.patch.object(labelanalysis_time, "monotonic", side_effect=[0, 6])
    
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.POST,
                "https://api.codecov.io/labels/labels-analysis",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.PATCH,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"external_id": "label-analysis-request-id"},
                status=201,
                match=[
                    matchers.header_matcher({"Authorization": "Repotoken STATIC_TOKEN"})
                ],
            )
            rsps.add(
                responses.GET,
                "https://api.codecov.io/labels/labels-analysis/label-analysis-request-id",
                json={"state": "processing"},
            )
            cli_runner = CliRunner()
            result = cli_runner.invoke(
                cli,
                [
                    "label-analysis",
                    "--token=STATIC_TOKEN",
                    f"--base-sha={FAKE_BASE_SHA}",
                    "--max-wait-time=5",
                ],
                obj={},
            )
            print(result)
>           assert result.exit_code == 0
E           assert 1 == 0
E            +  where 1 = <Result StopIteration()>.exit_code

tests/commands/test_invoke_labelanalysis.py:665: AssertionError
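
Both runs fail the same way: `result.exit_code` is 1 and the Click result wraps a `StopIteration`. One likely reading (an assumption, not confirmed anywhere in this PR) is that the mocked `labelanalysis_time.monotonic` with `side_effect=[0, 6]` was called a third time, since `unittest.mock` raises `StopIteration` once a `side_effect` list is exhausted. A minimal standalone sketch of that behaviour:

```python
# Minimal sketch of how an exhausted side_effect list produces StopIteration.
# This only illustrates unittest.mock behaviour; it does not reproduce the
# label-analysis command itself.
from unittest import mock

fake_monotonic = mock.Mock(side_effect=[0, 6])

print(fake_monotonic())  # 0
print(fake_monotonic())  # 6
try:
    fake_monotonic()     # third call: the queued values are used up
except StopIteration:
    print("side_effect exhausted -> StopIteration")
```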

📣 Thoughts on this report? Let Codecov know! | Powered by Codecov

@thomasrockhu-codecov thomasrockhu-codecov deleted the th/fix-release-pr branch January 21, 2025 17:25