diff -Nru spyder-unittest-0.5.1/.github/issue_template.md spyder-unittest-0.6.0/.github/issue_template.md --- spyder-unittest-0.5.1/.github/issue_template.md 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/.github/issue_template.md 2023-07-02 15:10:44.000000000 +0000 @@ -29,6 +29,6 @@ * Version of spyder-unittest plugin: * Installation method for Spyder and the unittest plugin: Anaconda / pip / ... * Python version: -* Testing framework used: nose / py.test / unittest +* Testing framework used: nose2 / pytest / unittest * Testing framework version: * Operating system: diff -Nru spyder-unittest-0.5.1/.github/scripts/generate-without-spyder.py spyder-unittest-0.6.0/.github/scripts/generate-without-spyder.py --- spyder-unittest-0.5.1/.github/scripts/generate-without-spyder.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/.github/scripts/generate-without-spyder.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# +# Copyright (c) Spyder Project Contributors +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +"""Script to generate requirements/without-spyder.txt""" + +import re + +with open('requirements/conda.txt') as infile: + with open('requirements/without-spyder.txt', 'w') as outfile: + for line in infile: + package_name = re.match('[-a-z0-9_]*', line).group(0) + if package_name != 'spyder': + outfile.write(line) + diff -Nru spyder-unittest-0.5.1/.github/workflows/linux-tests.yml spyder-unittest-0.6.0/.github/workflows/linux-tests.yml --- spyder-unittest-0.5.1/.github/workflows/linux-tests.yml 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/.github/workflows/linux-tests.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -name: Linux tests - -on: - push: - branches: - - master - pull_request: - branches: - - master - -jobs: - linux: - name: Linux Py${{ matrix.PYTHON_VERSION }} - runs-on: ubuntu-latest - env: - CI: True - PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} - RUNNER_OS: 'ubuntu' - strategy: - fail-fast: false - matrix: - PYTHON_VERSION: ['3.7', '3.8', '3.9'] - steps: - - name: Checkout branch - uses: actions/checkout@v2 - - name: Install System Packages - run: | - sudo apt-get update --fix-missing - sudo apt-get install -qq pyqt5-dev-tools libxcb-xinerama0 xterm --fix-missing - - name: Install Conda - uses: conda-incubator/setup-miniconda@v2 - with: - channels: conda-forge - auto-update-conda: true - python-version: ${{ matrix.PYTHON_VERSION }} - - name: Install package dependencies - shell: bash -l {0} - run: conda install --file requirements/conda.txt -y -q - - name: Install test dependencies - shell: bash -l {0} - run: | - conda install nomkl -y -q - conda install -c spyder-ide --file requirements/tests.txt -y -q - - name: Install Package - shell: bash -l {0} - run: pip install --no-deps -e . 
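A quick illustration of the package-name match in the new generate-without-spyder.py script above, using the two lines that requirements/conda.txt contains elsewhere in this diff (lxml and spyder>=5.4.1,<6); this is a standalone sketch, not part of the package:

    # Sketch of the package-name match used by generate-without-spyder.py.
    import re
    assert re.match('[-a-z0-9_]*', 'spyder>=5.4.1,<6\n').group(0) == 'spyder'
    assert re.match('[-a-z0-9_]*', 'lxml\n').group(0) == 'lxml'
    # Only the 'lxml' line ends up in requirements/without-spyder.txt.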
- - name: Show environment information - shell: bash -l {0} - run: | - conda info - conda list - - name: Run tests - shell: bash -l {0} - run: xvfb-run --auto-servernum pytest spyder_unittest --cov=spyder_unittest -x -vv - timeout-minutes: 10 - - name: Upload coverage to Codecov - if: matrix.PYTHON_VERSION == '3.9' - shell: bash -l {0} - run: codecov -t 3458851b-c7a5-4108-be5e-9d19066a2fde diff -Nru spyder-unittest-0.5.1/.github/workflows/macos-tests.yml spyder-unittest-0.6.0/.github/workflows/macos-tests.yml --- spyder-unittest-0.5.1/.github/workflows/macos-tests.yml 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/.github/workflows/macos-tests.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -name: Macos tests - -on: - push: - branches: - - master - pull_request: - branches: - - master - -jobs: - macos: - name: Mac Py${{ matrix.PYTHON_VERSION }} - runs-on: macos-latest - env: - CI: True - PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} - RUNNER_OS: 'macos' - strategy: - fail-fast: false - matrix: - PYTHON_VERSION: ['3.7', '3.8', '3.9'] - steps: - - name: Checkout branch - uses: actions/checkout@v2 - - name: Install Conda - uses: conda-incubator/setup-miniconda@v2 - with: - channels: conda-forge - auto-update-conda: true - python-version: ${{ matrix.PYTHON_VERSION }} - - name: Install package dependencies - shell: bash -l {0} - run: conda install --file requirements/conda.txt -y -q - - name: Install test dependencies - shell: bash -l {0} - run: | - conda install nomkl -y -q - conda install -c spyder-ide --file requirements/tests.txt -y -q - - name: Install Package - shell: bash -l {0} - run: pip install --no-deps -e . - - name: Show environment information - shell: bash -l {0} - run: | - conda info - conda list - - name: Run tests - shell: bash -l {0} - run: pytest spyder_unittest -x -vv - timeout-minutes: 10 diff -Nru spyder-unittest-0.5.1/.github/workflows/run-tests.yml spyder-unittest-0.6.0/.github/workflows/run-tests.yml --- spyder-unittest-0.5.1/.github/workflows/run-tests.yml 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/.github/workflows/run-tests.yml 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,109 @@ +name: Run tests + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + main: + strategy: + fail-fast: false + matrix: + OS: ['ubuntu', 'macos', 'windows'] + PYTHON_VERSION: ['3.8', '3.9', '3.10'] + SPYDER_SOURCE: ['conda'] + name: ${{ matrix.OS }} py${{ matrix.PYTHON_VERSION }} spyder-from-${{ matrix.SPYDER_SOURCE }} + runs-on: ${{ matrix.OS }}-latest + env: + CI: True + PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} + steps: + - name: Checkout branch + uses: actions/checkout@v3 + - name: Install System Packages + if: matrix.OS == 'ubuntu' + run: | + sudo apt-get update --fix-missing + sudo apt-get install -qq pyqt5-dev-tools libxcb-xinerama0 xterm --fix-missing + - name: Install Conda + uses: conda-incubator/setup-miniconda@v2 + with: + miniforge-variant: Mambaforge + auto-update-conda: true + python-version: ${{ matrix.PYTHON_VERSION }} + - name: Checkout Spyder from git + if: matrix.SPYDER_SOURCE == 'git' + uses: actions/checkout@v3 + with: + repository: 'spyder-ide/spyder' + path: 'spyder' + - name: Install Spyder's dependencies (main) + if: matrix.SPYDER_SOURCE == 'git' + shell: bash -l {0} + run: mamba env update --file spyder/requirements/main.yml + - name: Install Spyder's dependencies (Linux) + if: matrix.SPYDER_SOURCE == 'git' && matrix.OS == 'ubuntu' + shell: bash -l {0} + run: mamba env update --file 
spyder/requirements/linux.yml + - name: Install Spyder's dependencies (Mac / Windows) + if: matrix.SPYDER_SOURCE == 'git' && matrix.OS != 'ubuntu' + shell: bash -l {0} + run: mamba env update --file spyder/requirements/${{ matrix.OS }}.yml + - name: Install Spyder from source + if: matrix.SPYDER_SOURCE == 'git' + shell: bash -l {0} + run: pip install --no-deps -e spyder + - name: Install plugin dependencies (without Spyder) + if: matrix.SPYDER_SOURCE == 'git' + shell: bash -l {0} + run: | + python .github/scripts/generate-without-spyder.py + mamba install --file requirements/without-spyder.txt -y + - name: Install plugin dependencies + if: matrix.SPYDER_SOURCE == 'conda' + shell: bash -l {0} + run: mamba install --file requirements/conda.txt -y + - name: Install test dependencies + shell: bash -l {0} + run: | + mamba install nomkl -y -q + mamba install --file requirements/tests.txt -y + - name: Install plugin + shell: bash -l {0} + run: pip install --no-deps -e . + - name: Show environment information + shell: bash -l {0} + run: | + mamba info + mamba list + - name: Run tests (Linux) + if: matrix.OS == 'ubuntu' + uses: nick-fields/retry@v2 + with: + timeout_minutes: 10 + max_attempts: 3 + shell: bash + command: | + . ~/.profile + xvfb-run --auto-servernum pytest spyder_unittest -vv + - name: Run tests (MacOS) + if: matrix.OS == 'macos' + uses: nick-fields/retry@v2 + with: + timeout_minutes: 10 + max_attempts: 3 + shell: bash + command: | + . ~/.profile + pytest spyder_unittest -vv + - name: Run tests (Windows) + if: matrix.OS == 'windows' + uses: nick-fields/retry@v2 + with: + timeout_minutes: 10 + max_attempts: 3 + command: pytest spyder_unittest -vv diff -Nru spyder-unittest-0.5.1/.github/workflows/windows-tests.yml spyder-unittest-0.6.0/.github/workflows/windows-tests.yml --- spyder-unittest-0.5.1/.github/workflows/windows-tests.yml 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/.github/workflows/windows-tests.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -name: Windows tests - -on: - push: - branches: - - master - pull_request: - branches: - - master - -jobs: - windows: - name: Windows Py${{ matrix.PYTHON_VERSION }} - runs-on: windows-latest - env: - CI: True - PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} - RUNNER_OS: 'windows' - strategy: - fail-fast: false - matrix: - PYTHON_VERSION: ['3.7', '3.8', '3.9'] - steps: - - name: Checkout branch - uses: actions/checkout@v2 - - name: Install Conda - uses: conda-incubator/setup-miniconda@v2 - with: - channels: conda-forge - auto-update-conda: true - python-version: ${{ matrix.PYTHON_VERSION }} - - name: Install package dependencies - shell: bash -l {0} - run: conda install --file requirements/conda.txt -y -q - - name: Install test dependencies - shell: bash -l {0} - run: conda install -c spyder-ide --file requirements/tests.txt -y -q - - name: Install Package - shell: bash -l {0} - run: pip install --no-deps -e . 
- - name: Show environment information - shell: bash -l {0} - run: | - conda info - conda list - - name: Run tests - shell: bash -l {0} - run: pytest spyder_unittest -x -vv - timeout-minutes: 10 diff -Nru spyder-unittest-0.5.1/CHANGELOG.md spyder-unittest-0.6.0/CHANGELOG.md --- spyder-unittest-0.5.1/CHANGELOG.md 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/CHANGELOG.md 2023-07-02 15:10:44.000000000 +0000 @@ -1,5 +1,29 @@ # History of changes +## Version 0.6.0 (2023-07-02) + +### New Features + +* Support nose2 and drop support for nose ([Issue 178](https://github.com/spyder-ide/spyder-unittest/issues/178), [PR 200](https://github.com/spyder-ide/spyder-unittest/pull/200)) +* New menu item for running only a single test ([Issue 88](https://github.com/spyder-ide/spyder-unittest/issues/88), [PR 211](https://github.com/spyder-ide/spyder-unittest/pull/211)) +* New configuration option for adding extra command-line arguments when running tests ([Issue 199](https://github.com/spyder-ide/spyder-unittest/issues/199), [PR 204](https://github.com/spyder-ide/spyder-unittest/pull/204)) +* New configuration option to disable or enable abbreviating the test name ([Issue 122](https://github.com/spyder-ide/spyder-unittest/issues/122), [PR 208](https://github.com/spyder-ide/spyder-unittest/pull/208)) + +### Bug Fixes + +* Execute `unittest` tests programmatically for robustness ([Issue 73](https://github.com/spyder-ide/spyder-unittest/issues/73), [Issue 76](https://github.com/spyder-ide/spyder-unittest/issues/76), [Issue 160](https://github.com/spyder-ide/spyder-unittest/issues/160), [PR 202](https://github.com/spyder-ide/spyder-unittest/pull/202)) +* Support changed format of `unittest` output in Python 3.11 ([Issue 193](https://github.com/spyder-ide/spyder-unittest/issues/193), [PR 190](https://github.com/spyder-ide/spyder-unittest/pull/190), [PR 194](https://github.com/spyder-ide/spyder-unittest/pull/194), by [@juliangilbey](https://github.com/juliangilbey)) +* Fix keyboard shortcut for running tests ([Issue 172](https://github.com/spyder-ide/spyder-unittest/issues/172), [PR 203](https://github.com/spyder-ide/spyder-unittest/pull/203)) +* Use colours from Spyder's standard palette to get a uniform UI ([Issue 186](https://github.com/spyder-ide/spyder-unittest/issues/186), [PR 187](https://github.com/spyder-ide/spyder-unittest/pull/187)) + +### Maintenance + +* Keep plugin up-to-date with latest changes in Spyder 5 ([Issue 195](https://github.com/spyder-ide/spyder-unittest/issues/195), [Issue 206](https://github.com/spyder-ide/spyder-unittest/issues/206), [Issue 209](https://github.com/spyder-ide/spyder-unittest/issues/209), [PR 197](https://github.com/spyder-ide/spyder-unittest/pull/197), [PR 207](https://github.com/spyder-ide/spyder-unittest/pull/207), [PR 214](https://github.com/spyder-ide/spyder-unittest/pull/214)) +* Update translations ([PR 212](https://github.com/spyder-ide/spyder-unittest/pull/212)) +* Fix integration tests for the plugin ([Issue 167](https://github.com/spyder-ide/spyder-unittest/issues/167), [PR 197](https://github.com/spyder-ide/spyder-unittest/pull/197)) +* Update GitHub workflow for running tests ([PR 192](https://github.com/spyder-ide/spyder-unittest/pull/192), [PR 196](https://github.com/spyder-ide/spyder-unittest/pull/196), [PR 201](https://github.com/spyder-ide/spyder-unittest/pull/201)) + + ## Version 0.5.1 (2022/09/03) ### New Features diff -Nru spyder-unittest-0.5.1/README.md spyder-unittest-0.6.0/README.md --- spyder-unittest-0.5.1/README.md 2022-09-03 
20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/README.md 2023-07-02 15:10:44.000000000 +0000 @@ -13,12 +13,11 @@ [![Windows status](https://github.com/spyder-ide/spyder-unittest/workflows/Windows%20tests/badge.svg)](https://github.com/spyder-ide/spyder-notebook/actions?query=workflow%3A%22Windows+tests%22) [![Linux status](https://github.com/spyder-ide/spyder-unittest/workflows/Linux%20tests/badge.svg)](https://github.com/spyder-ide/spyder-notebook/actions?query=workflow%3A%22Linux+tests%22) [![MacOS status](https://github.com/spyder-ide/spyder-unittest/workflows/Macos%20tests/badge.svg)](https://github.com/spyder-ide/spyder-notebook/actions?query=workflow%3A%22Macos+tests%22) -[![codecov](https://codecov.io/gh/spyder-ide/spyder-unittest/branch/master/graph/badge.svg)](https://codecov.io/gh/spyder-ide/spyder-notebook/branch/master) [![Crowdin](https://badges.crowdin.net/spyder-unittest/localized.svg)](https://crowdin.com/project/spyder-unittest) *Copyright © 2014 Spyder Project Contributors* -![Screenshot of spyder-unittest plugin showing test results](./screenshot.png) +![Screenshot of spyder-unittest plugin showing test results](./doc/screenshot.png) ## Description @@ -26,7 +25,7 @@ with Spyder, allowing you to run test suites and view the results in the IDE. The plugin supports the `unittest` module in the Python standard library -as well as the `pytest` and `nose` testing frameworks. +as well as the `pytest` and `nose2` testing frameworks. Support for `pytest` is most complete at the moment. @@ -76,10 +75,10 @@ * [spyder](https://github.com/spyder-ide/spyder) (obviously), at least version 4.0 * [lxml](http://lxml.de/) * the testing framework that you will be using: [pytest](https://pytest.org) - and/or [nose](https://nose.readthedocs.io) + and/or [nose2](https://docs.nose2.io) In order to run the tests distributed with this plugin, you need -[nose](https://nose.readthedocs.io), [pytest](https://pytest.org) +[nose2](https://docs.nose2.io), [pytest](https://pytest.org) and [pytest-qt](https://github.com/pytest-dev/pytest-qt). If you use Python 2, you also need [mock](https://github.com/testing-cabal/mock). diff -Nru spyder-unittest-0.5.1/codecov.yml spyder-unittest-0.6.0/codecov.yml --- spyder-unittest-0.5.1/codecov.yml 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/codecov.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -# See https://docs.codecov.io/docs/codecovyml-reference - -coverage: - status: - project: - default: - threshold: 5% # allow for 5% decrease in total coverage - patch: - default: - target: 50% # require 50% of diff to be covered - -comment: - layout: "files" diff -Nru spyder-unittest-0.5.1/conftest.py spyder-unittest-0.6.0/conftest.py --- spyder-unittest-0.5.1/conftest.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/conftest.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright © Spyder Project Contributors +# Licensed under the terms of the MIT License +# + +"""Configuration file for Pytest.""" + +# Standard library imports +import os + +# To activate/deactivate certain things in Spyder when running tests. +# NOTE: Please leave this before any other import here!! 
+os.environ['SPYDER_PYTEST'] = 'True' + +# Third-party imports +import pytest + + +@pytest.fixture(autouse=True) +def reset_conf_before_test(): + from spyder.config.manager import CONF + CONF.reset_to_defaults(notification=False) diff -Nru spyder-unittest-0.5.1/debian/changelog spyder-unittest-0.6.0/debian/changelog --- spyder-unittest-0.5.1/debian/changelog 2023-02-08 20:28:19.000000000 +0000 +++ spyder-unittest-0.6.0/debian/changelog 2023-07-26 17:35:01.000000000 +0000 @@ -1,3 +1,9 @@ +spyder-unittest (0.6.0-1) unstable; urgency=medium + + * New upstream version + + -- Julian Gilbey Wed, 26 Jul 2023 18:35:01 +0100 + spyder-unittest (0.5.1-3) unstable; urgency=medium * Correct patch from -2 to handle the case where the requested diff -Nru spyder-unittest-0.5.1/debian/control spyder-unittest-0.6.0/debian/control --- spyder-unittest-0.5.1/debian/control 2023-02-08 20:28:19.000000000 +0000 +++ spyder-unittest-0.6.0/debian/control 2023-07-26 17:35:01.000000000 +0000 @@ -10,7 +10,7 @@ python3-lxml, python3-setuptools, python3-spyder (<< 6~), - python3-spyder (>= 5.3.1), + python3-spyder (>= 5.4.4), python3-zmq Standards-Version: 4.6.2 Rules-Requires-Root: no @@ -21,7 +21,7 @@ Package: python3-spyder-unittest Architecture: all Depends: python3-spyder (<< 6~), - python3-spyder (>= 5.3.1), + python3-spyder (>= 5.4.1), ${misc:Depends}, ${python3:Depends} Description: Plugin to run tests within the Spyder IDE diff -Nru spyder-unittest-0.5.1/debian/patches/fix-unittest-python3.11-part2.patch spyder-unittest-0.6.0/debian/patches/fix-unittest-python3.11-part2.patch --- spyder-unittest-0.5.1/debian/patches/fix-unittest-python3.11-part2.patch 2023-02-08 20:28:19.000000000 +0000 +++ spyder-unittest-0.6.0/debian/patches/fix-unittest-python3.11-part2.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,240 +0,0 @@ -Description: Handle unittest changed output format in Python 3.11 - This patch fixes the previous attempt (now applied upstream) by - using the requested interpreter version of Python rather than the - system version -Forwarded: https://github.com/spyder-ide/spyder-unittest/pull/194 -Author: Julian Gilbey -Last-Update: 2022-02-08 - ---- a/spyder_unittest/backend/runnerbase.py -+++ b/spyder_unittest/backend/runnerbase.py -@@ -7,6 +7,7 @@ - - # Standard library imports - import os -+import sys - import tempfile - - # Third party imports -@@ -89,6 +90,9 @@ - Process running the unit test suite. - resultfilename : str - Name of file in which test results are stored. -+ executable : str -+ Path to Python executable used for test. This is required -+ by the UnittestRunner subclass. - - Signals - ------- -@@ -134,6 +138,8 @@ - 'unittest.results') - else: - self.resultfilename = resultfilename -+ # Set a sensible default -+ self.executable = sys.executable - - def create_argument_list(self, config, cov_path): - """ -@@ -189,6 +195,7 @@ - RuntimeError - If process failed to start. - """ -+ self.executable = executable - self.process = self._prepare_process(config, pythonpath) - p_args = self.create_argument_list(config, cov_path) - try: ---- a/spyder_unittest/backend/tests/test_unittestrunner.py -+++ b/spyder_unittest/backend/tests/test_unittestrunner.py -@@ -17,6 +17,8 @@ - # test_fail (testing.test_unittest.MyTest) ... FAIL - # but from Python 3.11, it reads: - # test_fail (testing.test_unittest.MyTest.test_fail) ... FAIL -+# These tests only test the system executable; they do not test -+# the situation where the requested interpreter is different. 
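The Debian patches removed below (their changes are applied upstream in 0.6.0) handle the unittest output format change in Python 3.11. A minimal standalone sketch of the difference, reusing the regular expression from try_parse_result shown in the patch:

    # Pre-3.11 the full test name is '<parenthesised>.<function>'; from 3.11 on
    # the parenthesised part already is the full name.
    import re
    regexp = r'([^\d\W]\w*) \(([^\d\W][\w.]*)\)'
    m = re.match(regexp, 'test_fail (testing.test_unittest.MyTest) ... FAIL')
    assert m.group(2) + '.' + m.group(1) == 'testing.test_unittest.MyTest.test_fail'
    m = re.match(regexp, 'test_fail (testing.test_unittest.MyTest.test_fail) ... FAIL')
    assert m.group(2) == 'testing.test_unittest.MyTest.test_fail'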
- IS_PY311_OR_GREATER = sys.version_info[:2] >= (3, 11) - - -@@ -39,6 +41,7 @@ - """ - output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) -+ runner.set_fullname_version() - res = runner.load_data(output) - assert len(res) == 2 - -@@ -72,6 +75,7 @@ - """ - output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) -+ runner.set_fullname_version() - res = runner.load_data(output) - assert len(res) == 1 - assert res[0].category == Category.OK -@@ -115,6 +119,7 @@ - """ - output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) -+ runner.set_fullname_version() - res = runner.load_data(output) - assert len(res) == 2 - -@@ -151,6 +156,7 @@ - """ - output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) -+ runner.set_fullname_version() - res = runner.load_data(output) - assert len(res) == 2 - -@@ -202,6 +208,7 @@ - """ - output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) -+ runner.set_fullname_version() - res = runner.load_data(output) - assert len(res) == 1 - -@@ -214,6 +221,7 @@ - - def test_try_parse_header_with_ok(): - runner = UnittestRunner(None) -+ runner.set_fullname_version() - lines10 = ['test_isupper (testfoo.TestStringMethods) ... ok'] - lines11 = ['test_isupper (testfoo.TestStringMethods.test_isupper) ... ok'] - lines = lines11 if IS_PY311_OR_GREATER else lines10 -@@ -223,6 +231,7 @@ - - def test_try_parse_header_with_xfail(): - runner = UnittestRunner(None) -+ runner.set_fullname_version() - lines10 = ['test_isupper (testfoo.TestStringMethods) ... expected failure'] - lines11 = ['test_isupper (testfoo.TestStringMethods.test_isupper) ... expected failure'] - lines = lines11 if IS_PY311_OR_GREATER else lines10 -@@ -233,6 +242,7 @@ - - def test_try_parse_header_with_message(): - runner = UnittestRunner(None) -+ runner.set_fullname_version() - lines10 = ["test_nothing (testfoo.Tests) ... skipped 'msg'"] - lines11 = ["test_nothing (testfoo.Tests.test_nothing) ... skipped 'msg'"] - lines = lines11 if IS_PY311_OR_GREATER else lines10 -@@ -242,6 +252,7 @@ - - def test_try_parse_header_starting_with_digit(): - runner = UnittestRunner(None) -+ runner.set_fullname_version() - lines10 = ['0est_isupper (testfoo.TestStringMethods) ... ok'] - lines11 = ['0est_isupper (testfoo.TestStringMethods.0est_isupper) ... ok'] - lines = lines11 if IS_PY311_OR_GREATER else lines10 ---- a/spyder_unittest/backend/unittestrunner.py -+++ b/spyder_unittest/backend/unittestrunner.py -@@ -7,33 +7,23 @@ - - # Standard library imports - import re --import sys -+import subprocess - - # Local imports - from spyder_unittest.backend.runnerbase import Category, RunnerBase, TestResult - - --# Up to Python 3.10, unittest output read: --# test_fail (testing.test_unittest.MyTest) ... FAIL --# but from Python 3.11, it reads: --# test_fail (testing.test_unittest.MyTest.test_fail) ... 
FAIL --IS_PY311_OR_GREATER = sys.version_info[:2] >= (3, 11) -- -- --def _get_fullname(fn_name, cls_name): -- if IS_PY311_OR_GREATER: -- function_fullname = cls_name -- else: -- function_fullname = '{}.{}'.format(cls_name, fn_name) -- return function_fullname -- -- - class UnittestRunner(RunnerBase): - """Class for running tests with unittest module in standard library.""" - - module = 'unittest' - name = 'unittest' - -+ def __init__(self, widget, resultfilename=None): -+ super().__init__(widget, resultfilename) -+ # Set a sensible default -+ self.fullname_version = 'pre311' -+ - def create_argument_list(self, config, cov_path): - """Create argument list for testing process.""" - return ['-m', self.module, 'discover', '-v'] -@@ -44,6 +34,7 @@ - - This function reads the results and emits `sig_finished`. - """ -+ self.set_fullname_version() - output = self.read_all_process_output() - testresults = self.load_data(output) - self.sig_finished.emit(testresults, output, True) -@@ -113,7 +104,7 @@ - regexp = r'([^\d\W]\w*) \(([^\d\W][\w.]*)\)' - match = re.match(regexp, lines[line_index]) - if match: -- function_fullname = _get_fullname(match.group(1), match.group(2)) -+ function_fullname = self.get_fullname(match.group(1), match.group(2)) - else: - return None - while lines[line_index]: -@@ -155,5 +146,51 @@ - while lines[line_index]: - exception_text.append(lines[line_index]) - line_index += 1 -- function_fullname = _get_fullname(match.group(1), match.group(2)) -+ function_fullname = self.get_fullname(match.group(1), match.group(2)) - return (line_index, function_fullname, exception_text) -+ -+ def set_fullname_version(self): -+ script = 'import platform; print(platform.python_version())' -+ process = subprocess.run([self.executable, '-c', script], -+ capture_output=True, text=True) -+ if process.returncode != 0: -+ self.fullname_version = 'pre311' -+ return -+ -+ # We only take the first two components as the third might -+ # be something like '0a3'. -+ exec_version_components = process.stdout.split('.')[:2] -+ exec_version = tuple(map(int, exec_version_components)) -+ if exec_version < (3, 11): -+ self.fullname_version = 'pre311' -+ else: -+ self.fullname_version = '311' -+ -+ def get_fullname(self, fn_name, parenthesised): -+ """ -+ Determine the test name output by unittest. -+ -+ Up to Python 3.10, unittest output read: -+ test_fail (testing.test_unittest.MyTest) ... FAIL -+ but from Python 3.11, it reads: -+ test_fail (testing.test_unittest.MyTest.test_fail) ... FAIL -+ -+ Parameters -+ ---------- -+ fn_name : str -+ Part prior to the parentheses (without spaces) -+ parenthesised : str -+ Part within the parentheses -+ -+ Returns -+ ------- -+ fullname : str -+ The full name of the class plus function, -+ "testing.test_unittest.MyTest.test_fail" in the above -+ example. 
-+ """ -+ if self.fullname_version == 'pre311': -+ function_fullname = '{}.{}'.format(parenthesised, fn_name) -+ else: -+ function_fullname = parenthesised -+ return function_fullname diff -Nru spyder-unittest-0.5.1/debian/patches/fix-unittest-python3.11.patch spyder-unittest-0.6.0/debian/patches/fix-unittest-python3.11.patch --- spyder-unittest-0.5.1/debian/patches/fix-unittest-python3.11.patch 2023-02-08 20:28:19.000000000 +0000 +++ spyder-unittest-0.6.0/debian/patches/fix-unittest-python3.11.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,325 +0,0 @@ -Description: Handle unittest changed output format in Python 3.11 -Forwarded: https://github.com/spyder-ide/spyder-unittest/pull/190 -Author: Julian Gilbey -Last-Update: 2022-01-08 - ---- a/spyder_unittest/backend/unittestrunner.py -+++ b/spyder_unittest/backend/unittestrunner.py -@@ -7,11 +7,27 @@ - - # Standard library imports - import re -+import sys - - # Local imports - from spyder_unittest.backend.runnerbase import Category, RunnerBase, TestResult - - -+# Up to Python 3.10, unittest output read: -+# test_fail (testing.test_unittest.MyTest) ... FAIL -+# but from Python 3.11, it reads: -+# test_fail (testing.test_unittest.MyTest.test_fail) ... FAIL -+IS_PY311_OR_GREATER = sys.version_info[:2] >= (3, 11) -+ -+ -+def _get_fullname(fn_name, cls_name): -+ if IS_PY311_OR_GREATER: -+ function_fullname = cls_name -+ else: -+ function_fullname = '{}.{}'.format(cls_name, fn_name) -+ return function_fullname -+ -+ - class UnittestRunner(RunnerBase): - """Class for running tests with unittest module in standard library.""" - -@@ -52,15 +68,14 @@ - data = self.try_parse_result(lines, line_index) - if data: - line_index = data[0] -- if data[3] == 'ok': -+ if data[2] == 'ok': - cat = Category.OK -- elif data[3] == 'FAIL' or data[3] == 'ERROR': -+ elif data[2] == 'FAIL' or data[2] == 'ERROR': - cat = Category.FAIL - else: - cat = Category.SKIP -- name = '{}.{}'.format(data[2], data[1]) -- tr = TestResult(category=cat, status=data[3], name=name, -- message=data[4]) -+ tr = TestResult(category=cat, status=data[2], name=data[1], -+ message=data[3]) - res.append(tr) - else: - line_index += 1 -@@ -73,8 +88,8 @@ - line_index = data[0] - test_index = next( - i for i, tr in enumerate(res) -- if tr.name == '{}.{}'.format(data[2], data[1])) -- res[test_index].extra_text = data[3] -+ if tr.name == data[1]) -+ res[test_index].extra_text = data[2] - else: - line_index += 1 - except IndexError: -@@ -88,18 +103,17 @@ - - Returns - ------- -- (int, str, str, str, str) or None -+ (int, str, str, str) or None - If a test result is parsed successfully then return a tuple with -- the line index of the first line after the test result, the name -- of the test function, the name of the test class, the test result, -- and the reason (if no reason is given, the fourth string is empty). -+ the line index of the first line after the test result, the full -+ name of the test function (including the class), the test result, -+ and the reason (if no reason is given, the third string is empty). - Otherwise, return None. 
- """ - regexp = r'([^\d\W]\w*) \(([^\d\W][\w.]*)\)' - match = re.match(regexp, lines[line_index]) - if match: -- function_name = match.group(1) -- class_name = match.group(2) -+ function_fullname = _get_fullname(match.group(1), match.group(2)) - else: - return None - while lines[line_index]: -@@ -109,7 +123,7 @@ - if match: - result = match.group(1) - msg = match.group(3) or '' -- return (line_index + 1, function_name, class_name, result, msg) -+ return (line_index + 1, function_fullname, result, msg) - line_index += 1 - return None - -@@ -119,11 +133,11 @@ - - Returns - ------- -- (int, str, str, list of str) or None -+ (int, str, list of str) or None - If an exception block is parsed successfully, then return a tuple -- with the line index of the first line after the block, the name of -- the test function, the name of the test class, and the text of the -- exception. Otherwise, return None. -+ with the line index of the first line after the block, the full -+ name of the test function (including the class), and the text of -+ the exception. Otherwise, return None. - """ - if not all(char == '=' for char in lines[line_index]): - return None -@@ -141,4 +155,5 @@ - while lines[line_index]: - exception_text.append(lines[line_index]) - line_index += 1 -- return (line_index, match.group(1), match.group(2), exception_text) -+ function_fullname = _get_fullname(match.group(1), match.group(2)) -+ return (line_index, function_fullname, exception_text) ---- a/spyder_unittest/backend/tests/test_unittestrunner.py -+++ b/spyder_unittest/backend/tests/test_unittestrunner.py -@@ -5,13 +5,23 @@ - # (see LICENSE.txt for details) - """Tests for unittestrunner.py""" - -+# Standard library imports -+import sys -+ - # Local imports - from spyder_unittest.backend.runnerbase import Category - from spyder_unittest.backend.unittestrunner import UnittestRunner - - -+# Up to Python 3.10, unittest output read: -+# test_fail (testing.test_unittest.MyTest) ... FAIL -+# but from Python 3.11, it reads: -+# test_fail (testing.test_unittest.MyTest.test_fail) ... FAIL -+IS_PY311_OR_GREATER = sys.version_info[:2] >= (3, 11) -+ -+ - def test_unittestrunner_load_data_with_two_tests(): -- output = """test_isupper (teststringmethods.TestStringMethods) ... ok -+ output10 = """test_isupper (teststringmethods.TestStringMethods) ... ok - test_split (teststringmethods.TestStringMethods) ... ok - - ---------------------------------------------------------------------- -@@ -19,6 +29,15 @@ - - OK - """ -+ output11 = """test_isupper (teststringmethods.TestStringMethods.test_isupper) ... ok -+test_split (teststringmethods.TestStringMethods.test_split) ... ok -+ -+---------------------------------------------------------------------- -+Ran 2 tests in 0.012s -+ -+OK -+""" -+ output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 2 -@@ -37,13 +56,21 @@ - - - def test_unittestrunner_load_data_with_one_test(): -- output = """test1 (test_foo.Bar) ... ok -+ output10 = """test1 (test_foo.Bar) ... ok - - ---------------------------------------------------------------------- - Ran 1 test in 0.000s - - OK - """ -+ output11 = """test1 (test_foo.Bar.test1) ... 
ok -+ -+---------------------------------------------------------------------- -+Ran 1 test in 0.000s -+ -+OK -+""" -+ output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 1 -@@ -54,7 +81,7 @@ - - - def test_unittestrunner_load_data_with_exception(): -- output = """test1 (test_foo.Bar) ... FAIL -+ output10 = """test1 (test_foo.Bar) ... FAIL - test2 (test_foo.Bar) ... ok - - ====================================================================== -@@ -70,6 +97,23 @@ - - FAILED (failures=1) - """ -+ output11 = """test1 (test_foo.Bar.test1) ... FAIL -+test2 (test_foo.Bar.test2) ... ok -+ -+====================================================================== -+FAIL: test1 (test_foo.Bar.test1) -+---------------------------------------------------------------------- -+Traceback (most recent call last): -+ File "/somepath/test_foo.py", line 5, in test1 -+ self.assertEqual(1, 2) -+AssertionError: 1 != 2 -+ -+---------------------------------------------------------------------- -+Ran 2 tests in 0.012s -+ -+FAILED (failures=1) -+""" -+ output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 2 -@@ -87,7 +131,7 @@ - - - def test_unittestrunner_load_data_with_comment(): -- output = """test1 (test_foo.Bar) -+ output10 = """test1 (test_foo.Bar) - comment ... ok - test2 (test_foo.Bar) ... ok - -@@ -96,6 +140,16 @@ - - OK - """ -+ output11 = """test1 (test_foo.Bar.test1) -+comment ... ok -+test2 (test_foo.Bar.test2) ... ok -+ -+---------------------------------------------------------------------- -+Ran 2 tests in 0.000s -+ -+OK -+""" -+ output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 2 -@@ -112,7 +166,7 @@ - - - def test_unittestrunner_load_data_with_fail_and_comment(): -- output = """test1 (test_foo.Bar) -+ output10 = """test1 (test_foo.Bar) - comment ... FAIL - - ====================================================================== -@@ -129,6 +183,24 @@ - - FAILED (failures=1) - """ -+ output11 = """test1 (test_foo.Bar.test1) -+comment ... FAIL -+ -+====================================================================== -+FAIL: test1 (test_foo.Bar.test1) -+comment -+---------------------------------------------------------------------- -+Traceback (most recent call last): -+ File "/somepath/test_foo.py", line 30, in test1 -+ self.assertEqual(1, 2) -+AssertionError: 1 != 2 -+ -+---------------------------------------------------------------------- -+Ran 1 test in 0.000s -+ -+FAILED (failures=1) -+""" -+ output = output11 if IS_PY311_OR_GREATER else output10 - runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 1 -@@ -142,28 +214,36 @@ - - def test_try_parse_header_with_ok(): - runner = UnittestRunner(None) -- lines = ['test_isupper (testfoo.TestStringMethods) ... ok'] -+ lines10 = ['test_isupper (testfoo.TestStringMethods) ... ok'] -+ lines11 = ['test_isupper (testfoo.TestStringMethods.test_isupper) ... ok'] -+ lines = lines11 if IS_PY311_OR_GREATER else lines10 - res = runner.try_parse_result(lines, 0) -- assert res == (1, 'test_isupper', 'testfoo.TestStringMethods', 'ok', '') -+ assert res == (1, 'testfoo.TestStringMethods.test_isupper', 'ok', '') - - - def test_try_parse_header_with_xfail(): - runner = UnittestRunner(None) -- lines = ['test_isupper (testfoo.TestStringMethods) ... 
expected failure'] -+ lines10 = ['test_isupper (testfoo.TestStringMethods) ... expected failure'] -+ lines11 = ['test_isupper (testfoo.TestStringMethods.test_isupper) ... expected failure'] -+ lines = lines11 if IS_PY311_OR_GREATER else lines10 - res = runner.try_parse_result(lines, 0) -- assert res == (1, 'test_isupper', 'testfoo.TestStringMethods', -+ assert res == (1, 'testfoo.TestStringMethods.test_isupper', - 'expected failure', '') - - - def test_try_parse_header_with_message(): - runner = UnittestRunner(None) -- lines = ["test_nothing (testfoo.Tests) ... skipped 'msg'"] -+ lines10 = ["test_nothing (testfoo.Tests) ... skipped 'msg'"] -+ lines11 = ["test_nothing (testfoo.Tests.test_nothing) ... skipped 'msg'"] -+ lines = lines11 if IS_PY311_OR_GREATER else lines10 - res = runner.try_parse_result(lines, 0) -- assert res == (1, 'test_nothing', 'testfoo.Tests', 'skipped', 'msg') -+ assert res == (1, 'testfoo.Tests.test_nothing', 'skipped', 'msg') - - - def test_try_parse_header_starting_with_digit(): - runner = UnittestRunner(None) -- lines = ['0est_isupper (testfoo.TestStringMethods) ... ok'] -+ lines10 = ['0est_isupper (testfoo.TestStringMethods) ... ok'] -+ lines11 = ['0est_isupper (testfoo.TestStringMethods.0est_isupper) ... ok'] -+ lines = lines11 if IS_PY311_OR_GREATER else lines10 - res = runner.try_parse_result(lines, 0) - assert res is None diff -Nru spyder-unittest-0.5.1/debian/patches/series spyder-unittest-0.6.0/debian/patches/series --- spyder-unittest-0.5.1/debian/patches/series 2023-02-08 20:28:19.000000000 +0000 +++ spyder-unittest-0.6.0/debian/patches/series 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -fix-unittest-python3.11.patch -fix-unittest-python3.11-part2.patch diff -Nru spyder-unittest-0.5.1/debian/tests/control spyder-unittest-0.6.0/debian/tests/control --- spyder-unittest-0.5.1/debian/tests/control 2023-02-08 20:28:19.000000000 +0000 +++ spyder-unittest-0.6.0/debian/tests/control 2023-07-26 17:35:01.000000000 +0000 @@ -3,7 +3,7 @@ pyqt5-dev-tools, python3-all, python3-flaky, - python3-nose, + python3-nose2, python3-pytest, python3-pytestqt, xauth, diff -Nru spyder-unittest-0.5.1/doc/example/test_foo.py spyder-unittest-0.6.0/doc/example/test_foo.py --- spyder-unittest-0.5.1/doc/example/test_foo.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/doc/example/test_foo.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright © Spyder Project Contributors +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +"""Example tests used to generate screenshots.""" + +import pytest + +def test_one_plus_one_is_two(): + assert 1 + 1 == 2 + +def test_two_plus_two_is_four(): + assert 2 + 2 == 4 + +def test_one_plus_two_is_five(): + assert 1 + 2 == 5 + +def test_two_times_two_is_four(): + assert 2 * 2 == 4 + +@pytest.mark.skip +def test_will_be_skipped(): + assert 0 == 1 Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/doc/screenshot.png and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/doc/screenshot.png differ diff -Nru spyder-unittest-0.5.1/requirements/conda.txt spyder-unittest-0.6.0/requirements/conda.txt --- spyder-unittest-0.5.1/requirements/conda.txt 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/requirements/conda.txt 2023-07-02 15:10:44.000000000 +0000 @@ -1,2 +1,2 @@ lxml -spyder>=5.3.1,<6 +spyder>=5.4.1,<6 diff -Nru spyder-unittest-0.5.1/requirements/tests.txt spyder-unittest-0.6.0/requirements/tests.txt --- 
spyder-unittest-0.5.1/requirements/tests.txt 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/requirements/tests.txt 2023-07-02 15:10:44.000000000 +0000 @@ -1,7 +1,4 @@ -codecov flaky -nose +nose2 pytest>=5 -pytest-cov -pytest-mock pytest-qt Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/screenshot.png and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/screenshot.png differ diff -Nru spyder-unittest-0.5.1/setup.py spyder-unittest-0.6.0/setup.py --- spyder-unittest-0.5.1/setup.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/setup.py 2023-07-02 15:10:44.000000000 +0000 @@ -37,7 +37,7 @@ # Requirements -REQUIREMENTS = ['lxml', 'spyder>=5.3.1,<6', 'pyzmq'] +REQUIREMENTS = ['lxml', 'spyder>=5.4.1,<6', 'pyzmq'] EXTLIST = ['.jpg', '.png', '.json', '.mo', '.ini'] LIBNAME = 'spyder_unittest' @@ -47,7 +47,7 @@ frameworks. It allows you to run tests and view the results. The plugin supports the `unittest` framework in the Python -standard library and the `pytest` and `nose` testing frameworks. +standard library and the `pytest` and `nose2` testing frameworks. """ setup( @@ -56,7 +56,7 @@ packages=find_packages(), package_data={LIBNAME: get_package_data(LIBNAME, EXTLIST)}, keywords=["Qt PyQt4 PyQt5 spyder plugins testing"], - python_requires='>=3.5', + python_requires='>=3.7', install_requires=REQUIREMENTS, url='https://github.com/spyder-ide/spyder-unittest', license='MIT', diff -Nru spyder-unittest-0.5.1/spyder_unittest/__init__.py spyder-unittest-0.6.0/spyder_unittest/__init__.py --- spyder-unittest-0.5.1/spyder_unittest/__init__.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/__init__.py 2023-07-02 15:10:44.000000000 +0000 @@ -3,10 +3,10 @@ # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) -"""Spyder unitest plugin.""" +"""Spyder unittest plugin.""" # Local imports from .unittestplugin import UnitTestPlugin as PLUGIN_CLASS -__version__ = '0.5.1' +__version__ = '0.6.0' PLUGIN_CLASS diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/abbreviator.py spyder-unittest-0.6.0/spyder_unittest/backend/abbreviator.py --- spyder-unittest-0.5.1/spyder_unittest/backend/abbreviator.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/abbreviator.py 2023-07-02 15:10:44.000000000 +0000 @@ -5,6 +5,23 @@ # (see LICENSE.txt for details) """Class for abbreviating test names.""" +from __future__ import annotations + +# Standard imports +from dataclasses import dataclass + +@dataclass +class Abbreviation: + """ + Abbreviation for one component of a test name. + + Abbreviations are defined recursively, so `.head` is the abbreviation + for the first component and `.tail` specifies the abbreviations for the + second and later components. + """ + head: str + tail: Abbreviator + class Abbreviator: """ @@ -26,7 +43,7 @@ the higher-level components as its second element. """ - def __init__(self, names=[]): + def __init__(self, names: list[str]=[]) -> None: """ Constructor. @@ -35,11 +52,11 @@ names : list of str list of words which needs to be abbreviated. """ - self.dic = {} + self.dic: dict[str, Abbreviation] = {} for name in names: self.add(name) - def add(self, name): + def add(self, name: str) -> None: """ Add name to list of names to be abbreviated. 
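With the abbreviator.py change above, each name component is stored as an Abbreviation dataclass instead of a two-element list. A minimal sketch of the new access pattern, with a hypothetical test name (the abbreviated values themselves depend on which other names have been added):

    from spyder_unittest.backend.abbreviator import Abbreviator
    abb = Abbreviator(['test_foo.TestStringMethods.test_isupper'])
    entry = abb.dic['test_foo']
    entry.head   # abbreviation of 'test_foo' (previously entry[0])
    entry.tail   # nested Abbreviator for later components (previously entry[1])
    abb.abbreviate('test_foo.TestStringMethods.test_isupper')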
@@ -61,18 +78,18 @@ and len_abbrev < len(other)): len_abbrev += 1 if len_abbrev == len(start): - self.dic[other][0] = other[:len_abbrev + 1] + self.dic[other].head = other[:len_abbrev + 1] elif len_abbrev == len(other): - self.dic[other][0] = other + self.dic[other].head = other len_abbrev += 1 else: - if len(self.dic[other][0]) < len_abbrev: - self.dic[other][0] = other[:len_abbrev] + if len(self.dic[other].head) < len_abbrev: + self.dic[other].head = other[:len_abbrev] else: - self.dic[start] = [start[:len_abbrev], Abbreviator()] - self.dic[start][1].add(rest) + self.dic[start] = Abbreviation(start[:len_abbrev], Abbreviator()) + self.dic[start].tail.add(rest) - def abbreviate(self, name): + def abbreviate(self, name: str) -> str: """Return abbreviation of name.""" if '[' in name: name, parameters = name.split('[', 1) @@ -81,8 +98,8 @@ parameters = '' if '.' in name: start, rest = name.split('.', 1) - res = (self.dic[start][0] - + '.' + self.dic[start][1].abbreviate(rest)) + res = (self.dic[start].head + + '.' + self.dic[start].tail.abbreviate(rest)) else: res = name return res + parameters diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/frameworkregistry.py spyder-unittest-0.6.0/spyder_unittest/backend/frameworkregistry.py --- spyder-unittest-0.5.1/spyder_unittest/backend/frameworkregistry.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/frameworkregistry.py 2023-07-02 15:10:44.000000000 +0000 @@ -5,6 +5,16 @@ # (see LICENSE.txt for details) """Keep track of testing frameworks and create test runners when requested.""" +from __future__ import annotations + +# Standard imports +from typing import Optional, TYPE_CHECKING + +# Local imports +if TYPE_CHECKING: + from spyder_unittest.backend.runnerbase import RunnerBase + from spyder_unittest.widgets.unittestgui import UnitTestWidget + class FrameworkRegistry(): """ @@ -24,21 +34,22 @@ associated runners. """ - def __init__(self): + def __init__(self) -> None: """Initialize self.""" - self.frameworks = {} + self.frameworks: dict[str, type[RunnerBase]] = {} - def register(self, runner_class): + def register(self, runner_class: type[RunnerBase]) -> None: """Register runner class for a testing framework. Parameters ---------- - runner_class : type + runner_class Class used for creating tests runners for the framework. """ self.frameworks[runner_class.name] = runner_class - def create_runner(self, framework, widget, tempfilename): + def create_runner(self, framework: str, widget: UnitTestWidget, + tempfilename: Optional[str]) -> RunnerBase: """Create test runner associated to some testing framework. This creates an instance of the runner class whose `name` attribute @@ -46,11 +57,11 @@ Parameters ---------- - framework : str + framework Name of testing framework. - widget : UnitTestWidget + widget Unit test widget which constructs the test runner. - resultfilename : str or None + resultfilename Name of file in which to store test results. If None, use default. 
Returns diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/nose2runner.py spyder-unittest-0.6.0/spyder_unittest/backend/nose2runner.py --- spyder-unittest-0.5.1/spyder_unittest/backend/nose2runner.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/nose2runner.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2013 Spyder Project Contributors +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +"""Support for Nose framework.""" + +from __future__ import annotations + +# Standard library imports +from typing import Optional, TYPE_CHECKING + +# Third party imports +from lxml import etree +from spyder.config.base import get_translation + +# Local imports +from spyder_unittest.backend.runnerbase import Category, RunnerBase, TestResult +if TYPE_CHECKING: + from spyder_unittest.widgets.configdialog import Config + +try: + _ = get_translation('spyder_unittest') +except KeyError: + import gettext + _ = gettext.gettext + + +class Nose2Runner(RunnerBase): + """Class for running tests within Nose framework.""" + + module = 'nose2' + name = 'nose2' + + def create_argument_list(self, config: Config, + cov_path: Optional[str], + single_test: Optional[str]) -> list[str]: + """Create argument list for testing process.""" + arguments = [ + '-m', self.module, '--plugin=nose2.plugins.junitxml', + '--junit-xml', '--junit-xml-path={}'.format(self.resultfilename) + ] + if single_test: + arguments.append(single_test) + arguments += config.args + return arguments + + def finished(self, exitcode: int) -> None: + """Called when the unit test process has finished.""" + output = self.read_all_process_output() + testresults = self.load_data() + self.sig_finished.emit(testresults, output, True) + + def load_data(self) -> list[TestResult]: + """ + Read and parse unit test results. + + This function reads the unit test results from the file with name + `self.resultfilename` and parses them. The file should contain the + test results in JUnitXML format. + + Returns + ------- + list of TestResult + Unit test results. 
+ """ + try: + data = etree.parse(self.resultfilename).getroot() + except OSError: + return [] + + testresults = [] + for testcase in data: + category = Category.OK + status = 'ok' + name = '{}.{}'.format(testcase.get('classname'), + testcase.get('name')) + message = '' + time = float(testcase.get('time')) + extras = [] + + for child in testcase: + if child.tag in ('error', 'failure', 'skipped'): + if child.tag == 'skipped': + category = Category.SKIP + else: + category = Category.FAIL + status = child.tag + type_ = child.get('type') + message = child.get('message', default='') + if type_ and message: + message = '{0}: {1}'.format(type_, message) + elif type_: + message = type_ + if child.text: + extras.append(child.text) + elif child.tag in ('system-out', 'system-err') and child.text: + if child.tag == 'system-out': + heading = _('Captured stdout') + else: + heading = _('Captured stderr') + contents = child.text.rstrip('\n') + extras.append('----- {} -----\n{}'.format(heading, + contents)) + + extra_text = '\n\n'.join(extras) + testresults.append( + TestResult(category, status, name, message, time, extra_text)) + + return testresults diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/noserunner.py spyder-unittest-0.6.0/spyder_unittest/backend/noserunner.py --- spyder-unittest-0.5.1/spyder_unittest/backend/noserunner.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/noserunner.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright © 2013 Spyder Project Contributors -# Licensed under the terms of the MIT License -# (see LICENSE.txt for details) -"""Support for Nose framework.""" - -# Third party imports -from lxml import etree -from spyder.config.base import get_translation - -# Local imports -from spyder_unittest.backend.runnerbase import Category, RunnerBase, TestResult - -try: - _ = get_translation('spyder_unittest') -except KeyError: - import gettext - _ = gettext.gettext - - -class NoseRunner(RunnerBase): - """Class for running tests within Nose framework.""" - - module = 'nose' - name = 'nose' - - def create_argument_list(self, config, cov_path): - """Create argument list for testing process.""" - return [ - '-m', self.module, '--with-xunit', - '--xunit-file={}'.format(self.resultfilename), - ] - - def finished(self): - """Called when the unit test process has finished.""" - output = self.read_all_process_output() - testresults = self.load_data() - self.sig_finished.emit(testresults, output, True) - - def load_data(self): - """ - Read and parse unit test results. - - This function reads the unit test results from the file with name - `self.resultfilename` and parses them. The file should contain the - test results in JUnitXML format. - - Returns - ------- - list of TestResult - Unit test results. 
- """ - try: - data = etree.parse(self.resultfilename).getroot() - except OSError: - data = [] - - testresults = [] - for testcase in data: - category = Category.OK - status = 'ok' - name = '{}.{}'.format(testcase.get('classname'), - testcase.get('name')) - message = '' - time = float(testcase.get('time')) - extras = [] - - for child in testcase: - if child.tag in ('error', 'failure', 'skipped'): - if child.tag == 'skipped': - category = Category.SKIP - else: - category = Category.FAIL - status = child.tag - type_ = child.get('type') - message = child.get('message', default='') - if type_ and message: - message = '{0}: {1}'.format(type_, message) - elif type_: - message = type_ - if child.text: - extras.append(child.text) - elif child.tag in ('system-out', 'system-err'): - if child.tag == 'system-out': - heading = _('Captured stdout') - else: - heading = _('Captured stderr') - contents = child.text.rstrip('\n') - extras.append('----- {} -----\n{}'.format(heading, - contents)) - - extra_text = '\n\n'.join(extras) - testresults.append( - TestResult(category, status, name, message, time, extra_text)) - - return testresults diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/pytestrunner.py spyder-unittest-0.6.0/spyder_unittest/backend/pytestrunner.py --- spyder-unittest-0.5.1/spyder_unittest/backend/pytestrunner.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/pytestrunner.py 2023-07-02 15:10:44.000000000 +0000 @@ -5,16 +5,21 @@ # (see LICENSE.txt for details) """Support for pytest framework.""" +from __future__ import annotations + # Standard library imports import os import os.path as osp import re +from typing import Any, Optional, TYPE_CHECKING # Local imports from spyder.config.base import get_translation from spyder_unittest.backend.runnerbase import (Category, RunnerBase, TestResult, COV_TEST_NAME) from spyder_unittest.backend.zmqreader import ZmqStreamReader +if TYPE_CHECKING: + from spyder_unittest.widgets.configdialog import Config try: _ = get_translation('spyder_unittest') @@ -29,30 +34,36 @@ module = 'pytest' name = 'pytest' - def create_argument_list(self, config, cov_path): + def create_argument_list(self, config: Config, + cov_path: Optional[str], + single_test: Optional[str]) -> list[str]: """Create argument list for testing process.""" dirname = os.path.dirname(__file__) pyfile = os.path.join(dirname, 'workers', 'pytestworker.py') arguments = [pyfile, str(self.reader.port)] if config.coverage: arguments += [f'--cov={cov_path}', '--cov-report=term-missing'] + if single_test: + arguments.append(self.convert_testname_to_nodeid(single_test)) + arguments += config.args return arguments - - def start(self, config, cov_path, executable, pythonpath): + def start(self, config: Config, cov_path: Optional[str], + executable: str, pythonpath: list[str], + single_test: Optional[str]) -> None: """Start process which will run the unit test suite.""" self.config = config self.reader = ZmqStreamReader() self.reader.sig_received.connect(self.process_output) - RunnerBase.start(self, config, cov_path, executable, pythonpath) + super().start(config, cov_path, executable, pythonpath, single_test) - def process_output(self, output): + def process_output(self, output: list[dict[str, Any]]) -> None: """ Process output of test process. Parameters ---------- - output : list + output list of decoded Python object sent by test process. 
""" collected_list = [] @@ -63,15 +74,16 @@ if result_item['event'] == 'config': self.rootdir = result_item['rootdir'] elif result_item['event'] == 'collected': - testname = convert_nodeid_to_testname(result_item['nodeid']) - collected_list.append(testname) + name = self.convert_nodeid_to_testname(result_item['nodeid']) + collected_list.append(name) elif result_item['event'] == 'collecterror': - tupl = logreport_collecterror_to_tuple(result_item) + tupl = self.logreport_collecterror_to_tuple(result_item) collecterror_list.append(tupl) elif result_item['event'] == 'starttest': - starttest_list.append(logreport_starttest_to_str(result_item)) + name = self.logreport_starttest_to_str(result_item) + starttest_list.append(name) elif result_item['event'] == 'logreport': - testresult = logreport_to_testresult(result_item, self.rootdir) + testresult = self.logreport_to_testresult(result_item) result_list.append(testresult) if collected_list: @@ -83,7 +95,7 @@ if result_list: self.sig_testresult.emit(result_list) - def process_coverage(self, output): + def process_coverage(self, output: str) -> None: """Search the output text for coverage details. Called by the function 'finished' at the very end. @@ -107,8 +119,11 @@ for row in re.findall( r'^((.*?\.py) .*?(\d+%).*?(\d[\d\,\-\ ]*)?)$', cov_results.group(0), flags=re.M): - lineno = (int(re.search(r'^(\d*)', row[3]).group(1)) - 1 - if row[3] else None) + lineno: Optional[int] = None + if row[3]: + match = re.search(r'^(\d*)', row[3]) + if match: + lineno = int(match.group(1)) - 1 file_cov = TestResult( Category.COVERAGE, row[2], row[1], message=_('Missing: {}').format(row[3] if row[3] else _("(none)")), @@ -117,7 +132,7 @@ self.sig_collected.emit([row[1]]) self.sig_testresult.emit([file_cov]) - def finished(self, exitcode): + def finished(self, exitcode: int) -> None: """ Called when the unit test process has finished. @@ -125,7 +140,7 @@ Parameters ---------- - exitcode : int + exitcode Exit code of the test process. """ self.reader.close() @@ -137,56 +152,78 @@ # 2 = interrupted, 5 = no tests collected self.sig_finished.emit([], output, normal_exit) + def normalize_module_name(self, name: str) -> str: + """ + Convert module name reported by pytest to Python conventions. -def normalize_module_name(name): - """ - Convert module name reported by pytest to Python conventions. - - This function strips the .py suffix and replaces '/' by '.', so that - 'ham/spam.py' becomes 'ham.spam'. 
- """ - if name.endswith('.py'): - name = name[:-3] - return name.replace('/', '.') - - -def convert_nodeid_to_testname(nodeid): - """Convert a nodeid to a test name.""" - module, name = nodeid.split('::', 1) - module = normalize_module_name(module) - return '{}.{}'.format(module, name) - - -def logreport_collecterror_to_tuple(report): - """Convert a 'collecterror' logreport to a (str, str) tuple.""" - module = normalize_module_name(report['nodeid']) - return (module, report['longrepr']) - - -def logreport_starttest_to_str(report): - """Convert a 'starttest' logreport to a str.""" - return convert_nodeid_to_testname(report['nodeid']) - - -def logreport_to_testresult(report, rootdir): - """Convert a logreport sent by test process to a TestResult.""" - status = report['outcome'] - if report['outcome'] in ('failed', 'xpassed') or report['witherror']: - cat = Category.FAIL - elif report['outcome'] in ('passed', 'xfailed'): - cat = Category.OK - else: - cat = Category.SKIP - testname = convert_nodeid_to_testname(report['nodeid']) - message = report.get('message', '') - extra_text = report.get('longrepr', '') - if 'sections' in report: - if extra_text: - extra_text += '\n' - for (heading, text) in report['sections']: - extra_text += '----- {} -----\n{}'.format(heading, text) - filename = osp.join(rootdir, report['filename']) - result = TestResult(cat, status, testname, message=message, - time=report['duration'], extra_text=extra_text, - filename=filename, lineno=report['lineno']) - return result + This function strips the .py suffix and replaces '/' by '.', so that + 'ham/spam.py' becomes 'ham.spam'. + + The result is relative to the directory from which tests are run and + not the pytest root dir. + """ + wdir = osp.realpath(self.config.wdir) + if wdir != self.rootdir: + abspath = osp.join(self.rootdir, name) + try: + name = osp.relpath(abspath, start=wdir) + except ValueError: + # Happens on Windows if paths are on different drives + pass + + if name.endswith('.py'): + name = name[:-3] + return name.replace(osp.sep, '.') + + def convert_nodeid_to_testname(self, nodeid: str) -> str: + """Convert a nodeid to a test name.""" + module, name = nodeid.split('::', 1) + module = self.normalize_module_name(module) + return '{}.{}'.format(module, name) + + def convert_testname_to_nodeid(self, testname: str) -> str: + """ + Convert a test name to a nodeid relative to wdir. + + A true nodeid is relative to the pytest root dir. The return value of + this function is like a nodeid but relative to the wdir (i.e., the + directory from which test are run). This is the format that pytest + expects when running single tests. 
+ """ + *path_parts, last_part = testname.split('.') + path_parts[-1] += '.py' + nodeid = osp.join(*path_parts) + '::' + last_part + return nodeid + + def logreport_collecterror_to_tuple( + self, report: dict[str, Any]) -> tuple[str, str]: + """Convert a 'collecterror' logreport to a (str, str) tuple.""" + module = self.normalize_module_name(report['nodeid']) + return (module, report['longrepr']) + + def logreport_starttest_to_str(self, report: dict[str, Any]) -> str: + """Convert a 'starttest' logreport to a str.""" + return self.convert_nodeid_to_testname(report['nodeid']) + + def logreport_to_testresult(self, report: dict[str, Any]) -> TestResult: + """Convert a logreport sent by test process to a TestResult.""" + status = report['outcome'] + if report['outcome'] in ('failed', 'xpassed') or report['witherror']: + cat = Category.FAIL + elif report['outcome'] in ('passed', 'xfailed'): + cat = Category.OK + else: + cat = Category.SKIP + testname = self.convert_nodeid_to_testname(report['nodeid']) + message = report.get('message', '') + extra_text = report.get('longrepr', '') + if 'sections' in report: + if extra_text: + extra_text += '\n' + for (heading, text) in report['sections']: + extra_text += '----- {} -----\n{}'.format(heading, text) + filename = osp.join(self.rootdir, report['filename']) + result = TestResult(cat, status, testname, message=message, + time=report['duration'], extra_text=extra_text, + filename=filename, lineno=report['lineno']) + return result diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/runnerbase.py spyder-unittest-0.6.0/spyder_unittest/backend/runnerbase.py --- spyder-unittest-0.5.1/spyder_unittest/backend/runnerbase.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/runnerbase.py 2023-07-02 15:10:44.000000000 +0000 @@ -5,22 +5,34 @@ # (see LICENSE.txt for details) """Classes for running tests within various frameworks.""" +from __future__ import annotations + # Standard library imports +from enum import IntEnum +import logging import os import tempfile +from typing import ClassVar, Optional, TYPE_CHECKING # Third party imports -from importlib.util import find_spec as find_spec_or_loader -from qtpy.QtCore import (QObject, QProcess, QProcessEnvironment, QTextCodec, - Signal) +from qtpy.QtCore import ( + QObject, QProcess, QProcessEnvironment, QTextCodec, Signal) + +# Local imports +if TYPE_CHECKING: + from spyder_unittest.widgets.configdialog import Config + from spyder_unittest.widgets.unittestgui import UnitTestWidget +# Logging +logger = logging.getLogger(__name__) + # if generating coverage report, use this name for the TestResult # it's here in case we can get coverage results from unittest too COV_TEST_NAME = 'Total Test Coverage' -class Category: +class Category(IntEnum): """Enum type representing category of test result.""" FAIL = 1 @@ -35,21 +47,12 @@ __test__ = False # this is not a pytest test class - def __init__(self, category, status, name, message='', time=None, - extra_text='', filename=None, lineno=None): + def __init__(self, category: Category, status: str, name: str, + message: str = '', time: Optional[float] = None, + extra_text: str = '', filename: Optional[str] = None, + lineno: Optional[int] = None): """ Construct a test result. 
- - Parameters - ---------- - category : Category - status : str - name : str - message : str - time : float or None - extra_text : str - filename : str or None - lineno : int or None """ self.category = category self.status = status @@ -64,8 +67,10 @@ self.filename = filename self.lineno = lineno - def __eq__(self, other): + def __eq__(self, other: object) -> bool: """Test for equality.""" + if not isinstance(other, TestResult): + return NotImplemented return self.__dict__ == other.__dict__ @@ -74,7 +79,7 @@ Base class for running tests with a framework that uses JUnit XML. This is an abstract class, meant to be subclassed before being used. - Concrete subclasses should define executable and create_argument_list(), + Concrete subclasses should define create_argument_list() and finished(). All communication back to the caller is done via signals. @@ -109,6 +114,9 @@ Emitted when test process is being stopped. """ + module: ClassVar[str] + name: ClassVar[str] + sig_collected = Signal(object) sig_collecterror = Signal(object) sig_starttest = Signal(object) @@ -116,7 +124,8 @@ sig_finished = Signal(object, str, bool) sig_stop = Signal() - def __init__(self, widget, resultfilename=None): + def __init__(self, widget: UnitTestWidget, + resultfilename: Optional[str] = None): """ Construct test runner. @@ -128,14 +137,16 @@ Name of file in which to store test results. If None, use default. """ QObject.__init__(self, widget) - self.process = None + self.process: Optional[QProcess] = None if resultfilename is None: self.resultfilename = os.path.join(tempfile.gettempdir(), 'unittest.results') else: self.resultfilename = resultfilename - def create_argument_list(self, config, cov_path): + def create_argument_list(self, config: Config, + cov_path: Optional[str], + single_test: Optional[str]) -> list[str]: """ Create argument list for testing process (dummy). @@ -143,7 +154,8 @@ """ raise NotImplementedError - def _prepare_process(self, config, pythonpath): + def _prepare_process(self, config: Config, + pythonpath: list[str]) -> QProcess: """ Prepare and return process for running the unit test suite. @@ -155,7 +167,7 @@ process.finished.connect(self.finished) if pythonpath: env = QProcessEnvironment.systemEnvironment() - old_python_path = env.value('PYTHONPATH', None) + old_python_path = env.value('PYTHONPATH', '') python_path_str = os.pathsep.join(pythonpath) if old_python_path: python_path_str += os.pathsep + old_python_path @@ -163,7 +175,9 @@ process.setProcessEnvironment(env) return process - def start(self, config, cov_path, executable, pythonpath): + def start(self, config: Config, cov_path: Optional[str], + executable: str, pythonpath: list[str], + single_test: Optional[str]) -> None: """ Start process which will run the unit test suite. @@ -175,14 +189,17 @@ Parameters ---------- - config : TestConfig + config Unit test configuration. - cov_path : str or None + cov_path Path to filter source for coverage report - executable : str + executable Path to Python executable - pythonpath : list of str + pythonpath List of directories to be added to the Python path + single_test + If None, run all tests; otherwise, it is the name of the only test + to be run. Raises ------ @@ -190,17 +207,18 @@ If process failed to start. 
""" self.process = self._prepare_process(config, pythonpath) - p_args = self.create_argument_list(config, cov_path) + p_args = self.create_argument_list(config, cov_path, single_test) try: os.remove(self.resultfilename) except OSError: pass + logger.debug(f'Starting Python process with arguments {p_args}') self.process.start(executable, p_args) running = self.process.waitForStarted() if not running: raise RuntimeError - def finished(self): + def finished(self, exitcode: int) -> None: """ Called when the unit test process has finished. @@ -209,13 +227,14 @@ """ raise NotImplementedError - def read_all_process_output(self): + def read_all_process_output(self) -> str: """Read and return all output from `self.process` as unicode.""" + assert self.process is not None qbytearray = self.process.readAllStandardOutput() locale_codec = QTextCodec.codecForLocale() return locale_codec.toUnicode(qbytearray.data()) - def stop_if_running(self): + def stop_if_running(self) -> None: """Stop testing process if it is running.""" if self.process and self.process.state() == QProcess.Running: self.process.kill() diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_nose2runner.py spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_nose2runner.py --- spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_nose2runner.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_nose2runner.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2013 Spyder Project Contributors +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +"""Tests for nose2runner.py""" + +# Local imports +from spyder_unittest.backend.nose2runner import Nose2Runner +from spyder_unittest.backend.runnerbase import Category + + +def test_nose2runner_load_data(tmpdir): + result_file = tmpdir.join('results') + result_txt = """ + + + + + + text + + +""" + result_file.write(result_txt) + runner = Nose2Runner(None, result_file.strpath) + results = runner.load_data() + assert len(results) == 2 + + assert results[0].category == Category.OK + assert results[0].status == 'ok' + assert results[0].name == 'test_foo.test1' + assert results[0].message == '' + assert results[0].time == 0.04 + assert results[0].extra_text == [] + + assert results[1].category == Category.FAIL + assert results[1].status == 'failure' + assert results[1].name == 'test_foo.test2' + assert results[1].message == 'test failure' + assert results[1].time == 0.01 + assert results[1].extra_text == ['text'] diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_noserunner.py spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_noserunner.py --- spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_noserunner.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_noserunner.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright © 2013 Spyder Project Contributors -# Licensed under the terms of the MIT License -# (see LICENSE.txt for details) -"""Tests for noserunner.py""" - -# Local imports -from spyder_unittest.backend.noserunner import NoseRunner -from spyder_unittest.backend.runnerbase import Category - - -def test_noserunner_load_data(tmpdir): - result_file = tmpdir.join('results') - result_txt = """ - - - - text - - - text2 -""" - result_file.write(result_txt) - runner = NoseRunner(None, result_file.strpath) - results = runner.load_data() - assert 
len(results) == 3 - - assert results[0].category == Category.OK - assert results[0].status == 'ok' - assert results[0].name == 'test_foo.test1' - assert results[0].message == '' - assert results[0].time == 0.04 - assert results[0].extra_text == [] - - assert results[1].category == Category.FAIL - assert results[1].status == 'failure' - assert results[1].name == 'test_foo.test2' - assert results[1].message == 'failure message' - assert results[1].time == 0.01 - assert results[1].extra_text == ['text'] - - assert results[2].category == Category.SKIP - assert results[2].status == 'skipped' - assert results[2].name == 'test_foo.test3' - assert results[2].message == 'skip message' - assert results[2].time == 0.05 - assert results[2].extra_text == ['text2'] - - -def test_noserunner_load_data_failing_test_with_stdout(tmpdir): - result_file = tmpdir.join('results') - result_txt = """ - - -text -stdout text -""" - result_file.write(result_txt) - runner = NoseRunner(None, result_file.strpath) - results = runner.load_data() - assert results[0].extra_text == ['text', '', '----- Captured stdout -----', 'stdout text'] - - -def test_noserunner_load_data_passing_test_with_stdout(tmpdir): - result_file = tmpdir.join('results') - result_txt = """ - - -stdout text -""" - result_file.write(result_txt) - runner = NoseRunner(None, result_file.strpath) - results = runner.load_data() - assert results[0].extra_text == ['----- Captured stdout -----', 'stdout text'] - diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_pytestrunner.py spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_pytestrunner.py --- spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_pytestrunner.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_pytestrunner.py 2023-07-02 15:10:44.000000000 +0000 @@ -8,21 +8,28 @@ # Standard library imports import os.path as osp import sys -from unittest.mock import Mock +from unittest.mock import Mock, patch # Third party imports import pytest # Local imports -from spyder_unittest.backend.pytestrunner import (PyTestRunner, - logreport_to_testresult) +from spyder_unittest.backend.pytestrunner import PyTestRunner from spyder_unittest.backend.runnerbase import (Category, TestResult, COV_TEST_NAME) from spyder_unittest.widgets.configdialog import Config -def test_pytestrunner_create_argument_list(monkeypatch): - config = Config() +@pytest.fixture +def runner(): + res = PyTestRunner(None) + res.rootdir = 'ham' + res.config = Config(wdir='ham') + return res + + +def test_pytestrunner_create_argument_list(monkeypatch, runner): + config = Config(args=['--extra-arg']) cov_path = None MockZMQStreamReader = Mock() monkeypatch.setattr( @@ -34,9 +41,11 @@ runner.reader = mock_reader monkeypatch.setattr('spyder_unittest.backend.pytestrunner.os.path.dirname', lambda _: 'dir') - pyfile, port, *coverage = runner.create_argument_list(config, cov_path) + arg_list = runner.create_argument_list(config, cov_path, None) + pyfile, port, *coverage, last = arg_list assert pyfile == osp.join('dir', 'workers', 'pytestworker.py') assert port == '42' + assert last == '--extra-arg' def test_pytestrunner_start(monkeypatch): @@ -46,24 +55,23 @@ MockZMQStreamReader) mock_reader = MockZMQStreamReader() - MockRunnerBase = Mock(name='RunnerBase') - monkeypatch.setattr('spyder_unittest.backend.pytestrunner.RunnerBase', - MockRunnerBase) + mock_base_start = Mock() + monkeypatch.setattr('spyder_unittest.backend.unittestrunner.RunnerBase.start', + mock_base_start) runner = 
PyTestRunner(None, 'results') config = Config() cov_path = None - runner.start(config, cov_path, sys.executable, ['pythondir']) + runner.start(config, cov_path, sys.executable, ['pythondir'], None) assert runner.config is config assert runner.reader is mock_reader runner.reader.sig_received.connect.assert_called_once_with( runner.process_output) - MockRunnerBase.start.assert_called_once_with( - runner, config, cov_path, sys.executable, ['pythondir']) + mock_base_start.assert_called_once_with( + config, cov_path, sys.executable, ['pythondir'], None) -def test_pytestrunner_process_output_with_collected(qtbot): - runner = PyTestRunner(None) +def test_pytestrunner_process_output_with_collected(qtbot, runner): output = [{'event': 'collected', 'nodeid': 'spam.py::ham'}, {'event': 'collected', 'nodeid': 'eggs.py::bacon'}] with qtbot.waitSignal(runner.sig_collected) as blocker: @@ -72,8 +80,7 @@ assert blocker.args == [expected] -def test_pytestrunner_process_output_with_collecterror(qtbot): - runner = PyTestRunner(None) +def test_pytestrunner_process_output_with_collecterror(qtbot, runner): output = [{ 'event': 'collecterror', 'nodeid': 'ham/spam.py', @@ -85,8 +92,7 @@ assert blocker.args == [expected] -def test_pytestrunner_process_output_with_starttest(qtbot): - runner = PyTestRunner(None) +def test_pytestrunner_process_output_with_starttest(qtbot, runner): output = [{'event': 'starttest', 'nodeid': 'ham/spam.py::ham'}, {'event': 'starttest', 'nodeid': 'ham/eggs.py::bacon'}] with qtbot.waitSignal(runner.sig_starttest) as blocker: @@ -95,7 +101,7 @@ assert blocker.args == [expected] -@pytest.mark.parametrize('exitcode, normal_exit', +@pytest.mark.parametrize('exitcode, normal_exit', [(0, True), (1, True), (2, True), (3, False), (4, False), (5, True)]) def test_pytestrunner_finished(qtbot, exitcode, normal_exit): @@ -112,6 +118,39 @@ assert blocker.args == [results, output, normal_exit] +@pytest.mark.parametrize('wdir, expected', [ + ('ham', 'spam.eggs'), + (osp.join('ham', 'spam'), 'eggs'), + (osp.join('link-to-ham', 'spam'), 'eggs')]) +def test_normalize_module_name(runner, wdir, expected): + def new_realpath(name): + """Simulate link from `link-to-ham` to `ham`""" + if name.startswith('link-to-ham'): + return name[len('link-to-'):] + else: + return name + + with patch('spyder_unittest.backend.pytestrunner.osp.realpath', + side_effect=new_realpath): + runner.config = Config(wdir=wdir) + result = runner.normalize_module_name(osp.join('spam', 'eggs.py')) + assert result == expected + + +def test_convert_nodeid_to_testname(runner): + nodeid = osp.join('spam', 'eggs.py') + '::test_foo' + testname = 'spam.eggs.test_foo' + result = runner.convert_nodeid_to_testname(nodeid) + assert result == testname + + +def test_convert_testname_to_nodeid(runner): + nodeid = osp.join('spam', 'eggs.py') + '::test_foo' + testname = 'spam.eggs.test_foo' + result = runner.convert_testname_to_nodeid(testname) + assert result == nodeid + + def standard_logreport_output(): return { 'event': 'logreport', @@ -123,9 +162,8 @@ 'duration': 42 } -def test_pytestrunner_process_output_with_logreport_passed(qtbot): - runner = PyTestRunner(None) - runner.rootdir = 'ham' + +def test_pytestrunner_process_output_with_logreport_passed(qtbot, runner): output = [standard_logreport_output()] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) @@ -222,40 +260,39 @@ ('---', True, Category.FAIL) # ('---', False, this is not possible) ]) -def 
test_logreport_to_testresult_with_outcome_and_possible_error(outcome, - witherror, - category): +def test_logreport_to_testresult_with_outcome_and_possible_error( + runner, outcome, witherror, category): report = standard_logreport_output() report['outcome'] = outcome report['witherror'] = witherror expected = TestResult(category, outcome, 'foo.bar', time=42, filename=osp.join('ham', 'foo.py'), lineno=24) - assert logreport_to_testresult(report, 'ham') == expected + assert runner.logreport_to_testresult(report) == expected -def test_logreport_to_testresult_with_message(): +def test_logreport_to_testresult_with_message(runner): report = standard_logreport_output() report['message'] = 'msg' expected = TestResult(Category.OK, 'passed', 'foo.bar', message='msg', time=42, filename=osp.join('ham', 'foo.py'), lineno=24) - assert logreport_to_testresult(report, 'ham') == expected + assert runner.logreport_to_testresult(report) == expected -def test_logreport_to_testresult_with_extratext(): +def test_logreport_to_testresult_with_extratext(runner): report = standard_logreport_output() report['longrepr'] = 'long msg' expected = TestResult(Category.OK, 'passed', 'foo.bar', time=42, extra_text='long msg', filename=osp.join('ham', 'foo.py'), lineno=24) - assert logreport_to_testresult(report, 'ham') == expected + assert runner.logreport_to_testresult(report) == expected @pytest.mark.parametrize('longrepr,prefix', [ ('', ''), ('msg', '\n') ]) -def test_logreport_to_testresult_with_output(longrepr, prefix): +def test_logreport_to_testresult_with_output(runner, longrepr, prefix): report = standard_logreport_output() report['longrepr'] = longrepr report['sections'] = [['Captured stdout call', 'ham\n'], @@ -266,5 +303,4 @@ expected = TestResult(Category.OK, 'passed', 'foo.bar', time=42, extra_text=txt, filename=osp.join('ham', 'foo.py'), lineno=24) - assert logreport_to_testresult(report, 'ham') == expected - + assert runner.logreport_to_testresult(report) == expected diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_runnerbase.py spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_runnerbase.py --- spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_runnerbase.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_runnerbase.py 2023-07-02 15:10:44.000000000 +0000 @@ -25,10 +25,10 @@ config = Config(foo_runner.module, 'wdir', True) with pytest.raises(NotImplementedError): - foo_runner.create_argument_list(config, 'cov_path') + foo_runner.create_argument_list(config, 'cov_path', None) with pytest.raises(NotImplementedError): - foo_runner.finished() + foo_runner.finished(0) @pytest.mark.parametrize('pythonpath,env_pythonpath', [ @@ -81,12 +81,12 @@ runner = RunnerBase(None, 'results') runner._prepare_process = lambda c, p: mock_process - runner.create_argument_list = lambda c, cp: ['arg1', 'arg2'] + runner.create_argument_list = lambda c, cp, st: ['arg1', 'arg2'] config = Config('pytest', 'wdir', False) cov_path = None mock_process.waitForStarted = lambda: False with pytest.raises(RuntimeError): - runner.start(config, cov_path, 'python_exec', ['pythondir']) + runner.start(config, cov_path, 'python_exec', ['pythondir'], None) mock_process.start.assert_called_once_with('python_exec', ['arg1', 'arg2']) mock_remove.assert_called_once_with('results') diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_unittestrunner.py spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_unittestrunner.py --- 
spyder-unittest-0.5.1/spyder_unittest/backend/tests/test_unittestrunner.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/tests/test_unittestrunner.py 2023-07-02 15:10:44.000000000 +0000 @@ -5,165 +5,177 @@ # (see LICENSE.txt for details) """Tests for unittestrunner.py""" +# Standard library imports +import os.path as osp +import sys +from unittest.mock import Mock + # Local imports -from spyder_unittest.backend.runnerbase import Category from spyder_unittest.backend.unittestrunner import UnittestRunner +from spyder_unittest.backend.runnerbase import Category, TestResult +from spyder_unittest.widgets.configdialog import Config -def test_unittestrunner_load_data_with_two_tests(): - output = """test_isupper (teststringmethods.TestStringMethods) ... ok -test_split (teststringmethods.TestStringMethods) ... ok +def test_unittestrunner_create_argument_list(monkeypatch): + """ + Test that UnittestRunner.createArgumentList() returns the expected list. + """ + config = Config(args=['--extra-arg']) + cov_path = None + MockZMQStreamReader = Mock() + monkeypatch.setattr( + 'spyder_unittest.backend.unittestrunner.ZmqStreamReader', + MockZMQStreamReader) + mock_reader = MockZMQStreamReader() + mock_reader.port = 42 + runner = UnittestRunner(None, 'resultfile') + runner.reader = mock_reader + monkeypatch.setattr( + 'spyder_unittest.backend.unittestrunner.osp.dirname', + lambda _: 'dir') ----------------------------------------------------------------------- -Ran 2 tests in 0.012s + result = runner.create_argument_list(config, cov_path, None) -OK -""" - runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 2 + pyfile = osp.join('dir', 'workers', 'unittestworker.py') + assert result == [pyfile, '42', '--extra-arg'] - assert res[0].category == Category.OK - assert res[0].status == 'ok' - assert res[0].name == 'teststringmethods.TestStringMethods.test_isupper' - assert res[0].message == '' - assert res[0].extra_text == [] - assert res[1].category == Category.OK - assert res[1].status == 'ok' - assert res[1].name == 'teststringmethods.TestStringMethods.test_split' - assert res[1].message == '' - assert res[1].extra_text == [] +def test_unittestrunner_start(monkeypatch): + """ + Test that UnittestRunner.start() sets the .config and .reader members + correctly, that it connects to the reader's sig_received, and that it + called the base class method. + """ + MockZMQStreamReader = Mock() + monkeypatch.setattr( + 'spyder_unittest.backend.unittestrunner.ZmqStreamReader', + MockZMQStreamReader) + mock_reader = MockZMQStreamReader() + mock_base_start = Mock() + monkeypatch.setattr('spyder_unittest.backend.unittestrunner.RunnerBase.start', + mock_base_start) + runner = UnittestRunner(None, 'results') + config = Config() + cov_path = None + runner.start(config, cov_path, sys.executable, ['pythondir'], None) -def test_unittestrunner_load_data_with_one_test(): - output = """test1 (test_foo.Bar) ... 
ok + assert runner.config is config + assert runner.reader is mock_reader + runner.reader.sig_received.connect.assert_called_once_with( + runner.process_output) + mock_base_start.assert_called_once_with( + config, cov_path, sys.executable, ['pythondir'], None) ----------------------------------------------------------------------- -Ran 1 test in 0.000s -OK -""" +def test_unittestrunner_process_output_with_collected(qtbot): + """Test UnittestRunner.processOutput() with two `collected` events.""" runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 1 - assert res[0].category == Category.OK - assert res[0].status == 'ok' - assert res[0].name == 'test_foo.Bar.test1' - assert res[0].extra_text == [] - + output = [{'event': 'collected', 'id': 'spam.ham'}, + {'event': 'collected', 'id': 'eggs.bacon'}] -def test_unittestrunner_load_data_with_exception(): - output = """test1 (test_foo.Bar) ... FAIL -test2 (test_foo.Bar) ... ok + with qtbot.waitSignal(runner.sig_collected) as blocker: + runner.process_output(output) -====================================================================== -FAIL: test1 (test_foo.Bar) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "/somepath/test_foo.py", line 5, in test1 - self.assertEqual(1, 2) -AssertionError: 1 != 2 + expected = ['spam.ham', 'eggs.bacon'] + assert blocker.args == [expected] ----------------------------------------------------------------------- -Ran 2 tests in 0.012s -FAILED (failures=1) -""" +def test_unittestrunner_process_output_with_starttest(qtbot): + """Test UnittestRunner.processOutput() with two `startTest` events.""" runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 2 + output = [{'event': 'startTest', 'id': 'spam.ham'}, + {'event': 'startTest', 'id': 'eggs.bacon'}] - assert res[0].category == Category.FAIL - assert res[0].status == 'FAIL' - assert res[0].name == 'test_foo.Bar.test1' - assert res[0].extra_text[0].startswith('Traceback') - assert res[0].extra_text[-1].endswith('AssertionError: 1 != 2') + with qtbot.waitSignal(runner.sig_starttest) as blocker: + runner.process_output(output) - assert res[1].category == Category.OK - assert res[1].status == 'ok' - assert res[1].name == 'test_foo.Bar.test2' - assert res[1].extra_text == [] + expected = ['spam.ham', 'eggs.bacon'] + assert blocker.args == [expected] + + +def test_unittestrunner_process_output_with_addsuccess(qtbot): + """Test UnittestRunner.processOutput() with an `addSuccess` event.""" + runner = UnittestRunner(None) + output = [{'event': 'addSuccess', 'id': 'spam.ham'}] + with qtbot.waitSignal(runner.sig_testresult) as blocker: + runner.process_output(output) -def test_unittestrunner_load_data_with_comment(): - output = """test1 (test_foo.Bar) -comment ... ok -test2 (test_foo.Bar) ... 
ok + expected = [TestResult(Category.OK, 'success', 'spam.ham')] + assert blocker.args == [expected] ----------------------------------------------------------------------- -Ran 2 tests in 0.000s -OK -""" +def test_unittestrunner_process_output_with_addfailure(qtbot): + """Test UnittestRunner.processOutput() with an `addFailure` event.""" runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 2 + output = [{'event': 'addFailure', + 'id': 'spam.ham', + 'reason': 'exception', + 'err': 'traceback'}] - assert res[0].category == Category.OK - assert res[0].status == 'ok' - assert res[0].name == 'test_foo.Bar.test1' - assert res[0].extra_text == [] + with qtbot.waitSignal(runner.sig_testresult) as blocker: + runner.process_output(output) - assert res[1].category == Category.OK - assert res[1].status == 'ok' - assert res[1].name == 'test_foo.Bar.test2' - assert res[1].extra_text == [] + expected = [TestResult(Category.FAIL, 'failure', 'spam.ham', + message='exception', extra_text='traceback')] + assert blocker.args == [expected] -def test_unittestrunner_load_data_with_fail_and_comment(): - output = """test1 (test_foo.Bar) -comment ... FAIL +def test_unittestrunner_process_output_with_adderror(qtbot): + """Test UnittestRunner.processOutput() with an `addError` event.""" + runner = UnittestRunner(None) + output = [{'event': 'addError', + 'id': 'spam.ham', + 'reason': 'exception', + 'err': 'traceback'}] + + with qtbot.waitSignal(runner.sig_testresult) as blocker: + runner.process_output(output) -====================================================================== -FAIL: test1 (test_foo.Bar) -comment ----------------------------------------------------------------------- -Traceback (most recent call last): - File "/somepath/test_foo.py", line 30, in test1 - self.assertEqual(1, 2) -AssertionError: 1 != 2 + expected = [TestResult(Category.FAIL, 'error', 'spam.ham', + message='exception', extra_text='traceback')] + assert blocker.args == [expected] ----------------------------------------------------------------------- -Ran 1 test in 0.000s -FAILED (failures=1) -""" +def test_unittestrunner_process_output_with_addskip(qtbot): + """Test UnittestRunner.processOutput() with an `addSkip` event.""" runner = UnittestRunner(None) - res = runner.load_data(output) - assert len(res) == 1 + output = [{'event': 'addSkip', + 'id': 'spam.ham', + 'reason': 'skip reason'}] + + with qtbot.waitSignal(runner.sig_testresult) as blocker: + runner.process_output(output) - assert res[0].category == Category.FAIL - assert res[0].status == 'FAIL' - assert res[0].name == 'test_foo.Bar.test1' - assert res[0].extra_text[0].startswith('Traceback') - assert res[0].extra_text[-1].endswith('AssertionError: 1 != 2') + expected = [TestResult(Category.SKIP, 'skip', 'spam.ham', + message='skip reason')] + assert blocker.args == [expected] -def test_try_parse_header_with_ok(): +def test_unittestrunner_process_output_with_addexpectedfailure(qtbot): + """Test UnittestRunner.processOutput() with an `addExpectedFailure` event.""" runner = UnittestRunner(None) - lines = ['test_isupper (testfoo.TestStringMethods) ... 
ok'] - res = runner.try_parse_result(lines, 0) - assert res == (1, 'test_isupper', 'testfoo.TestStringMethods', 'ok', '') + output = [{'event': 'addExpectedFailure', + 'id': 'spam.ham', + 'reason': 'exception', + 'err': 'traceback'}] + with qtbot.waitSignal(runner.sig_testresult) as blocker: + runner.process_output(output) -def test_try_parse_header_with_xfail(): - runner = UnittestRunner(None) - lines = ['test_isupper (testfoo.TestStringMethods) ... expected failure'] - res = runner.try_parse_result(lines, 0) - assert res == (1, 'test_isupper', 'testfoo.TestStringMethods', - 'expected failure', '') + expected = [TestResult(Category.OK, 'expectedFailure', 'spam.ham', + message='exception', extra_text='traceback')] + assert blocker.args == [expected] -def test_try_parse_header_with_message(): +def test_unittestrunner_process_output_with_addunexpectedsuccess(qtbot): + """Test UnittestRunner.processOutput() with an `addUnexpectedSuccess` event.""" runner = UnittestRunner(None) - lines = ["test_nothing (testfoo.Tests) ... skipped 'msg'"] - res = runner.try_parse_result(lines, 0) - assert res == (1, 'test_nothing', 'testfoo.Tests', 'skipped', 'msg') + output = [{'event': 'addUnexpectedSuccess', 'id': 'spam.ham'}] + with qtbot.waitSignal(runner.sig_testresult) as blocker: + runner.process_output(output) -def test_try_parse_header_starting_with_digit(): - runner = UnittestRunner(None) - lines = ['0est_isupper (testfoo.TestStringMethods) ... ok'] - res = runner.try_parse_result(lines, 0) - assert res is None + expected = [TestResult(Category.FAIL, 'unexpectedSuccess', 'spam.ham')] + assert blocker.args == [expected] diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/unittestrunner.py spyder-unittest-0.6.0/spyder_unittest/backend/unittestrunner.py --- spyder-unittest-0.5.1/spyder_unittest/backend/unittestrunner.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/unittestrunner.py 2023-07-02 15:10:44.000000000 +0000 @@ -5,11 +5,16 @@ # (see LICENSE.txt for details) """Support for unittest framework.""" +from __future__ import annotations + # Standard library imports -import re +import os.path as osp +from typing import Any, Optional # Local imports +from spyder_unittest.widgets.configdialog import Config from spyder_unittest.backend.runnerbase import Category, RunnerBase, TestResult +from spyder_unittest.backend.zmqreader import ZmqStreamReader class UnittestRunner(RunnerBase): @@ -18,127 +23,79 @@ module = 'unittest' name = 'unittest' - def create_argument_list(self, config, cov_path): + def create_argument_list(self, config: Config, + cov_path: Optional[str], + single_test: Optional[str]) -> list[str]: """Create argument list for testing process.""" - return ['-m', self.module, 'discover', '-v'] + dirname = osp.dirname(__file__) + pyfile = osp.join(dirname, 'workers', 'unittestworker.py') + arguments = [pyfile, str(self.reader.port)] + if single_test: + arguments.append(single_test) + arguments += config.args + return arguments + + def start(self, config: Config, cov_path: Optional[str], + executable: str, pythonpath: list[str], + single_test: Optional[str]) -> None: + """Start process which will run the unit test suite.""" + self.config = config + self.reader = ZmqStreamReader() + self.reader.sig_received.connect(self.process_output) + super().start(config, cov_path, executable, pythonpath, single_test) - def finished(self): + def finished(self, exitcode: int) -> None: """ Called when the unit test process has finished. 
- This function reads the results and emits `sig_finished`. + This function reads the process output and emits `sig_finished`. """ + self.reader.close() output = self.read_all_process_output() - testresults = self.load_data(output) - self.sig_finished.emit(testresults, output, True) - - def load_data(self, output): - """ - Read and parse output from unittest module. - - Any parsing errors are silently ignored. - - Returns - ------- - list of TestResult - Unit test results. - """ - res = [] - lines = output.splitlines() - line_index = 0 - - try: - while lines[line_index]: - data = self.try_parse_result(lines, line_index) - if data: - line_index = data[0] - if data[3] == 'ok': - cat = Category.OK - elif data[3] == 'FAIL' or data[3] == 'ERROR': - cat = Category.FAIL - else: - cat = Category.SKIP - name = '{}.{}'.format(data[2], data[1]) - tr = TestResult(category=cat, status=data[3], name=name, - message=data[4]) - res.append(tr) - else: - line_index += 1 - - line_index += 1 - while not (lines[line_index] - and all(c == '-' for c in lines[line_index])): - data = self.try_parse_exception_block(lines, line_index) - if data: - line_index = data[0] - test_index = next( - i for i, tr in enumerate(res) - if tr.name == '{}.{}'.format(data[2], data[1])) - res[test_index].extra_text = data[3] - else: - line_index += 1 - except IndexError: - pass + self.sig_finished.emit([], output, True) - return res - - def try_parse_result(self, lines, line_index): - """ - Try to parse one or more lines of text as a test result. - - Returns - ------- - (int, str, str, str, str) or None - If a test result is parsed successfully then return a tuple with - the line index of the first line after the test result, the name - of the test function, the name of the test class, the test result, - and the reason (if no reason is given, the fourth string is empty). - Otherwise, return None. + def process_output(self, output: list[dict[str, Any]]) -> None: """ - regexp = r'([^\d\W]\w*) \(([^\d\W][\w.]*)\)' - match = re.match(regexp, lines[line_index]) - if match: - function_name = match.group(1) - class_name = match.group(2) - else: - return None - while lines[line_index]: - regexp = (r' \.\.\. (ok|FAIL|ERROR|skipped|expected failure|' - r"unexpected success)( '([^']*)')?\Z") - match = re.search(regexp, lines[line_index]) - if match: - result = match.group(1) - msg = match.group(3) or '' - return (line_index + 1, function_name, class_name, result, msg) - line_index += 1 - return None + Process output of test process. - def try_parse_exception_block(self, lines, line_index): - """ - Try to parse a block detailing an exception in unittest output. - - Returns - ------- - (int, str, str, list of str) or None - If an exception block is parsed successfully, then return a tuple - with the line index of the first line after the block, the name of - the test function, the name of the test class, and the text of the - exception. Otherwise, return None. 
- """ - if not all(char == '=' for char in lines[line_index]): - return None - regexp = r'\w+: ([^\d\W]\w*) \(([^\d\W][\w.]*)\)\Z' - match = re.match(regexp, lines[line_index + 1]) - if not match: - return None - line_index += 1 - while not all(char == '-' for char in lines[line_index]): - if not lines[line_index]: - return None - line_index += 1 - line_index += 1 - exception_text = [] - while lines[line_index]: - exception_text.append(lines[line_index]) - line_index += 1 - return (line_index, match.group(1), match.group(2), exception_text) + Parameters + ---------- + output : list + list of decoded Python object sent by test process. + """ + collected_list = [] + starttest_list = [] + result_list = [] + + for result_item in output: + if result_item['event'] == 'collected': + collected_list.append(result_item['id']) + elif result_item['event'] == 'startTest': + starttest_list.append(result_item['id']) + elif result_item['event'].startswith('add'): + testresult = add_event_to_testresult(result_item) + result_list.append(testresult) + + if collected_list: + self.sig_collected.emit(collected_list) + if starttest_list: + self.sig_starttest.emit(starttest_list) + if result_list: + self.sig_testresult.emit(result_list) + + +def add_event_to_testresult(event: dict[str, Any]) -> TestResult: + """Convert an addXXX event sent by test process to a TestResult.""" + status = event['event'][3].lower() + event['event'][4:] + if status in ('error', 'failure', 'unexpectedSuccess'): + cat = Category.FAIL + elif status in ('success', 'expectedFailure'): + cat = Category.OK + else: + cat = Category.SKIP + testname = event['id'] + message = event.get('reason', '') + extra_text = event.get('err', '') + result = TestResult(cat, status, testname, message=message, + extra_text=extra_text) + return result diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/__init__.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/__init__.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/__init__.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/__init__.py 2023-07-02 15:10:44.000000000 +0000 @@ -14,4 +14,4 @@ Dependencies should be kept to a minimum, because they need to be installed in each target environment. -""" \ No newline at end of file +""" diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/print_versions.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/print_versions.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/print_versions.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/print_versions.py 2023-07-02 15:10:44.000000000 +0000 @@ -37,23 +37,21 @@ 'plugins': plugins} -def get_nose_info(): - """Return information about nose.""" - from pkg_resources import iter_entry_points +def get_nose2_info(): + """ + Return information about nose2. + + This only returns the version of nose2. The function does not gather any + information about plugins. 
+ """ try: - import nose + import nose2 except ImportError: return {'available': False} - plugins = {} - for entry_point, _ in (nose.plugins.manager.EntryPointPluginManager - .entry_points): - for ep in iter_entry_points(entry_point): - plugins[ep.dist.project_name] = ep.dist.version - return {'available': True, - 'version': nose.__version__, - 'plugins': plugins} + 'version': nose2.__version__, + 'plugins': {}} def get_unittest_info(): @@ -75,11 +73,11 @@ Information is returned as a dictionary like the following: {'pytest': {'available': True, 'version': '7.1.1', 'plugins': {'flaky': '3.7.0', 'pytest-mock': '3.6.1'}}, - 'nose': {'available': False}, + 'nose2': {'available': False}, 'unittest': {'available': True, 'version': '3.10.5', 'plugins': {}}} """ return {'pytest': get_pytest_info(), - 'nose': get_nose_info(), + 'nose2': get_nose2_info(), 'unittest': get_unittest_info()} diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/pytestworker.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/pytestworker.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/pytestworker.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/pytestworker.py 2023-07-02 15:10:44.000000000 +0000 @@ -17,29 +17,10 @@ # Third party imports import pytest -# Local imports, needs to be relative otherwise it will fail if trying -# to execute in a different env with only spyder-kernel installed -try: - # this line is needed for the tests to succeed - from .zmqwriter import ZmqStreamWriter -except: - # this line is needed for the plugin to work - from zmqwriter import ZmqStreamWriter - -class FileStub(): - """Stub for ZmqStreamWriter which instead writes to a file.""" - - def __init__(self, filename): - """Constructor; connect to specified filename.""" - self.file = open(filename, 'w') - - def write(self, obj): - """Write Python object to file.""" - self.file.write(str(obj) + '\n') - - def close(self): - """Close file.""" - self.file.close() +# Local imports +# Note that the script can be run in an environment that does not contain +# spyder_unittest so `from spyder_unittest.xxx import xxx` does not work. 
+from zmqwriter import FileStub, ZmqStreamWriter class SpyderPlugin(): diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/tests/test_print_versions.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/tests/test_print_versions.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/tests/test_print_versions.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/tests/test_print_versions.py 2023-07-02 15:10:44.000000000 +0000 @@ -6,7 +6,7 @@ """Tests for print_versions.py""" from spyder_unittest.backend.workers.print_versions import ( - get_nose_info, get_pytest_info, get_unittest_info) + get_nose2_info, get_pytest_info, get_unittest_info) def test_get_pytest_info_without_plugins(monkeypatch): @@ -37,30 +37,11 @@ assert get_pytest_info() == expected -def test_get_nose_info_without_plugins(monkeypatch): - import nose - import pkg_resources - monkeypatch.setattr(nose, '__version__', '1.2.3') - monkeypatch.setattr(pkg_resources, 'iter_entry_points', lambda x: ()) +def test_get_nose2_info(monkeypatch): + import nose2 + monkeypatch.setattr(nose2, '__version__', '1.2.3') expected = {'available': True, 'version': '1.2.3', 'plugins': {}} - assert get_nose_info() == expected - - -def test_get_nose_info_with_plugins(monkeypatch): - import nose - import pkg_resources - monkeypatch.setattr(nose, '__version__', '1.2.3') - dist = pkg_resources.Distribution(project_name='myPlugin', - version='4.5.6') - ep = pkg_resources.EntryPoint('name', 'module_name', dist=dist) - monkeypatch.setattr(pkg_resources, - 'iter_entry_points', - lambda ept: (x for x in (ep,) if ept == nose.plugins - .manager.EntryPointPluginManager - .entry_points[0][0])) - expected = {'available': True, 'version': '1.2.3', - 'plugins': {'myPlugin': '4.5.6'}} - assert get_nose_info() == expected + assert get_nose2_info() == expected def test_get_unittest_imfo(monkeypatch): diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/tests/test_pytestworker.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/tests/test_pytestworker.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/tests/test_pytestworker.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/tests/test_pytestworker.py 2023-07-02 15:10:44.000000000 +0000 @@ -7,14 +7,22 @@ # Standard library imports import os +import os.path as osp +import sys from unittest.mock import create_autospec, MagicMock, Mock # Third party imports import pytest # Local imports +# Local imports +# Modules in spyder_unittest.backend.workers assume that their directory +# is in `sys.path`, so add that directory to the path. 
+old_path = sys.path +sys.path.insert(0, osp.join(osp.dirname(__file__), osp.pardir)) from spyder_unittest.backend.workers.pytestworker import SpyderPlugin, main from spyder_unittest.backend.workers.zmqwriter import ZmqStreamWriter +sys.path = old_path class EmptyClass: @@ -305,49 +313,66 @@ }) -def test_pytestworker_integration(monkeypatch, tmpdir): - os.chdir(tmpdir.strpath) - testfilename = tmpdir.join('test_foo.py').strpath - with open(testfilename, 'w') as f: - f.write("def test_ok(): assert 1+1 == 2\n" - "def test_fail(): assert 1+1 == 3\n") +@pytest.fixture(scope='module') +def testfile_path(tmp_path_factory): + tmp_path = tmp_path_factory.mktemp('pytestworker') + res = tmp_path / 'test_pytestworker_foo.py' + res.write_text('def test_ok(): assert 1+1 == 2\n' + 'def test_fail(): assert 1+1 == 3\n') + return res + +@pytest.mark.parametrize('alltests', [True, False]) +def test_pytestworker_integration(monkeypatch, testfile_path, alltests): mock_writer = create_autospec(ZmqStreamWriter) MockZmqStreamWriter = Mock(return_value=mock_writer) monkeypatch.setattr( 'spyder_unittest.backend.workers.pytestworker.ZmqStreamWriter', MockZmqStreamWriter) - main(['mockscriptname', '42', testfilename]) - args = mock_writer.write.call_args_list + os.chdir(testfile_path.parent) + testfilename = testfile_path.name + pytest_args = ['mockscriptname', '42'] + if not alltests: + pytest_args.append(f'{testfilename}::test_ok') + main(pytest_args) - assert args[0][0][0]['event'] == 'config' - assert 'rootdir' in args[0][0][0] + args = mock_writer.write.call_args_list + messages = [arg[0][0] for arg in args] + assert len(messages) == (7 if alltests else 4) - assert args[1][0][0]['event'] == 'collected' - assert args[1][0][0]['nodeid'] == 'test_foo.py::test_ok' + assert messages[0]['event'] == 'config' + assert 'rootdir' in messages[0] - assert args[2][0][0]['event'] == 'collected' - assert args[2][0][0]['nodeid'] == 'test_foo.py::test_fail' + assert messages[1]['event'] == 'collected' + assert messages[1]['nodeid'] == f'{testfilename}::test_ok' - assert args[3][0][0]['event'] == 'starttest' - assert args[3][0][0]['nodeid'] == 'test_foo.py::test_ok' - - assert args[4][0][0]['event'] == 'logreport' - assert args[4][0][0]['outcome'] == 'passed' - assert args[4][0][0]['nodeid'] == 'test_foo.py::test_ok' - assert args[4][0][0]['sections'] == [] - assert args[4][0][0]['filename'] == 'test_foo.py' - assert args[4][0][0]['lineno'] == 0 - assert 'duration' in args[4][0][0] - - assert args[5][0][0]['event'] == 'starttest' - assert args[5][0][0]['nodeid'] == 'test_foo.py::test_fail' - - assert args[6][0][0]['event'] == 'logreport' - assert args[6][0][0]['outcome'] == 'failed' - assert args[6][0][0]['nodeid'] == 'test_foo.py::test_fail' - assert args[6][0][0]['sections'] == [] - assert args[6][0][0]['filename'] == 'test_foo.py' - assert args[6][0][0]['lineno'] == 1 - assert 'duration' in args[6][0][0] + if alltests: + n = 3 + assert messages[2]['event'] == 'collected' + assert messages[2]['nodeid'] == f'{testfilename}::test_fail' + else: + n = 2 + + assert messages[n]['event'] == 'starttest' + assert messages[n]['nodeid'] == f'{testfilename}::test_ok' + + assert messages[n+1]['event'] == 'logreport' + assert messages[n+1]['outcome'] == 'passed' + assert messages[n+1]['nodeid'] == f'{testfilename}::test_ok' + assert messages[n+1]['sections'] == [] + assert messages[n+1]['filename'] == testfilename + assert messages[n+1]['lineno'] == 0 + assert 'duration' in messages[n+1] + + if alltests: + assert messages[n+2]['event'] == 
'starttest' + assert messages[n+2]['nodeid'] == f'{testfilename}::test_fail' + + assert messages[n+3]['event'] == 'logreport' + assert messages[n+3]['outcome'] == 'failed' + assert messages[n+3]['nodeid'] == f'{testfilename}::test_fail' + assert messages[n+3]['sections'] == [] + assert messages[n+3]['filename'] == testfilename + assert messages[n+3]['lineno'] == 1 + assert 'duration' in messages[n+3] diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/tests/test_unittestworker.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/tests/test_unittestworker.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/tests/test_unittestworker.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/tests/test_unittestworker.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2017 Spyder Project Contributors +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +"""Tests for pytestworker.py""" + +# Standard library imports +import os +import os.path as osp +import sys +import unittest +from unittest.mock import call, create_autospec, Mock + +# Third-party imports +import pytest + +# Local imports +# Modules in spyder_unittest.backend.workers assume that their directory +# is in `sys.path`, so add that directory to the path. +old_path = sys.path +sys.path.insert(0, osp.join(osp.dirname(__file__), osp.pardir)) +from spyder_unittest.backend.workers.unittestworker import ( + main, report_collected, SpyderTestResult) +from spyder_unittest.backend.workers.zmqwriter import ZmqStreamWriter +sys.path = old_path + + +class MyTest(unittest.TestCase): + """Simple test class.""" + def first(): pass + def second(): pass + + +@pytest.fixture +def testresult(): + mock_writer = create_autospec(ZmqStreamWriter) + my_testresult = SpyderTestResult( + stream=Mock(), descriptions=True, verbosity=2) + my_testresult.writer = mock_writer + my_testresult._exc_info_to_string = lambda err, test: 'some exception info' + return my_testresult + + +def test_spydertestresult_starttest(testresult): + """Test that SpyderTestResult.startTest() writes the correct info.""" + test = MyTest(methodName='first') + testresult.startTest(test) + expected = {'event': 'startTest', 'id': test.id()} + testresult.writer.write.assert_called_once_with(expected) + + +def test_spydertestresult_addsuccess(testresult): + """Test that SpyderTestResult.addSuccess() writes the correct info.""" + test = MyTest(methodName='first') + testresult.addSuccess(test) + expected = {'event': 'addSuccess', 'id': test.id()} + testresult.writer.write.assert_called_once_with(expected) + + +def test_spydertestresult_addfailure(testresult): + """Test that SpyderTestResult.addFailure() writes the correct info.""" + test = MyTest(methodName='first') + err = ('notused', AssertionError('xxx'), 'notused') + testresult.addFailure(test, err) + expected = {'event': 'addFailure', + 'id': test.id(), + 'reason': 'AssertionError: xxx', + 'err': 'some exception info'} + testresult.writer.write.assert_called_once_with(expected) + + +def test_spydertestresult_adderror(testresult): + """Test that SpyderTestResult.addError() writes the correct info.""" + test = MyTest(methodName='first') + err = ('notused', AssertionError('xxx'), 'notused') + testresult.addError(test, err) + expected = {'event': 'addError', + 'id': test.id(), + 'reason': 'AssertionError: xxx', + 'err': 'some exception info'} + testresult.writer.write.assert_called_once_with(expected) + + 
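For reference, a minimal standalone sketch of the failure event checked in these tests: it builds the (type, value, traceback) triple with sys.exc_info(), the same shape unittest passes to addFailure(), and assembles the 'event', 'id', 'reason' and 'err' fields asserted above. The Demo class and the fixed 'err' string are illustrative stand-ins, not part of the plugin.

import sys
import unittest


class Demo(unittest.TestCase):
    def test_fail(self):
        self.assertEqual(1 + 1, 3)


test = Demo(methodName='test_fail')
try:
    # Trigger the failure directly to obtain the exception triple that
    # unittest would pass to SpyderTestResult.addFailure().
    test.test_fail()
except AssertionError:
    err = sys.exc_info()

value = err[1]
event = {
    'event': 'addFailure',
    'id': test.id(),  # e.g. '__main__.Demo.test_fail'
    'reason': f'{type(value).__name__}: {str(value).splitlines()[0]}',
    # In the real result class this comes from _exc_info_to_string(err, test);
    # a fixed string stands in for the formatted traceback here.
    'err': 'some exception info',
}
print(event)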
+def test_spydertestresult_addskip(testresult): + """Test that SpyderTestResult.addSkip() writes the correct info.""" + test = MyTest(methodName='first') + reason = 'my reason' + testresult.addSkip(test, reason) + expected = {'event': 'addSkip', + 'id': test.id(), + 'reason': reason} + testresult.writer.write.assert_called_once_with(expected) + + +def test_spydertestresult_addexpectedfailure(testresult): + """Test that SpyderTestResult.addExpectedFailure() writes the correct info.""" + test = MyTest(methodName='first') + err = ('notused', AssertionError('xxx'), 'notused') + testresult.addExpectedFailure(test, err) + expected = {'event': 'addExpectedFailure', + 'id': test.id(), + 'reason': 'AssertionError: xxx', + 'err': 'some exception info'} + testresult.writer.write.assert_called_once_with(expected) + + +def test_spydertestresult_addunexpectedsuccess(testresult): + """Test that SpyderTestResult.addUnexpectedSuccess() writes the correct info.""" + test = MyTest(methodName='first') + testresult.addUnexpectedSuccess(test) + expected = {'event': 'addUnexpectedSuccess', 'id': test.id()} + testresult.writer.write.assert_called_once_with(expected) + + +def test_unittestworker_report_collected(): + """ + Test that report_collected() with a test suite containing two tests + writes two `collected` events to the ZMQ stream. + """ + mock_writer = create_autospec(ZmqStreamWriter) + test1 = MyTest(methodName='first') + test2 = MyTest(methodName='second') + test_suite_inner = unittest.TestSuite([test1, test2]) + test_suite = unittest.TestSuite([test_suite_inner]) + + report_collected(mock_writer, test_suite) + + expected = [call({'event': 'collected', 'id': test1.id()}), + call({'event': 'collected', 'id': test2.id()})] + assert mock_writer.write.mock_calls == expected + + +@pytest.fixture(scope='module') +def testfile_path(tmp_path_factory): + tmp_path = tmp_path_factory.mktemp('unittestworker') + res = tmp_path / 'test_unittestworker_foo.py' + res.write_text('import unittest\n' + 'class MyTest(unittest.TestCase):\n' + ' def test_ok(self): self.assertEqual(1+1, 2)\n' + ' def test_fail(self): self.assertEqual(1+1, 3)\n') + return res + + +@pytest.mark.parametrize('alltests', [True, False]) +def test_unittestworker_main(monkeypatch, testfile_path, alltests): + """ + Test that the main function with some tests writes the expected + output to the ZMQ stream. 
+ """ + mock_writer = create_autospec(ZmqStreamWriter) + MockZmqStreamWriter = Mock(return_value=mock_writer) + monkeypatch.setattr( + 'spyder_unittest.backend.workers.unittestworker.ZmqStreamWriter', + MockZmqStreamWriter) + + os.chdir(testfile_path.parent) + testfilename = testfile_path.stem # `stem` removes the .py suffix + main_args = ['mockscriptname', '42'] + if not alltests: + main_args.append(f'{testfilename}.MyTest.test_fail') + main(main_args) + + args = mock_writer.write.call_args_list + messages = [arg[0][0] for arg in args] + assert len(messages) == (6 if alltests else 3) + + assert messages[0]['event'] == 'collected' + assert messages[0]['id'] == f'{testfilename}.MyTest.test_fail' + + if alltests: + n = 2 + assert messages[1]['event'] == 'collected' + assert messages[1]['id'] == f'{testfilename}.MyTest.test_ok' + else: + n = 1 + + assert messages[n]['event'] == 'startTest' + assert messages[n]['id'] == f'{testfilename}.MyTest.test_fail' + + assert messages[n+1]['event'] == 'addFailure' + assert messages[n+1]['id'] == f'{testfilename}.MyTest.test_fail' + assert 'AssertionError' in messages[n+1]['reason'] + assert 'assertEqual(1+1, 3)' in messages[n+1]['err'] + + if alltests: + assert messages[n+2]['event'] == 'startTest' + assert messages[n+2]['id'] == f'{testfilename}.MyTest.test_ok' + + assert messages[n+3]['event'] == 'addSuccess' + assert messages[n+3]['id'] == f'{testfilename}.MyTest.test_ok' diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/unittestworker.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/unittestworker.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/unittestworker.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/unittestworker.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# +# Copyright © Spyder Project Contributors +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +""" +Script for running unittest tests. + +This script is meant to be run in a separate process by a UnittestRunner. +It runs tests via the unittest framework and transmits the results over a ZMQ +socket so that the UnittestRunner can read them. + +Usage: python unittestworker.py port [testname] + +Here, `port` is the port number of the ZMQ socket. Use `file` to store the +results in the file `unittestworker.json`. The optional argument `testname` +is the test to run; if omitted, run all tests. +""" + +from __future__ import annotations + +# Standard library imports +import os +import sys +from typing import ClassVar +from unittest import ( + defaultTestLoader, TestCase, TestSuite, TextTestResult, TextTestRunner) + +# Local imports +# Note that the script can be run in an environment that does not contain +# spyder_unittest so `from spyder_unittest.xxx import xxx` does not work. +from zmqwriter import FileStub, ZmqStreamWriter + + +class SpyderTestResult(TextTestResult): + """ + Store test results and write them to a ZmqStreamWriter. + + The member `.writer` should be set to a ZmqStreamWriter before + running any tests. 
+ """ + + writer: ClassVar[ZmqStreamWriter] + + def startTest(self, test: TestCase) -> None: + self.writer.write({ + 'event': 'startTest', + 'id': test.id() + }) + super().startTest(test) + + def addSuccess(self, test: TestCase) -> None: + self.writer.write({ + 'event': 'addSuccess', + 'id': test.id() + }) + super().addSuccess(test) + + def addError(self, test: TestCase, err) -> None: + (__, value, __) = err + first_line = str(value).splitlines()[0] + self.writer.write({ + 'event': 'addError', + 'id': test.id(), + 'reason': f'{type(value).__name__}: {first_line}', + 'err': self._exc_info_to_string(err, test) + }) + super().addError(test, err) + + def addFailure(self, test: TestCase, err) -> None: + (__, value, __) = err + first_line = str(value).splitlines()[0] + self.writer.write({ + 'event': 'addFailure', + 'id': test.id(), + 'reason': f'{type(value).__name__}: {first_line}', + 'err': self._exc_info_to_string(err, test) + }) + super().addFailure(test, err) + + def addSkip(self, test: TestCase, reason: str) -> None: + self.writer.write({ + 'event': 'addSkip', + 'id': test.id(), + 'reason': reason + }) + super().addSkip(test, reason) + + def addExpectedFailure(self, test: TestCase, err) -> None: + (__, value, __) = err + first_line = str(value).splitlines()[0] + self.writer.write({ + 'event': 'addExpectedFailure', + 'id': test.id(), + 'reason': f'{type(value).__name__}: {first_line}', + 'err': self._exc_info_to_string(err, test) + }) + super().addExpectedFailure(test, err) + + def addUnexpectedSuccess(self, test: TestCase) -> None: + self.writer.write({ + 'event': 'addUnexpectedSuccess', + 'id': test.id() + }) + super().addUnexpectedSuccess(test) + + +def report_collected(writer: ZmqStreamWriter, test_suite: TestSuite) -> None: + for test in test_suite: + if isinstance(test, TestSuite): + report_collected(writer, test) + else: + writer.write({ + 'event': 'collected', + 'id': test.id() + }) + + +def main(args: list[str]) -> None: + """Run unittest tests.""" + # Parse first command line argument and create writer + if args[1] != 'file': + writer = ZmqStreamWriter(args[1]) + else: + writer = FileStub('unittestworker.log') + SpyderTestResult.writer = writer + + # Gather tests + if args[2:]: + # Add cwd to path so that modules can be found + sys.path = [os.getcwd()] + sys.path + test_suite = defaultTestLoader.loadTestsFromNames(args[2:]) + else: + test_suite = defaultTestLoader.discover('.') + report_collected(writer, test_suite) + + # Run tests + test_runner = TextTestRunner(verbosity=2, resultclass=SpyderTestResult) + test_runner.run(test_suite) + writer.close() + + +if __name__ == '__main__': + main(sys.argv) diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/workers/zmqwriter.py spyder-unittest-0.6.0/spyder_unittest/backend/workers/zmqwriter.py --- spyder-unittest-0.5.1/spyder_unittest/backend/workers/zmqwriter.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/workers/zmqwriter.py 2023-07-02 15:10:44.000000000 +0000 @@ -22,13 +22,13 @@ class ZmqStreamWriter: """Writer for sending stream of Python object over a ZMQ stream.""" - def __init__(self, port): + def __init__(self, port: str) -> None: """ Constructor. Arguments --------- - port : int + port : str TCP port number to be used for the stream. This should equal the `port` attribute of the corresponding `ZmqStreamReader`. 
""" @@ -36,15 +36,31 @@ self.socket = context.socket(zmq.PAIR) self.socket.connect('tcp://localhost:{}'.format(port)) - def write(self, obj): + def write(self, obj: object) -> None: """Write arbitrary Python object to stream.""" self.socket.send_pyobj(obj) - def close(self): + def close(self) -> None: """Close stream.""" self.socket.close() +class FileStub(ZmqStreamWriter): + """Stub for ZmqStreamWriter which instead writes to a file.""" + + def __init__(self, filename: str) -> None: + """Constructor; connect to specified filename.""" + self.file = open(filename, 'w') + + def write(self, obj: object) -> None: + """Write Python object to file.""" + self.file.write(str(obj) + '\n') + + def close(self) -> None: + """Close file.""" + self.file.close() + + if __name__ == '__main__': # Usage: python zmqwriter.py # Construct a ZMQ stream on the given port number and send the number 42 diff -Nru spyder-unittest-0.5.1/spyder_unittest/backend/zmqreader.py spyder-unittest-0.6.0/spyder_unittest/backend/zmqreader.py --- spyder-unittest-0.5.1/spyder_unittest/backend/zmqreader.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/backend/zmqreader.py 2023-07-02 15:10:44.000000000 +0000 @@ -36,7 +36,7 @@ sig_received = Signal(object) - def __init__(self): + def __init__(self) -> None: """Constructor; also constructs ZMQ stream.""" super().__init__() self.context = zmq.Context() @@ -46,7 +46,7 @@ self.notifier = QSocketNotifier(fid, QSocketNotifier.Read, self) self.notifier.activated.connect(self.received_message) - def received_message(self): + def received_message(self) -> None: """Called when a message is received.""" self.notifier.setEnabled(False) messages = [] @@ -61,7 +61,7 @@ if messages: self.sig_received.emit(messages) - def close(self): + def close(self) -> None: """Read any remaining messages and close stream.""" self.received_message() # Flush remaining messages self.notifier.setEnabled(False) Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-25 20:37\n" "Last-Translator: \n" "Language-Team: German\n" "MIME-Version: 1.0\n" @@ -17,205 +17,225 @@ "X-Crowdin-File-ID: 49\n" "Language: de_DE\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" -msgstr "" +msgstr "Erfasstes stdout" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" -msgstr "" +msgstr "Erfasstes stderr" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "(keine)" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "Fehlt: {}" -#: 
spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "{}\n" "{}" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "Unit-Tests" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" -msgstr "" +msgstr "Testsuiten ausführen und ihre Ergebnisse ansehen" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "Unit-Tests ausführen" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "Tests konfigurieren" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" -msgstr "" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" +msgstr "Test-Framework:" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "nicht verfügbar" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "Befehlszeilenargumente:" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "Zusätzliche Befehlszeilenargumente bei der Ausführung von Tests" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "Abdeckungsbericht in Ausgabe einbeziehen" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "Funktioniert nur für pytest, erfordert pytest-cov" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" -msgstr "Verzeichnis, aus dem Tests ausgeführt werden sollen" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" +msgstr "Verzeichnis, aus dem Tests ausgeführt werden sollen:" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "Verzeichnis auswählen" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "Einstellungen" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "Testnamen abkürzen" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "Nachricht" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "Name" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "Status" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "Zeit (ms)" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "Einklappen" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "Ausklappen" -#: 
spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "Zur Definition gehen" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "Nur diesen Test ausführen" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "Test" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "Tests" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "Keine anzuzeigenden Ergebnisse." -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "{} gesammelt" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "{} fehlgeschlagen" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr ", {} bestanden" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr ", {} andere" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr ", {} ausstehend" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr ", {} Abdeckung" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "Konfigurieren ..." -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "Ausgabe anzeigen" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "Alle einklappen" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "Alle ausklappen" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "Abhängigkeiten" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "Ausgabe der Unit-Tests" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" -msgstr "" +msgstr "Versionen von Frameworks und deren installierte Plugins:" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "Fehler" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "Prozess konnte nicht gestartet werden" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "Tests laufen ..." 
-#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "Stopp" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "Aktuellen Testprozess stoppen" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "Tests ausführen" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" -msgstr "" +msgstr "Testprozess wurde abnormal beendet" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" -msgstr "" +msgstr "nicht ausgeführt" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "ausstehend" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "läuft" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "Fehlschlag" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" -msgstr "" +msgstr "Sammlungsfehler" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Spanish\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: es_ES\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 
spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." 
msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-24 20:15\n" "Last-Translator: \n" "Language-Team: French\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: fr_FR\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "Stdout récupéré" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "Stderr récupéré" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "Tests unitaires" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "Lancement des tests 
unitaires" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "Configurer les tests" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" -msgstr "Système de test" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" +msgstr "Système de test:" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "non disponible" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" -msgstr "Répertoire depuis lequel exécuter les tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" +msgstr "Répertoire depuis lequel exécuter les tests:" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "Sélectionner un dossier" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "Message" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "Nom" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "État " -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "Durée (ms)" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "Réduire" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "Déployer" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "Aller à la définition" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "test" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "tests" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "Aucun résultat à afficher." 
-#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "{} collectés" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "{} échec" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr ", {} réussi" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr ", {} autres" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr ", {} en cours" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "Configurer ..." -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "Afficher les résultats" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "Tout réduire" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "Tout déployer" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "Résultats de tests unitaires" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "Erreur" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "Le processus n'a pas pu démarrer" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "Exécution des tests ..." 
-#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "Arrêter" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "Arrêter le processus de test en cours" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "Lancer les tests" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "non lancé" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "en attente" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "en cours" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "échec" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "erreur de collecte" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-23 20:18\n" "Last-Translator: \n" "Language-Team: Croatian\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: hr_HR\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 
spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." 
msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Hungarian\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: hu_HU\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: 
spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." 
msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Japanese\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: ja_JP\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: 
spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." 
msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Polish\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: pl_PL\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: 
spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." 
msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-24 20:15\n" "Last-Translator: \n" "Language-Team: Portuguese, Brazilian\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: pt_BR\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "stdout capturado" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "stderr capturado" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "Teste unitário" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit 
tests" msgstr "Executar testes unitários" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "Configurar testes" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" -msgstr "Testar framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" +msgstr "Testar framework:" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "indisponível" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" -msgstr "Diretório para execução dos testes" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" +msgstr "Diretório para execução dos testes:" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "Selecione o diretório" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "Mensagem" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "Nome" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "Status" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "Tempo (ms)" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "Recolher" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "Expandir" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "Ir para definição" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "teste" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "testes" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "Sem resultados para exibir." 
-#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "coletado {}" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "{} falhou" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr ", {} passou" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr ", {} outro" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr ", {} pendente" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "Configurar ..." -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "Mostrar saída" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "Recolher tudo" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "Expandir tudo" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "Saída do teste unitário" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "Erro" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "O processo falhou ao iniciar" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "Executando teste..." 
-#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "Parar" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "Parar processo de teste atual" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "Executar testes" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "não executar" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "pendente" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "executando" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "falha" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "erro na compilação" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Russian\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: ru_RU\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 
msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." 
msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/spyder_unittest.pot spyder-unittest-0.6.0/spyder_unittest/locale/spyder_unittest.pot --- spyder-unittest-0.5.1/spyder_unittest/locale/spyder_unittest.pot 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/spyder_unittest.pot 2023-07-02 15:10:44.000000000 +0000 @@ -5,7 +5,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -14,204 +14,224 @@ "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" -#: spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "" "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: 
spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" Binary files /tmp/tmp83t07kth/vqV3a7Laqb/spyder-unittest-0.5.1/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.mo and /tmp/tmp83t07kth/vPPUqCMpiJ/spyder-unittest-0.6.0/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.mo differ diff -Nru spyder-unittest-0.5.1/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.po spyder-unittest-0.6.0/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.po --- spyder-unittest-0.5.1/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.po 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.po 2023-07-02 15:10:44.000000000 +0000 @@ -1,8 +1,8 @@ msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" -"POT-Creation-Date: 2022-09-03 17:43+0100\n" -"PO-Revision-Date: 2022-09-03 20:06\n" +"POT-Creation-Date: 2023-06-23 16:41+0100\n" +"PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Chinese Simplified\n" "MIME-Version: 1.0\n" @@ -17,204 +17,224 @@ "X-Crowdin-File-ID: 49\n" "Language: zh_CN\n" -#: 
spyder_unittest/backend/noserunner.py:86 +#: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" -#: spyder_unittest/backend/noserunner.py:88 +#: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:114 +#: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" -#: spyder_unittest/backend/pytestrunner.py:115 +#: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" -#: spyder_unittest/unittestplugin.py:60 spyder_unittest/widgets/unittestgui.py:136 +#: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" -#: spyder_unittest/unittestplugin.py:71 +#: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" -#: spyder_unittest/unittestplugin.py:94 spyder_unittest/unittestplugin.py:95 spyder_unittest/widgets/unittestgui.py:421 +#: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:66 +#: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" -#: spyder_unittest/widgets/configdialog.py:70 -msgid "Test framework" +#: spyder_unittest/widgets/configdialog.py:86 +msgid "Test framework:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:79 spyder_unittest/widgets/unittestgui.py:296 +#: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" -#: spyder_unittest/widgets/configdialog.py:88 +#: spyder_unittest/widgets/configdialog.py:102 +msgid "Command-line arguments:" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:106 +msgid "Extra command-line arguments when running tests" +msgstr "" + +#: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" -#: spyder_unittest/widgets/configdialog.py:89 +#: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" -#: spyder_unittest/widgets/configdialog.py:99 -msgid "Directory from which to run tests" +#: spyder_unittest/widgets/configdialog.py:131 +msgid "Directory from which to run tests:" msgstr "" -#: spyder_unittest/widgets/configdialog.py:105 spyder_unittest/widgets/configdialog.py:159 +#: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/confpage.py:26 +msgid "Settings" +msgstr "" + +#: spyder_unittest/widgets/confpage.py:28 +msgid "Abbreviate test names" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" -#: spyder_unittest/widgets/datatree.py:51 +#: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" -#: spyder_unittest/widgets/datatree.py:148 +#: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" -#: spyder_unittest/widgets/datatree.py:151 +#: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" -#: spyder_unittest/widgets/datatree.py:156 +#: 
spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:169 +msgid "Run only this test" +msgstr "" + +#: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" -#: spyder_unittest/widgets/datatree.py:409 +#: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" -#: spyder_unittest/widgets/datatree.py:413 +#: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" -#: spyder_unittest/widgets/datatree.py:418 +#: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" -#: spyder_unittest/widgets/datatree.py:419 +#: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" -#: spyder_unittest/widgets/datatree.py:420 +#: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" -#: spyder_unittest/widgets/datatree.py:422 +#: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" -#: spyder_unittest/widgets/datatree.py:424 +#: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" -#: spyder_unittest/widgets/datatree.py:429 +#: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:149 +#: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" -#: spyder_unittest/widgets/unittestgui.py:156 +#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:163 +#: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:170 +#: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:177 spyder_unittest/widgets/unittestgui.py:302 +#: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:250 +#: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:293 +#: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:390 +#: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:393 +#: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" -#: spyder_unittest/widgets/unittestgui.py:414 +#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:415 +#: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:420 +#: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:448 +#: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:457 +#: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:464 spyder_unittest/widgets/unittestgui.py:470 +#: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:471 +#: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:477 +#: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" -#: spyder_unittest/widgets/unittestgui.py:478 +#: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" diff -Nru spyder-unittest-0.5.1/spyder_unittest/tests/conftest.py spyder-unittest-0.6.0/spyder_unittest/tests/conftest.py --- spyder-unittest-0.5.1/spyder_unittest/tests/conftest.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/tests/conftest.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# +# Copyright © Spyder Project Contributors +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +""" +Configuration file for Pytest. + +This contains the necessary definitions to make the main_window fixture +available for integration tests. +""" + +# Third-party imports +from qtpy.QtWidgets import QApplication +import pytest + +# QtWebEngineWidgets must be imported +# before a QCoreApplication instance is created +from qtpy import QtWebEngineWidgets # noqa + +# Spyder imports +from spyder import dependencies +from spyder.api.plugin_registration.registry import PLUGIN_REGISTRY +from spyder.app import start +from spyder.config.manager import CONF + + +@pytest.fixture +def main_window(monkeypatch): + """Main Window fixture""" + + # Disable loading of old third-party plugins in Spyder 5 + monkeypatch.setattr( + 'spyder.app.mainwindow.get_spyderplugins_mods', lambda: []) + + # Don't show tours message + CONF.set('tours', 'show_tour_message', False) + QApplication.processEvents() + + # Reset global state + dependencies.DEPENDENCIES = [] + PLUGIN_REGISTRY.reset() + + # Start the window + window = start.main() + QApplication.processEvents() + + yield window + + # Close main window + window.close() + CONF.reset_to_defaults(notification=False) diff -Nru spyder-unittest-0.5.1/spyder_unittest/tests/test_unittestplugin.py spyder-unittest-0.6.0/spyder_unittest/tests/test_unittestplugin.py --- spyder-unittest-0.5.1/spyder_unittest/tests/test_unittestplugin.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/tests/test_unittestplugin.py 2023-07-02 15:10:44.000000000 +0000 @@ -1,128 +1,155 @@ # -*- coding: utf-8 -*- # -# Copyright © 2017 Spyder Project Contributors +# Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) -"""Tests for unittestplugin.py""" +""" +Tests for the integration of the plugin with Spyder. 
+""" + +# Standard library imports +from collections import OrderedDict +import os # Third party imports -import pytest -from spyder.plugins.projects.api import EmptyProject -from unittest.mock import MagicMock +from qtpy.QtCore import Qt + +# Spyder imports +from spyder.api.plugins import Plugins # Local imports from spyder_unittest.unittestplugin import UnitTestPlugin from spyder_unittest.widgets.configdialog import Config -class PluginForTesting(UnitTestPlugin): - CONF_FILE = False - - def __init__(self, parent): - UnitTestPlugin.__init__(self, parent) - - -@pytest.fixture -def plugin(qtbot): - """Set up the unittest plugin.""" - res = UnitTestPlugin(None, None) - res._main = MagicMock() - res._main.get_spyder_pythonpath = MagicMock(return_value='fakepythonpath') - res.initialize() - return res - - -@pytest.mark.skip('not clear how to test interactions between plugins') -def test_plugin_initialization(plugin): - assert len(plugin.main.run_menu_actions) == 2 - assert plugin.main.run_menu_actions[1].text() == 'Run unit tests' - - -def test_plugin_pythonpath(plugin): - # Test signal/slot connection - plugin.get_main().sig_pythonpath_changed.connect.assert_called_with( - plugin.update_pythonpath) - - # Test pythonpath is set to path provided by Spyder - assert plugin.get_widget().pythonpath == 'fakepythonpath' - - # Test that change in path propagates - plugin.get_main().get_spyder_pythonpath = MagicMock( - return_value='anotherpath') - plugin.update_pythonpath() - assert plugin.get_widget().pythonpath == 'anotherpath' - - -@pytest.mark.skip('not clear how to test interactions between plugins') -def test_plugin_wdir(plugin, monkeypatch, tmpdir): - # Test signal/slot connections - plugin.main.workingdirectory.sig_current_directory_changed.connect.assert_called_with( - plugin.update_default_wdir) - plugin.main.projects.sig_project_created.connect.assert_called_with( - plugin.handle_project_change) - plugin.main.projects.sig_project_loaded.connect.assert_called_with( - plugin.handle_project_change) - plugin.main.projects.sig_project_closed.connect.assert_called_with( - plugin.handle_project_change) - - # Test default_wdir is set to current working dir - monkeypatch.setattr('spyder_unittest.unittestplugin.getcwd', - lambda: 'fakecwd') - plugin.update_default_wdir() - assert plugin.unittestwidget.default_wdir == 'fakecwd' - - # Test after opening project, default_wdir is set to project dir - project = EmptyProject(str(tmpdir)) - plugin.main.projects.get_active_project = lambda: project - plugin.main.projects.get_active_project_path = lambda: project.root_path - plugin.handle_project_change() - assert plugin.unittestwidget.default_wdir == str(tmpdir) - - # Test after closing project, default_wdir is set back to cwd - plugin.main.projects.get_active_project = lambda: None - plugin.main.projects.get_active_project_path = lambda: None - plugin.handle_project_change() - assert plugin.unittestwidget.default_wdir == 'fakecwd' - +def test_menu_item(main_window): + """ + Test that plugin adds item 'Run unit tests' to Run menu. + """ + actions = main_window.run_menu_actions + + # Filter out seperators (indicated by action is None) and convert to text + menu_items = [action.text() for action in actions if action] + + assert 'Run unit tests' in menu_items + + +def test_pythonpath_change(main_window): + """ + Test that pythonpath changes in Spyder propagate to UnitTestWidget. 
+ """ + ppm = main_window.get_plugin(Plugins.PythonpathManager) + unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) + + new_path = '/some/path' + new_path_dict = OrderedDict([(new_path, True)]) + ppm.get_container()._update_python_path(new_path_dict) + + assert unittest_plugin.get_widget().pythonpath == [new_path] + + +def test_default_working_dir(main_window, tmpdir): + """ + Test that plugin's default working dir is current working directory. + After creating a project, the plugin's default working dir should be the + same as the project directory. When the project is closed again, the + plugin's default working dir should revert back to the current working + directory. + """ + projects = main_window.get_plugin(Plugins.Projects) + unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) + project_dir = str(tmpdir) + + assert unittest_plugin.get_widget().default_wdir == os.getcwd() + + projects._create_project(project_dir) + assert unittest_plugin.get_widget().default_wdir == project_dir + + projects.close_project() + assert unittest_plugin.get_widget().default_wdir == os.getcwd() + + +def test_plugin_config(main_window, tmpdir, qtbot): + """ + Test that plugin uses the project's config file if a project is open. + """ + projects = main_window.get_plugin(Plugins.Projects) + unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) + unittest_widget = unittest_plugin.get_widget() + project_dir = str(tmpdir) + config_file_path = tmpdir.join('.spyproject', 'config', 'unittest.ini') -@pytest.mark.skip('not clear how to test interactions between plugins') -def test_plugin_config(plugin, tmpdir, qtbot): # Test config file does not exist and config is empty - config_file_path = tmpdir.join('.spyproject', 'config', 'unittest.ini') assert not config_file_path.check() - assert plugin.unittestwidget.config is None + assert unittest_widget.config is None - # Open project - project = EmptyProject(str(tmpdir)) - plugin.main.projects.get_active_project = lambda: project - plugin.main.projects.get_active_project_path = lambda: project.root_path - plugin.handle_project_change() + # Create new project + projects._create_project(project_dir) # Test config file does exist but config is empty assert config_file_path.check() assert 'framework = ' in config_file_path.read().splitlines() - assert plugin.unittestwidget.config is None + assert unittest_widget.config is None # Set config and test that this is recorded in config file config = Config(framework='unittest', wdir=str(tmpdir)) - with qtbot.waitSignal(plugin.unittestwidget.sig_newconfig): - plugin.unittestwidget.config = config + with qtbot.waitSignal(unittest_widget.sig_newconfig): + unittest_widget.config = config assert 'framework = unittest' in config_file_path.read().splitlines() # Close project and test that config is empty - plugin.main.projects.get_active_project = lambda: None - plugin.main.projects.get_active_project_path = lambda: None - plugin.handle_project_change() - assert plugin.unittestwidget.config is None + projects.close_project() + assert unittest_widget.config is None # Re-open project and test that config is correctly read - plugin.main.projects.get_active_project = lambda: project - plugin.main.projects.get_active_project_path = lambda: project.root_path - plugin.handle_project_change() - assert plugin.unittestwidget.config == config + projects.open_project(project_dir) + assert unittest_widget.config == config + + # Close project before ending test, which removes the project dir + projects.close_project() 
-@pytest.mark.skip('not clear how to test interactions between plugins') -def test_plugin_goto_in_editor(plugin, qtbot): - plugin.unittestwidget.sig_edit_goto.emit('somefile', 42) - plugin.main.editor.load.assert_called_with('somefile', 43, '') +def test_go_to_test_definition(main_window, tmpdir, qtbot): + """ + Test that double clicking on a test result opens the file with the test + definition in the editor with the cursor on the test definition. + """ + unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) + unittest_widget = unittest_plugin.get_widget() + model = unittest_widget.testdatamodel + view = unittest_widget.testdataview + + # Write test file + testdir_str = str(tmpdir) + testfile_str = tmpdir.join('test_foo.py').strpath + os.chdir(testdir_str) + with open(testfile_str, 'w') as f: + f.write("def test_ok(): assert 1+1 == 2\n" + "def test_fail(): assert 1+1 == 3\n") + + # Run tests + config = Config(wdir=testdir_str, framework='pytest', coverage=False) + with qtbot.waitSignal( + unittest_widget.sig_finished, timeout=10000, raising=True): + unittest_widget.run_tests(config) + + # Check that row 1 corresponds to `test_fail` + index = model.index(1, 1) + point = view.visualRect(index).center() + assert view.indexAt(point).data(Qt.DisplayRole).endswith('test_fail') + + # Double click on `test_fail` + unittest_plugin.switch_to_plugin() + with qtbot.waitSignal(view.sig_edit_goto): + qtbot.mouseClick(view.viewport(), Qt.LeftButton, pos=point, delay=100) + qtbot.mouseDClick(view.viewport(), Qt.LeftButton, pos=point) + + # Check that test file is opened in editor + editor = main_window.get_plugin(Plugins.Editor) + filename = editor.get_current_filename() + assert filename == testfile_str + + # Check that cursor is on line defining `test_fail` + cursor = editor.get_current_editor().textCursor() + line = cursor.block().text() + assert line.startswith('def test_fail') diff -Nru spyder-unittest-0.5.1/spyder_unittest/unittestplugin.py spyder-unittest-0.6.0/spyder_unittest/unittestplugin.py --- spyder-unittest-0.5.1/spyder_unittest/unittestplugin.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/unittestplugin.py 2023-07-02 15:10:44.000000000 +0000 @@ -10,14 +10,20 @@ import os.path as osp # Third party imports +import qtawesome +from qtpy.QtCore import Qt + +# Spyder imports from spyder.api.plugins import Plugins, SpyderDockablePlugin -from spyder.api.plugin_registration.decorators import on_plugin_available +from spyder.api.plugin_registration.decorators import ( + on_plugin_available, on_plugin_teardown) from spyder.config.base import get_translation -from spyder.config.gui import is_dark_interface from spyder.plugins.mainmenu.api import ApplicationMenus +from spyder.utils.palette import SpyderPalette # Local imports from spyder_unittest.widgets.configdialog import Config +from spyder_unittest.widgets.confpage import UnitTestConfigPage from spyder_unittest.widgets.unittestgui import UnitTestWidget _ = get_translation('spyder_unittest') @@ -33,17 +39,24 @@ NAME = 'unittest' REQUIRES = [] OPTIONAL = [Plugins.Editor, Plugins.MainMenu, Plugins.Preferences, - Plugins.Projects, Plugins.WorkingDirectory] + Plugins.Projects, Plugins.PythonpathManager, + Plugins.WorkingDirectory] TABIFY = [Plugins.VariableExplorer] WIDGET_CLASS = UnitTestWidget CONF_SECTION = NAME CONF_DEFAULTS = [(CONF_SECTION, - {'framework': '', 'wdir': '', 'coverage': False})] + {'framework': '', + 'wdir': '', + 'coverage': False, + 'abbrev_test_names': False}), + ('shortcuts', + 
{'unittest/Run tests': 'Alt+Shift+F11'})] CONF_NAMEMAP = {CONF_SECTION: [(CONF_SECTION, ['framework', 'wdir', 'coverage'])]} CONF_FILE = True - CONF_VERSION = '0.1.0' + CONF_VERSION = '0.2.0' + CONF_WIDGET_CLASS = UnitTestConfigPage # --- Mandatory SpyderDockablePlugin methods ------------------------------ @@ -79,24 +92,22 @@ QIcon QIcon instance """ - return self.create_icon('profiler') + return qtawesome.icon('mdi.test-tube', color=SpyderPalette.ICON_1) def on_initialize(self): """ Setup the plugin. """ - self.update_pythonpath() - self.get_main().sig_pythonpath_changed.connect(self.update_pythonpath) self.get_widget().sig_newconfig.connect(self.save_config) self.create_action( UnitTestPluginActions.Run, text=_('Run unit tests'), tip=_('Run unit tests'), - icon=self.create_icon('profiler'), + icon=self.get_icon(), triggered=self.maybe_configure_and_start, + context=Qt.ApplicationShortcut, register_shortcut=True) - # TODO: shortcut="Shift+Alt+F11" # ----- Set up interactions with other plugins ---------------------------- @@ -116,6 +127,14 @@ self.get_widget().pre_test_hook = editor.save_all self.get_widget().sig_edit_goto.connect(self.goto_in_editor) + @on_plugin_teardown(plugin=Plugins.Editor) + def on_editor_teardown(self): + """ + Disconnect from Editor plugin. + """ + self.get_widget().pre_test_hook = None + self.get_widget().sig_edit_goto.disconnect(self.goto_in_editor) + @on_plugin_available(plugin=Plugins.MainMenu) def on_main_menu_available(self): """ @@ -126,15 +145,34 @@ mainmenu.add_item_to_application_menu( run_action, menu_id=ApplicationMenus.Run) + @on_plugin_teardown(plugin=Plugins.MainMenu) + def on_main_menu_teardown(self): + """ + Remove 'Run unit tests; menu item from the application menu. + """ + mainmenu = self.get_plugin(Plugins.MainMenu) + mainmenu.remove_item_from_application_menu( + UnitTestPluginActions.Run, menu_id=ApplicationMenus.Run) + @on_plugin_available(plugin=Plugins.Preferences) def on_preferences_available(self): """ Use config when Preferences plugin available. - Specifically, find out whether Spyder uses a dark interface and - communicate this to the unittest widget. + Specifically, register the unittest plugin preferences, and find out + whether Spyder uses a dark interface and communicate this to the + unittest widget. + """ + preferences = self.get_plugin(Plugins.Preferences) + preferences.register_plugin_preferences(self) + + @on_plugin_teardown(plugin=Plugins.Preferences) + def on_preferences_teardown(self): """ - self.get_widget().use_dark_interface(is_dark_interface()) + De-register unittest plugin preferences. + """ + preferences = self.get_plugin(Plugins.Preferences) + preferences.deregister_plugin_preferences(self) @on_plugin_available(plugin=Plugins.Projects) def on_projects_available(self): @@ -148,6 +186,32 @@ projects.sig_project_loaded.connect(self.handle_project_change) projects.sig_project_closed.connect(self.handle_project_change) + @on_plugin_teardown(plugin=Plugins.Projects) + def on_projects_teardown(self): + """ + Disconnect from Projects plugin. + """ + projects = self.get_plugin(Plugins.Projects) + projects.sig_project_created.disconnect(self.handle_project_change) + projects.sig_project_loaded.disconnect(self.handle_project_change) + projects.sig_project_closed.disconnect(self.handle_project_change) + + @on_plugin_available(plugin=Plugins.PythonpathManager) + def on_pythonpath_manager_available(self): + """ + Connect to signal announcing that Python path changed. 
+ """ + ppm = self.get_plugin(Plugins.PythonpathManager) + ppm.sig_pythonpath_changed.connect(self.update_pythonpath) + + @on_plugin_teardown(plugin=Plugins.PythonpathManager) + def on_pythonpath_manager_teardown(self): + """ + Disconnect from PythonpathManager plugin. + """ + ppm = self.get_plugin(Plugins.PythonpathManager) + ppm.sig_pythonpath_changed.disconnect(self.update_pythonpath) + @on_plugin_available(plugin=Plugins.WorkingDirectory) def on_working_directory_available(self): """ @@ -161,6 +225,15 @@ self.update_default_wdir) self.update_default_wdir() + @on_plugin_teardown(plugin=Plugins.WorkingDirectory) + def on_working_directory_teardown(self): + """ + Disconnect from WorkingDirectory plugin. + """ + working_directory = self.get_plugin(Plugins.WorkingDirectory) + working_directory.sig_current_directory_changed.disconnect( + self.update_default_wdir) + # --- UnitTestPlugin methods ---------------------------------------------- def update_pythonpath(self): @@ -171,7 +244,8 @@ It synchronizes the Python path in the unittest widget with the Python path in Spyder. """ - self.get_widget().pythonpath = self.get_main().get_spyder_pythonpath() + ppm = self.get_plugin(Plugins.PythonpathManager) + self.get_widget().pythonpath = ppm.get_spyder_pythonpath() def handle_project_change(self): """ diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/configdialog.py spyder-unittest-0.6.0/spyder_unittest/widgets/configdialog.py --- spyder-unittest-0.5.1/spyder_unittest/widgets/configdialog.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/widgets/configdialog.py 2023-07-02 15:10:44.000000000 +0000 @@ -9,18 +9,20 @@ The main entry point is `ask_for_config()`. """ +from __future__ import annotations + # Standard library imports -from collections import namedtuple from os import getcwd -from pkgutil import find_loader as find_spec_or_loader import os.path as osp +import shlex +from typing import Optional, NamedTuple # Third party imports from qtpy.compat import getexistingdirectory from qtpy.QtCore import Slot -from qtpy.QtWidgets import (QApplication, QComboBox, QDialog, QDialogButtonBox, - QHBoxLayout, QLabel, QLineEdit, QPushButton, - QVBoxLayout, QCheckBox) +from qtpy.QtWidgets import ( + QApplication, QComboBox, QDialog, QDialogButtonBox, QGridLayout, + QHBoxLayout, QLabel, QLineEdit, QPushButton, QVBoxLayout, QCheckBox) from spyder.config.base import get_translation from spyder.utils import icon_manager as ima @@ -31,8 +33,11 @@ import gettext _ = gettext.gettext -Config = namedtuple('Config', ['framework', 'wdir', 'coverage']) -Config.__new__.__defaults__ = (None, '', False) +class Config(NamedTuple): + framework: Optional[str] = None + wdir: str = '' + coverage: bool = False + args: list[str] = [] class ConfigDialog(QDialog): @@ -46,6 +51,13 @@ the OK button. """ + # Width of strut in the layout of the dialog window; this determines + # the width of the dialog + STRUT_WIDTH = 400 + + # Extra vertical space added between elements in the dialog + EXTRA_SPACE = 10 + def __init__(self, frameworks, config, versions, parent=None): """ Construct a dialog window. 
@@ -65,10 +77,14 @@ self.versions = versions self.setWindowTitle(_('Configure tests')) layout = QVBoxLayout(self) + layout.addStrut(self.STRUT_WIDTH) + + grid_layout = QGridLayout() - framework_layout = QHBoxLayout() - framework_label = QLabel(_('Test framework')) - framework_layout.addWidget(framework_label) + # Combo box for selecting the test framework + + framework_label = QLabel(_('Test framework:')) + grid_layout.addWidget(framework_label, 0, 0) self.framework_combobox = QComboBox(self) for ix, (name, runner) in enumerate(sorted(frameworks.items())): @@ -79,11 +95,25 @@ label = '{} ({})'.format(name, _('not available')) self.framework_combobox.addItem(label) self.framework_combobox.model().item(ix).setEnabled(installed) + grid_layout.addWidget(self.framework_combobox, 0, 1) + + # Line edit field for adding extra command-line arguments + + args_label = QLabel(_('Command-line arguments:')) + grid_layout.addWidget(args_label, 1, 0) + + self.args_lineedit = QLineEdit(self) + args_toolTip = _('Extra command-line arguments when running tests') + self.args_lineedit.setToolTip(args_toolTip) + grid_layout.addWidget(self.args_lineedit, 1, 1) - framework_layout.addWidget(self.framework_combobox) - layout.addLayout(framework_layout) + layout.addLayout(grid_layout) + spacing = grid_layout.verticalSpacing() + self.EXTRA_SPACE + grid_layout.setVerticalSpacing(spacing) - layout.addSpacing(10) + layout.addSpacing(self.EXTRA_SPACE) + + # Checkbox for enabling coverage report coverage_label = _('Include coverage report in output') coverage_toolTip = _('Works only for pytest, requires pytest-cov') @@ -94,9 +124,11 @@ coverage_layout.addWidget(self.coverage_checkbox) layout.addLayout(coverage_layout) - layout.addSpacing(10) + layout.addSpacing(self.EXTRA_SPACE) + + # Line edit field for selecting directory - wdir_label = QLabel(_('Directory from which to run tests')) + wdir_label = QLabel(_('Directory from which to run tests:')) layout.addWidget(wdir_label) wdir_layout = QHBoxLayout() self.wdir_lineedit = QLineEdit(self) @@ -107,7 +139,9 @@ wdir_layout.addWidget(self.wdir_button) layout.addLayout(wdir_layout) - layout.addSpacing(20) + layout.addSpacing(2 * self.EXTRA_SPACE) + + # OK and Cancel buttons at the bottom self.buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel) @@ -120,14 +154,17 @@ self.framework_combobox.currentIndexChanged.connect( self.framework_changed) + # Set initial values to agree with the given config + self.framework_combobox.setCurrentIndex(-1) if config.framework: index = self.framework_combobox.findText(config.framework) if index != -1: self.framework_combobox.setCurrentIndex(index) - self.wdir_lineedit.setText(config.wdir) self.coverage_checkbox.setChecked(config.coverage) self.enable_coverage_checkbox_if_available() + self.args_lineedit.setText(shlex.join(config.args)) + self.wdir_lineedit.setText(config.wdir) @Slot(int) def framework_changed(self, index): @@ -173,8 +210,12 @@ framework = self.framework_combobox.currentText() if framework == '': framework = None + + args = self.args_lineedit.text() + args = shlex.split(args) + return Config(framework=framework, wdir=self.wdir_lineedit.text(), - coverage=self.coverage_checkbox.isChecked()) + coverage=self.coverage_checkbox.isChecked(), args=args) def ask_for_config(frameworks, config, versions, parent=None): @@ -192,6 +233,14 @@ if __name__ == '__main__': app = QApplication([]) - frameworks = ['nose', 'pytest', 'unittest'] - config = Config(framework=None, wdir=getcwd(), coverage=False) - 
-    print(ask_for_config(frameworks, config))
+    frameworks = {
+        'nose2': object,
+        'unittest': object,
+        'pytest': object}
+    versions = {
+        'nose2': {'available': False},
+        'unittest': {'available': True},
+        'pytest': {'available': True, 'plugins': {'pytest-cov', '3.1.4'}}
+    }
+    config = Config(wdir=getcwd())
+    print(ask_for_config(frameworks, config, versions))
diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/confpage.py spyder-unittest-0.6.0/spyder_unittest/widgets/confpage.py
--- spyder-unittest-0.5.1/spyder_unittest/widgets/confpage.py 1970-01-01 00:00:00.000000000 +0000
+++ spyder-unittest-0.6.0/spyder_unittest/widgets/confpage.py 2023-07-02 15:10:44.000000000 +0000
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+#
+# -----------------------------------------------------------------------------
+# Copyright (c) 2023- Spyder Project Contributors
+#
+# Released under the terms of the MIT License
+# (see LICENSE.txt in the project root directory for details)
+# -----------------------------------------------------------------------------
+
+"""
+Spyder-unittest Preferences Page.
+"""
+
+# Third party imports
+from qtpy.QtWidgets import QGroupBox, QVBoxLayout
+from spyder.api.preferences import PluginConfigPage
+from spyder.api.translations import get_translation
+
+# Localization
+_ = get_translation('spyder_unittest')
+
+
+class UnitTestConfigPage(PluginConfigPage):
+
+    def setup_page(self) -> None:
+        settings_group = QGroupBox(_('Settings'))
+        self.abbrev_box = self.create_checkbox(
+            _('Abbreviate test names'), 'abbrev_test_names', default=False)
+
+        settings_layout = QVBoxLayout()
+        settings_layout.addWidget(self.abbrev_box)
+        settings_group.setLayout(settings_layout)
+
+        vlayout = QVBoxLayout()
+        vlayout.addWidget(settings_group)
+        vlayout.addStretch(1)
+        self.setLayout(vlayout)
diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/datatree.py spyder-unittest-0.6.0/spyder_unittest/widgets/datatree.py
--- spyder-unittest-0.5.1/spyder_unittest/widgets/datatree.py 2022-09-03 20:29:17.000000000 +0000
+++ spyder-unittest-0.6.0/spyder_unittest/widgets/datatree.py 2023-07-02 15:10:44.000000000 +0000
@@ -14,7 +14,9 @@
 from qtpy.QtCore import QAbstractItemModel, QModelIndex, Qt, Signal
 from qtpy.QtGui import QBrush, QColor, QFont
 from qtpy.QtWidgets import QMenu, QTreeView
+from spyder.api.config.mixins import SpyderConfigurationAccessor
 from spyder.config.base import get_translation
+from spyder.utils.palette import QStylePalette, SpyderPalette
 from spyder.utils.qthelpers import create_action
 
 # Local imports
@@ -28,19 +30,11 @@
     _ = gettext.gettext
 
 COLORS = {
-    Category.OK: QBrush(QColor("#C1FFBA")),
-    Category.FAIL: QBrush(QColor("#FF5050")),
-    Category.SKIP: QBrush(QColor("#C5C5C5")),
-    Category.PENDING: QBrush(QColor("#C5C5C5")),
-    Category.COVERAGE: QBrush(QColor("#89CFF0"))
-}
-
-COLORS_DARK = {
-    Category.OK: QBrush(QColor("#008000")),
-    Category.FAIL: QBrush(QColor("#C6001E")),
-    Category.SKIP: QBrush(QColor("#505050")),
-    Category.PENDING: QBrush(QColor("#505050")),
-    Category.COVERAGE: QBrush(QColor("#0047AB"))
+    Category.OK: SpyderPalette.COLOR_SUCCESS_1,
+    Category.FAIL: SpyderPalette.COLOR_ERROR_1,
+    Category.SKIP: SpyderPalette.COLOR_WARN_1,
+    Category.PENDING: QStylePalette.COLOR_BACKGROUND_1,
+    Category.COVERAGE: QStylePalette.COLOR_ACCENT_1
 }
 
 STATUS_COLUMN = 0
@@ -61,9 +55,13 @@
     -------
     sig_edit_goto(str, int): Emitted if editor should go to some position.
         Arguments are file name and line number (zero-based).
+    sig_single_test_run_requested(str): Emitted to request a single test
+        to be run. Argument is the name of the test.
     """
 
     sig_edit_goto = Signal(str, int)
+    sig_single_test_run_requested = Signal(str)
+
     __test__ = False  # this is not a pytest test class
 
     def __init__(self, parent=None):
@@ -126,6 +124,13 @@
             lineno = 0
         self.sig_edit_goto.emit(filename, lineno)
 
+    def run_single_test(self, index):
+        """Ask plugin to run only the test corresponding to index."""
+        index = self.make_index_canonical(index)
+        testresult = self.model().testresults[index.row()]
+        testname = testresult.name
+        self.sig_single_test_run_requested.emit(testname)
+
     def make_index_canonical(self, index):
         """
         Convert given index to canonical index for the same test.
@@ -152,12 +157,21 @@
             triggered=lambda: self.expand(index))
         menuItem.setEnabled(self.model().hasChildren(index))
         contextMenu.addAction(menuItem)
+
         menuItem = create_action(
             self, _('Go to definition'),
             triggered=lambda: self.go_to_test_definition(index))
         test_location = self.model().data(index, Qt.UserRole)
         menuItem.setEnabled(test_location[0] is not None)
         contextMenu.addAction(menuItem)
+
+        menuItem = create_action(
+            self, _('Run only this test'),
+            triggered=lambda: self.run_single_test(index))
+        result_category = self.model().testresults[index.row()].category
+        menuItem.setEnabled(result_category != Category.COVERAGE)
+        contextMenu.addAction(menuItem)
+
         return contextMenu
 
     def resizeColumns(self):
@@ -186,7 +200,7 @@
             self.setFirstColumnSpanned(i, index, True)
 
 
-class TestDataModel(QAbstractItemModel):
+class TestDataModel(QAbstractItemModel, SpyderConfigurationAccessor):
     """
     Model class storing test results for display.
 
@@ -198,17 +212,14 @@
     a tuple (row, column, id). The id is TOPLEVEL_ID for top-level items.
     For level-2 items, the id is the index of the test in `self.testresults`.
 
-    Attributes
-    ----------
-    is_dark_interface : bool
-        Whether to use colours appropriate for a dark user interface.
-
     Signals
     -------
     sig_summary(str)
         Emitted with new summary if test results change.
""" + CONF_SECTION = 'unittest' + sig_summary = Signal(str) __test__ = False # this is not a pytest test class @@ -216,7 +227,6 @@ """Constructor.""" QAbstractItemModel.__init__(self, parent) self.abbreviator = Abbreviator() - self.is_dark_interface = False self.testresults = [] try: self.monospace_font = parent.window().editor.get_plugin_font() @@ -321,10 +331,14 @@ elif column == STATUS_COLUMN: return self.testresults[row].status elif column == NAME_COLUMN: + name = self.testresults[row].name # don't abbreviate for the code coverage filename if self.testresults[row].category == Category.COVERAGE: - return self.testresults[row].name - return self.abbreviator.abbreviate(self.testresults[row].name) + return name + if self.get_conf('abbrev_test_names', False): + return self.abbreviator.abbreviate(name) + else: + return name elif column == MESSAGE_COLUMN: return self.testresults[row].message elif column == TIME_COLUMN: @@ -339,10 +353,8 @@ elif role == Qt.BackgroundRole: if id == TOPLEVEL_ID: testresult = self.testresults[row] - if self.is_dark_interface: - return COLORS_DARK[testresult.category] - else: - return COLORS[testresult.category] + color = COLORS[testresult.category] + return QBrush(QColor(color)) elif role == Qt.TextAlignmentRole: if id == TOPLEVEL_ID and column == TIME_COLUMN: return Qt.AlignRight diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_configdialog.py spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_configdialog.py --- spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_configdialog.py 2022-09-03 20:29:17.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_configdialog.py 2023-07-02 15:10:44.000000000 +0000 @@ -49,7 +49,7 @@ def default_config(): - return Config(framework=None, wdir=os.getcwd(), coverage=False) + return Config(framework=None, wdir=os.getcwd(), coverage=False, args=[]) def test_configdialog_uses_frameworks(qtbot): @@ -76,12 +76,13 @@ def test_configdialog_sets_initial_config(qtbot): - config = default_config() + config = Config(framework='pytest', wdir='/some/dir', + coverage=True, args=['some', 'arg']) configdialog = ConfigDialog(frameworks, config, versions) assert configdialog.get_config() == config -def test_configdialog_click_ham(qtbot): +def test_configdialog_click_pytest(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) configdialog.framework_combobox.setCurrentIndex(1) @@ -125,6 +126,13 @@ assert configdialog.coverage_checkbox.isEnabled() is False +def test_configdialog_args_lineedit(qtbot): + configdialog = ConfigDialog(frameworks, default_config(), versions) + qtbot.addWidget(configdialog) + configdialog.args_lineedit.setText('-x "ham and" spam') + assert configdialog.get_config().args == ['-x', 'ham and', 'spam'] + + def test_configdialog_wdir_lineedit(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_confpage.py spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_confpage.py --- spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_confpage.py 1970-01-01 00:00:00.000000000 +0000 +++ spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_confpage.py 2023-07-02 15:10:44.000000000 +0000 @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright (c) 2023- Spyder Project Contributors +# +# Licensed under the terms of the MIT 
+# (see LICENSE.txt for details)
+# -----------------------------------------------------------------------------
+
+# Standard library imports
+import sys
+import types
+from unittest.mock import Mock, MagicMock
+
+# Third party imports
+from qtpy.QtWidgets import QWidget, QMainWindow
+import pytest
+
+# Spyder imports
+from spyder.api.plugins import Plugins
+from spyder.api.plugin_registration.registry import PLUGIN_REGISTRY
+from spyder.app.cli_options import get_options
+from spyder.config.manager import CONF
+from spyder.plugins.preferences.plugin import Preferences
+
+# Local imports
+from spyder_unittest.unittestplugin import UnitTestPlugin
+
+
+# -----------------------------------------------------------------------------
+#
+# Classes and fixtures copied from spyder/plugins/preferences/tests/conftest.py
+
+class MainWindowMock(QMainWindow):
+    register_shortcut = Mock()
+
+    def __init__(self, parent):
+        super().__init__(parent)
+        self.default_style = None
+        self.widgetlist = []
+        self.thirdparty_plugins = []
+        self.shortcut_data = []
+        self.prefs_dialog_instance = None
+        self._APPLICATION_TOOLBARS = MagicMock()
+
+        self.console = Mock()
+
+        # To provide command line options for plugins that need them
+        sys_argv = [sys.argv[0]]  # Avoid options passed to pytest
+        self._cli_options = get_options(sys_argv)[0]
+
+        PLUGIN_REGISTRY.reset()
+        PLUGIN_REGISTRY.sig_plugin_ready.connect(self.register_plugin)
+        PLUGIN_REGISTRY.register_plugin(self, Preferences)
+
+        # Load shortcuts for tests
+        for context, name, __ in CONF.iter_shortcuts():
+            self.shortcut_data.append((None, context, name, None, None))
+
+        for attr in ['mem_status', 'cpu_status']:
+            mock_attr = Mock()
+            setattr(mock_attr, 'toolTip', lambda: '')
+            setattr(mock_attr, 'setToolTip', lambda x: '')
+            setattr(mock_attr, 'prefs_dialog_instance', lambda: '')
+            setattr(self, attr, mock_attr)
+
+    def register_plugin(self, plugin_name, external=False):
+        plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
+        plugin._register(omit_conf=True)
+
+    def get_plugin(self, plugin_name, error=True):
+        if plugin_name in PLUGIN_REGISTRY:
+            return PLUGIN_REGISTRY.get_plugin(plugin_name)
+
+    def set_prefs_size(self, size):
+        pass
+
+
+class ConfigDialogTester(QWidget):
+    def __init__(self, parent, main_class,
+                 general_config_plugins, plugins):
+        super().__init__(parent)
+        self._main = main_class(self) if main_class else None
+        if self._main is None:
+            self._main = MainWindowMock(self)
+
+        def set_prefs_size(self, size):
+            pass
+
+        def register_plugin(self, plugin_name, external=False):
+            plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
+            plugin._register()
+
+        def get_plugin(self, plugin_name, error=True):
+            if plugin_name in PLUGIN_REGISTRY:
+                return PLUGIN_REGISTRY.get_plugin(plugin_name)
+            return None
+
+        # Commented out because it gives the error:
+        # A plugin with section "unittest" already exists!
+        # setattr(self._main, 'register_plugin',
+        #         types.MethodType(register_plugin, self._main))
+        setattr(self._main, 'get_plugin',
+                types.MethodType(get_plugin, self._main))
+        setattr(self._main, 'set_prefs_size',
+                types.MethodType(set_prefs_size, self._main))
+
+        PLUGIN_REGISTRY.reset()
+        PLUGIN_REGISTRY.sig_plugin_ready.connect(self._main.register_plugin)
+        print(f'ConfigDialogTester registering {Preferences=}')
+        PLUGIN_REGISTRY.register_plugin(self._main, Preferences)
+
+        if plugins:
+            for Plugin in plugins:
+                if hasattr(Plugin, 'CONF_WIDGET_CLASS'):
+                    for required in (Plugin.REQUIRES or []):
+                        if required not in PLUGIN_REGISTRY:
+                            PLUGIN_REGISTRY.plugin_registry[required] = MagicMock()
+
+                    PLUGIN_REGISTRY.register_plugin(self._main, Plugin)
+                else:
+                    plugin = Plugin(self._main)
+                    preferences = self._main.get_plugin(Plugins.Preferences)
+                    preferences.register_plugin_preferences(plugin)
+
+
+@pytest.fixture
+def config_dialog(qtbot, request):
+    # mocker.patch.object(ima, 'icon', lambda x, *_: QIcon())
+    # Above line commented out from source because it gave an error
+
+    main_class, general_config_plugins, plugins = request.param
+
+    main_ref = ConfigDialogTester(
+        None, main_class, general_config_plugins, plugins)
+    qtbot.addWidget(main_ref)
+
+    preferences = main_ref._main.get_plugin(Plugins.Preferences)
+    preferences.open_dialog(None)
+    container = preferences.get_container()
+    dlg = container.dialog
+
+    yield dlg
+
+    dlg.close()
+
+
+# -----------------------------------------------------------------------------
+#
+# Test for the spyder-unittest plugin
+
+@pytest.mark.parametrize(
+    'config_dialog',
+    [[MainWindowMock, [], [UnitTestPlugin]]],
+    indirect=True)
+def test_unittestconfigpage(config_dialog):
+    """Test that changing "Abbreviate test names" works as expected."""
+    # Get reference to Preferences dialog and widget page to interact with
+    dlg = config_dialog
+    widget = config_dialog.get_page()
+
+    # Assert that the default value of the option is False
+    assert widget.get_option('abbrev_test_names') is False
+
+    # Toggle checkbox, apply, and assert that the option value is now True
+    widget.abbrev_box.click()
+    dlg.apply_btn.click()
+    assert widget.get_option('abbrev_test_names') is True
+
+    # Reset options to default and check that the option value is False again
+    # Note: it is necessary to specify the section in reset_to_defaults()
+    CONF.reset_to_defaults(section='unittest', notification=False)
+    assert widget.get_option('abbrev_test_names') is False
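(For context on the option exercised above, not part of the patch: when "Abbreviate test names" is enabled, the results pane shortens each leading component of a dotted test name to its first letter and keeps the final component, so 'test_foo.MyTest.test_ok' is shown as 't.M.test_ok'. The helper below is only an illustration of that expected behaviour; the plugin itself uses its Abbreviator class in datatree.py, which may abbreviate less aggressively to keep names unambiguous.)

    def abbreviate(name: str) -> str:
        # Illustration only: keep the last dotted component and shorten the
        # rest to their first letters ('foo.bar' -> 'f.bar').
        parts = name.split('.')
        return '.'.join([p[0] for p in parts[:-1]] + [parts[-1]])

    assert abbreviate('foo.bar') == 'f.bar'
    assert abbreviate('test_foo.MyTest.test_ok') == 't.M.test_ok'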
diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_datatree.py spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_datatree.py
--- spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_datatree.py 2022-09-03 20:29:17.000000000 +0000
+++ spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_datatree.py 2023-07-02 15:10:44.000000000 +0000
@@ -7,14 +7,14 @@
 
 # Third party imports
 from qtpy.QtCore import QModelIndex, QPoint, Qt
-from qtpy.QtGui import QContextMenuEvent
+from qtpy.QtGui import QBrush, QColor, QContextMenuEvent
 from unittest.mock import Mock
 import pytest
 
 # Local imports
 from spyder_unittest.backend.runnerbase import Category, TestResult
-from spyder_unittest.widgets.datatree import (COLORS, COLORS_DARK,
-                                              TestDataModel, TestDataView)
+from spyder_unittest.widgets.datatree import (
+    COLORS, TestDataModel, TestDataView)
 
 
 @pytest.fixture
@@ -67,6 +67,12 @@
     view.go_to_test_definition(model.index(1, 0))
     assert blocker.args == ['ham.py', 0]
 
+def test_run_single_test(view_and_model, qtbot):
+    view, model = view_and_model
+    with qtbot.waitSignal(view.sig_single_test_run_requested) as blocker:
+        view.run_single_test(model.index(1, 0))
+    assert blocker.args == ['foo.bar']
+
 def test_make_index_canonical_with_index_in_column2(view_and_model):
     view, model = view_and_model
     index = model.index(1, 2)
@@ -88,20 +94,33 @@
 
 def test_build_context_menu(view_and_model):
     view, model = view_and_model
     menu = view.build_context_menu(model.index(0, 0))
+    assert len(menu.actions()) == 3
     assert menu.actions()[0].text() == 'Expand'
     assert menu.actions()[1].text() == 'Go to definition'
+    assert menu.actions()[2].text() == 'Run only this test'
 
 def test_build_context_menu_with_disabled_entries(view_and_model):
     view, model = view_and_model
     menu = view.build_context_menu(model.index(0, 0))
     assert menu.actions()[0].isEnabled() == False
     assert menu.actions()[1].isEnabled() == False
+    assert menu.actions()[2].isEnabled() == True
 
 def test_build_context_menu_with_enabled_entries(view_and_model):
     view, model = view_and_model
     menu = view.build_context_menu(model.index(1, 0))
     assert menu.actions()[0].isEnabled() == True
     assert menu.actions()[1].isEnabled() == True
+    assert menu.actions()[2].isEnabled() == True
+
+def test_build_context_menu_with_coverage_entry(view_and_model):
+    view, model = view_and_model
+    testresult = TestResult(Category.COVERAGE, 'coverage', 'foo')
+    model.testresults.append(testresult)
+    menu = view.build_context_menu(model.index(2, 0))
+    assert menu.actions()[0].isEnabled() == False
+    assert menu.actions()[1].isEnabled() == False
+    assert menu.actions()[2].isEnabled() == False
 
 def test_build_context_menu_with_expanded_entry(view_and_model):
     view, model = view_and_model
@@ -118,12 +137,17 @@
     model.testresults = res
     qtmodeltester.check(model)
 
-def test_testdatamodel_shows_abbreviated_name_in_table(qtbot):
+@pytest.mark.parametrize('config, result',
+                         [(False, 'foo.bar'), (True, 'f.bar')])
+def test_testdatamodel_shows_abbreviated_name_in_table(qtbot, config, result):
     model = TestDataModel()
+    old_config = model.get_conf('abbrev_test_names')
+    model.set_conf('abbrev_test_names', config)
     res = TestResult(Category.OK, 'status', 'foo.bar', '', 0, '')
     model.testresults = [res]
     index = model.index(0, 1)
-    assert model.data(index, Qt.DisplayRole) == 'f.bar'
+    assert model.data(index, Qt.DisplayRole) == result
+    model.set_conf('abbrev_test_names', old_config)
 
 def test_testdatamodel_shows_full_name_in_tooltip(qtbot):
     model = TestDataModel()
@@ -152,19 +176,17 @@
     model.testresults = [res]
     assert model.data(model.index(0, 3), Qt.DisplayRole) == ''
 
-@pytest.mark.parametrize('dark', [False, True])
-def test_testdatamodel_data_background(dark):
+def test_testdatamodel_data_background():
     model = TestDataModel()
-    if dark:
-        model.is_dark_interface = True
     res = [TestResult(Category.OK, 'status', 'foo.bar'),
            TestResult(Category.FAIL, 'error', 'foo.bar', 'kadoom')]
     model.testresults = res
     index = model.index(0, 0)
-    colors = COLORS_DARK if dark else COLORS
-    assert model.data(index, Qt.BackgroundRole) == colors[Category.OK]
+    expected = QBrush(QColor(COLORS[Category.OK]))
+    assert model.data(index, Qt.BackgroundRole) == expected
    index = model.index(1, 2)
-    assert model.data(index, Qt.BackgroundRole) == colors[Category.FAIL]
+    expected = QBrush(QColor(COLORS[Category.FAIL]))
+    assert model.data(index, Qt.BackgroundRole) == expected
 
 def test_testdatamodel_data_userrole():
     model = TestDataModel()
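(A note on the rewritten background-colour test above, not part of the patch: COLORS now stores palette colour strings instead of shared QBrush objects, so the test constructs its own expected brush with QBrush(QColor(...)). That comparison works because QBrush compares by value, style and colour, as this small standalone check illustrates; it assumes only that qtpy is installed.)

    from qtpy.QtGui import QBrush, QColor

    # Two independently constructed brushes with the same colour are equal,
    # which is what the assertions in the test above rely on.
    assert QBrush(QColor('#FF0000')) == QBrush(QColor('#FF0000'))
    assert QBrush(QColor('#FF0000')) != QBrush(QColor('#00FF00'))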
diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_unittestgui.py spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_unittestgui.py
--- spyder-unittest-0.5.1/spyder_unittest/widgets/tests/test_unittestgui.py 2022-09-03 20:29:17.000000000 +0000
+++ spyder-unittest-0.6.0/spyder_unittest/widgets/tests/test_unittestgui.py 2023-07-02 15:10:44.000000000 +0000
@@ -8,7 +8,7 @@
 # Standard library imports
 import os
 import sys
-from unittest.mock import Mock
+from unittest.mock import Mock, patch
 
 # Third party imports
 from qtpy.QtCore import Qt, QProcess
@@ -151,15 +151,22 @@
     expected_text = '{}'.format('Test process exited abnormally')
     assert widget.status_label.text() == expected_text
 
-@pytest.mark.parametrize('framework', ['pytest', 'nose'])
-def test_run_tests_and_display_results(qtbot, widget, tmpdir, monkeypatch, framework):
+def test_unittestwidget_handles_sig_single_test_run_requested(widget):
+    with patch.object(widget, 'run_tests') as mock_run_tests:
+        widget.testdataview.sig_single_test_run_requested.emit('testname')
+        mock_run_tests.assert_called_once_with(single_test='testname')
+
+@pytest.mark.parametrize('framework', ['pytest', 'nose2'])
+@pytest.mark.parametrize('alltests', [True, False])
+def test_run_tests_and_display_results(qtbot, widget, tmpdir, monkeypatch,
+                                       framework, alltests):
     """Basic integration test."""
     os.chdir(tmpdir.strpath)
     testfilename = tmpdir.join('test_foo.py').strpath
 
     with open(testfilename, 'w') as f:
-        f.write("def test_ok(): assert 1+1 == 2\n"
-                "def test_fail(): assert 1+1 == 3\n")
+        f.write("def test_fail(): assert 1+1 == 3\n"
+                "def test_ok(): assert 1+1 == 2\n")
 
     MockQMessageBox = Mock()
     monkeypatch.setattr('spyder_unittest.widgets.unittestgui.QMessageBox',
@@ -167,24 +174,29 @@
 
     config = Config(wdir=tmpdir.strpath, framework=framework, coverage=False)
     with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True):
-        widget.run_tests(config)
+        if alltests:
+            widget.run_tests(config)
+        else:
+            widget.run_tests(config, single_test='test_foo.test_fail')
 
     MockQMessageBox.assert_not_called()
     model = widget.testdatamodel
-    assert model.rowCount() == 2
+    assert model.rowCount() == (2 if alltests else 1)
     assert model.index(0, 0).data(
-        Qt.DisplayRole) == 'ok' if framework == 'nose' else 'passed'
-    assert model.index(0, 1).data(Qt.DisplayRole) == 't.test_ok'
-    assert model.index(0, 1).data(Qt.ToolTipRole) == 'test_foo.test_ok'
-    assert model.index(0, 2).data(Qt.DisplayRole) == ''
-    assert model.index(1, 0).data(
-        Qt.DisplayRole) == 'failure' if framework == 'nose' else 'failed'
-    assert model.index(1, 1).data(Qt.DisplayRole) == 't.test_fail'
-    assert model.index(1, 1).data(Qt.ToolTipRole) == 'test_foo.test_fail'
+        Qt.DisplayRole) == 'failure' if framework == 'nose2' else 'failed'
+    assert model.index(0, 1).data(Qt.DisplayRole) == 'test_foo.test_fail'
+    assert model.index(0, 1).data(Qt.ToolTipRole) == 'test_foo.test_fail'
+    if alltests:
+        assert model.index(1, 0).data(
+            Qt.DisplayRole) == 'ok' if framework == 'nose2' else 'passed'
+        assert model.index(1, 1).data(Qt.DisplayRole) == 'test_foo.test_ok'
+        assert model.index(1, 1).data(Qt.ToolTipRole) == 'test_foo.test_ok'
+        assert model.index(1, 2).data(Qt.DisplayRole) == ''
 
+@pytest.mark.parametrize('alltests', [True, False])
 def test_run_tests_using_unittest_and_display_results(
-        qtbot, widget, tmpdir, monkeypatch):
+        qtbot, widget, tmpdir, monkeypatch, alltests):
     """Basic check."""
     os.chdir(tmpdir.strpath)
     testfilename = tmpdir.join('test_foo.py').strpath
@@ -201,21 +213,57 @@
 
     config = Config(wdir=tmpdir.strpath, framework='unittest', coverage=False)
     with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True):
+        if alltests:
+            widget.run_tests(config)
+        else:
+            widget.run_tests(config, single_test='test_foo.MyTest.test_fail')
+
+    MockQMessageBox.assert_not_called()
+    model = widget.testdatamodel
+    assert model.rowCount() == (2 if alltests else 1)
+    assert model.index(0, 0).data(Qt.DisplayRole) == 'failure'
+    assert model.index(0, 1).data(Qt.DisplayRole) == 'test_foo.MyTest.test_fail'
+    assert model.index(0, 1).data(Qt.ToolTipRole) == 'test_foo.MyTest.test_fail'
+    if alltests:
+        assert model.index(1, 0).data(Qt.DisplayRole) == 'success'
+        assert model.index(1, 1).data(Qt.DisplayRole) == 'test_foo.MyTest.test_ok'
+        assert model.index(1, 1).data(Qt.ToolTipRole) == 'test_foo.MyTest.test_ok'
+        assert model.index(1, 2).data(Qt.DisplayRole) == ''
+
+
+def test_run_tests_with_print_using_unittest_and_display_results(
+        qtbot, widget, tmpdir, monkeypatch):
+    """
+    Run a failing test which prints to stderr using unittest and check
+    that it is displayed as a failing test.
+    Regression test for spyder-ide/spyder-unittest#160.
+    """
+    os.chdir(tmpdir.strpath)
+    testfilename = tmpdir.join('test_foo.py').strpath
+
+    with open(testfilename, 'w') as f:
+        f.write("import sys\n"
+                "import unittest\n"
+                "class MyTest(unittest.TestCase):\n"
+                "    def test_fail(self):\n"
+                "        print('text', file=sys.stderr)\n"
+                "        self.assertEqual(1+1, 3)\n"
+                "    def test_ok(self): self.assertEqual(1+1, 2)\n")
+
+    MockQMessageBox = Mock()
+    monkeypatch.setattr('spyder_unittest.widgets.unittestgui.QMessageBox',
+                        MockQMessageBox)
+
+    config = Config(wdir=tmpdir.strpath, framework='unittest', coverage=False)
+    with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True):
         widget.run_tests(config)
 
     MockQMessageBox.assert_not_called()
     model = widget.testdatamodel
     assert model.rowCount() == 2
-    assert model.index(0, 0).data(Qt.DisplayRole) == 'FAIL'
-    assert model.index(0, 1).data(Qt.DisplayRole) == 't.M.test_fail'
-    assert model.index(0, 1).data(Qt.ToolTipRole) == 'test_foo.MyTest.test_fail'
-    assert model.index(0, 2).data(Qt.DisplayRole) == ''
-    assert model.index(1, 0).data(Qt.DisplayRole) == 'ok'
-    assert model.index(1, 1).data(Qt.DisplayRole) == 't.M.test_ok'
-    assert model.index(1, 1).data(Qt.ToolTipRole) == 'test_foo.MyTest.test_ok'
-    assert model.index(1, 2).data(Qt.DisplayRole) == ''
+    assert model.index(0, 0).data(Qt.DisplayRole) == 'failure'
+    assert model.index(1, 0).data(Qt.DisplayRole) == 'success'
 
-@pytest.mark.parametrize('framework', ['unittest', 'pytest', 'nose'])
+@pytest.mark.parametrize('framework', ['unittest', 'pytest', 'nose2'])
 def test_run_with_no_tests_discovered_and_display_results(
         qtbot, widget, tmpdir, monkeypatch, framework):
     """Basic integration test."""
diff -Nru spyder-unittest-0.5.1/spyder_unittest/widgets/unittestgui.py spyder-unittest-0.6.0/spyder_unittest/widgets/unittestgui.py
--- spyder-unittest-0.5.1/spyder_unittest/widgets/unittestgui.py 2022-09-03 20:29:17.000000000 +0000
+++ spyder-unittest-0.6.0/spyder_unittest/widgets/unittestgui.py 2023-07-02 15:10:44.000000000 +0000
@@ -22,7 +22,7 @@
 
 # Local imports
 from spyder_unittest.backend.frameworkregistry import FrameworkRegistry
-from spyder_unittest.backend.noserunner import NoseRunner
+from spyder_unittest.backend.nose2runner import Nose2Runner
 from spyder_unittest.backend.pytestrunner import PyTestRunner
 from spyder_unittest.backend.runnerbase import Category, TestResult
 from spyder_unittest.backend.unittestrunner import UnittestRunner
@@ -37,7 +37,7 @@
     _ = gettext.gettext
 
 # Supported testing frameworks
-FRAMEWORKS = {NoseRunner, PyTestRunner, UnittestRunner}
+FRAMEWORKS = {Nose2Runner, PyTestRunner, UnittestRunner}
 
 
 class UnitTestWidgetActions:
@@ -117,6 +117,8 @@
         self.testdatamodel = TestDataModel(self)
         self.testdataview.setModel(self.testdatamodel)
         self.testdataview.sig_edit_goto.connect(self.sig_edit_goto)
+        self.testdataview.sig_single_test_run_requested.connect(
+            self.run_single_test)
         self.testdatamodel.sig_summary.connect(self.set_status_label)
 
         self.framework_registry = FrameworkRegistry()
@@ -238,10 +240,6 @@
         """Set test configuration but do not emit any signal."""
         self._config = new_config
 
-    def use_dark_interface(self, flag):
-        """Set whether widget should use colours appropriate for dark UI."""
-        self.testdatamodel.is_dark_interface = flag
-
     def show_log(self):
         """Show output of testing process."""
         if self.output:
@@ -341,14 +339,15 @@
         if self.config_is_valid():
             self.run_tests()
 
-    def run_tests(self, config=None):
+    def run_tests(self, config=None, single_test=None):
         """
         Run unit tests.
 
         First, run `self.pre_test_hook` if it is set, and abort if its return
         value is `False`.
 
-        Then, run the unit tests.
+        Then, run the unit tests. If `single_test` is not None, then only run
+        that test.
 
         The process's output is consumed by `read_output()`.
         When the process finishes, the `finish` signal is emitted.
@@ -358,6 +357,9 @@
         config : Config or None
             configuration for unit tests. If None, use `self.config`.
             In either case, configuration should be valid.
+        single_test : str or None
+            If None, run all tests; otherwise, it is the name of the only test
+            to be run.
         """
         if self.pre_test_hook:
             if self.pre_test_hook() is False:
@@ -384,7 +386,8 @@
         cov_path = config.wdir if cov_path == 'None' else cov_path
         executable = self.get_conf('executable', section='main_interpreter')
         try:
-            self.testrunner.start(config, cov_path, executable, pythonpath)
+            self.testrunner.start(
+                config, cov_path, executable, pythonpath, single_test)
         except RuntimeError:
             QMessageBox.critical(self, _("Error"),
                                  _("Process failed to start"))
@@ -498,6 +501,12 @@
         """
         self.status_label.setText('{}'.format(msg))
 
+    def run_single_test(self, test_name: str) -> None:
+        """
+        Run a single test with the given name.
+        """
+        self.run_tests(single_test=test_name)
+
 
 def test():
     """